From efef7988f197a65f0dc4eeadbc31416a33b8c141 Mon Sep 17 00:00:00 2001
From: Margubur Rahman
Date: Fri, 17 Oct 2025 11:26:11 +0000
Subject: [PATCH] Migrate gsutil usage to gcloud storage
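
Replace legacy gsutil commands with their gcloud storage equivalents
across ten notebooks, following the gsutil to gcloud storage migration
guide. The command mappings applied in this change are:

    gsutil mb -l <loc> -p <proj>  ->  gcloud storage buckets create --location=<loc> --project=<proj>
    gsutil cp [-r|-R]             ->  gcloud storage cp [--recursive]
    gsutil cat                    ->  gcloud storage cat
    gsutil ls -al                 ->  gcloud storage ls --all-versions --long
    gsutil rm -r [-f]             ->  gcloud storage rm --recursive [--continue-on-error]
    gsutil iam ch MEMBER:ROLE     ->  gcloud storage buckets add-iam-policy-binding (one binding per call)

The gsutil -m flag is dropped because gcloud storage parallelizes
transfers by default. Bucket location checks that grepped `gsutil ls
-Lb` output for "Location constraint:" are rewritten to use `gcloud
storage buckets describe --format="value(location)"`, since gcloud
storage does not print that label.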
---
 .../bigquery_ml/bqml-online-prediction.ipynb  |  6 ++--
 .../migration/UJ13 Data Labeling task.ipynb   | 18 ++++-------
 .../get_started_vertex_experiments.ipynb      | 12 +++----
 ...el_garden_movinet_action_recognition.ipynb | 31 +++++++------------
 .../model_garden_phi3_deployment.ipynb        | 13 +++-----
 ...ents_bqml_pipeline_anomaly_detection.ipynb | 15 +++------
 ...l_image_object_detection_export_edge.ipynb | 26 ++++++----------
 ...text_classification_model_evaluation.ipynb | 14 ++++-----
 .../batch_prediction_model_monitoring.ipynb   | 16 +++------
 .../hyperparameter_tuning_xgboost.ipynb       | 18 ++++-------
 10 files changed, 59 insertions(+), 110 deletions(-)

diff --git a/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb b/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb
index 99def69cd..bfb8d8d8d 100644
--- a/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb
+++ b/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb
@@ -359,8 +359,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
-   ]
+    "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}" ]
   },
   {
    "cell_type": "markdown",
@@ -1098,8 +1097,7 @@
     "    ! bq rm -r -f $PROJECT_ID:$BQ_DATASET_NAME\n",
     "# delete the Cloud Storage bucket\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_URI" ]
  }
 ],
 "metadata": {
diff --git a/notebooks/community/migration/UJ13 Data Labeling task.ipynb b/notebooks/community/migration/UJ13 Data Labeling task.ipynb
index 41121e917..6b30f26f7 100644
--- a/notebooks/community/migration/UJ13 Data Labeling task.ipynb
+++ b/notebooks/community/migration/UJ13 Data Labeling task.ipynb
@@ -325,8 +325,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -345,8 +344,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -513,8 +511,7 @@
     "IMPORT_FILE = \"gs://\" + BUCKET_NAME + \"/labeling.csv\"\n",
     "with tf.io.gfile.GFile(IMPORT_FILE, \"w\") as f:\n",
     "    for lf in LABELING_FILES:\n",
-    "        ! wget {lf} | gsutil cp {lf.split(\"/\")[-1]} gs://{BUCKET_NAME}\n",
-    "        f.write(\"gs://\" + BUCKET_NAME + \"/\" + lf.split(\"/\")[-1] + \"\\n\")"
+    "        ! wget {lf} | gcloud storage cp {lf.split(\"/\")[-1]} gs://{BUCKET_NAME}\n",    "        f.write(\"gs://\" + BUCKET_NAME + \"/\" + lf.split(\"/\")[-1] + \"\\n\")"
   ]
  },
@@ -525,8 +522,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE"
-   ]
+    "! gcloud storage cat $IMPORT_FILE" ]
   },
   {
    "cell_type": "markdown",
@@ -1007,8 +1003,7 @@
    "outputs": [],
    "source": [
     "# create placeholder file for valid PDF file with instruction for data labeling\n",
-    "! echo \"this is instruction\" >> instruction.txt | gsutil cp instruction.txt gs://$BUCKET_NAME"
-   ]
+    "! echo \"this is instruction\" >> instruction.txt | gcloud storage cp instruction.txt gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -1382,8 +1377,7 @@
     "\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r gs://$BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
  }
 ],
 "metadata": {
diff --git a/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb b/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb
index 826e28e1c..6f4eac3f9 100644
--- a/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb
+++ b/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb
@@ -472,8 +472,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
-   ]
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -492,8 +491,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -1353,8 +1351,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer.tar.gz"
-   ]
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer.tar.gz" ]
   },
   {
    "cell_type": "markdown",
@@ -1554,8 +1551,7 @@
     "delete_bucket = False\n",
     "\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -rf {BUCKET_URI}"
-   ]
+    "    ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ]
  }
 ],
 "metadata": {
diff --git a/notebooks/community/model_garden/model_garden_movinet_action_recognition.ipynb b/notebooks/community/model_garden/model_garden_movinet_action_recognition.ipynb
index 6166766c7..ed4897897 100644
--- a/notebooks/community/model_garden/model_garden_movinet_action_recognition.ipynb
+++ b/notebooks/community/model_garden/model_garden_movinet_action_recognition.ipynb
@@ -166,11 +166,9 @@
     "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
     "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
     "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
-    "else:\n",
+    "    ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",    "else:\n",
     "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
-    "    bucket_region = shell_output[0].strip().lower()\n",
+    "    shell_output = ! gcloud storage buckets describe {BUCKET_NAME} --format=\"value(location)\"\n",    "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
     "            \"Bucket region %s is different from notebook region %s\"\n",
@@ -194,8 +192,8 @@
     "\n",
     "\n",
     "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
-    "\n",
+    "# Note: migrating gsutil iam ch is more complex than get or set: replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or gcloud storage buckets remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
     "\n",
     "! gcloud config set project $PROJECT_ID\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n",
@@ -431,8 +429,7 @@
     "    \"\"\"\n",
     "    label_map_filename = os.path.basename(label_map_yaml_filepath)\n",
     "    subprocess.check_output(\n",
-    "        [\"gsutil\", \"cp\", label_map_yaml_filepath, label_map_filename],\n",
-    "        stderr=subprocess.STDOUT,\n",
+    "        [\"gcloud\", \"storage\", \"cp\", label_map_yaml_filepath, label_map_filename],\n",    "        stderr=subprocess.STDOUT,\n",
     "    )\n",
     "    with open(label_map_filename, \"rb\") as input_file:\n",
     "        label_map = yaml.safe_load(input_file.read())[\"label_map\"]\n",
@@ -469,8 +466,7 @@
     "    checkpoint_path = find_checkpoint_in_dir(checkpoint_name)\n",
     "    checkpoint_path = os.path.relpath(checkpoint_path, checkpoint_name)\n",
     "\n",
-    "    ! gsutil cp -r $checkpoint_name $CHECKPOINT_BUCKET/\n",
-    "    checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
+    "    ! gcloud storage cp --recursive $checkpoint_name $CHECKPOINT_BUCKET/\n",    "    checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
     "    print(\"Checkpoint uploaded to\", checkpoint_uri)\n",
     "    return checkpoint_uri\n",
     "\n",
@@ -481,8 +477,7 @@
     "    destination = os.path.join(CONFIG_DIR, filename)\n",
     "    print(\"Copy\", url, \"to\", destination)\n",
     "    ! wget \"$url\" -O \"$filename\"\n",
-    "    ! gsutil cp \"$filename\" \"$destination\"\n",
-    "    return destination\n",
+    "    ! gcloud storage cp \"$filename\" \"$destination\"\n",    "    return destination\n",
     "\n",
     "\n",
     "train_job_name = common_util.get_job_name_with_datetime(\n",
@@ -625,8 +620,7 @@
     "    current_trial_best_ckpt_evaluation_filepath = os.path.join(\n",
     "        current_trial_best_ckpt_dir, \"info.json\"\n",
     "    )\n",
-    "    ! gsutil cp $current_trial_best_ckpt_evaluation_filepath .\n",
-    "    with open(\"info.json\", \"r\") as f:\n",
+    "    ! gcloud storage cp $current_trial_best_ckpt_evaluation_filepath .\n",    "    with open(\"info.json\", \"r\") as f:\n",
     "        eval_metric_results = json.load(f)\n",
     "    current_performance = eval_metric_results[evaluation_metric]\n",
     "    if current_performance > best_performance:\n",
@@ -641,8 +635,7 @@
     "    \"\"\"Finds the best checkpoint path.\"\"\"\n",
     "    try:\n",
     "        checkpoint_files = (\n",
-    "            subprocess.check_output([\"gsutil\", \"ls\", checkpoint_dir])\n",
-    "            .decode(\"utf-8\")\n",
+    "            subprocess.check_output([\"gcloud\", \"storage\", \"ls\", checkpoint_dir])\n",    "            .decode(\"utf-8\")\n",
     "            .strip()\n",
     "        )\n",
     "        for file in checkpoint_files.splitlines():\n",
@@ -864,8 +857,7 @@
     "# The label map file was generated from the section above (`Prepare input data for training`).\n",
     "\n",
     "dir_name = os.path.basename(predict_destination_prefix)\n",
-    "! gsutil -m cp -R $predict_destination_prefix /tmp\n",
-    "\n",
+    "! gcloud storage cp --recursive $predict_destination_prefix /tmp\n",    "\n",
     "local_path = os.path.join(\"/tmp\", dir_name)\n",
     "file_paths = []\n",
     "for root, _, files in os.walk(local_path):\n",
@@ -919,8 +911,7 @@
     "\n",
     "delete_bucket = False # @param {type:\"boolean\"}\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
  }
 ],
 "metadata": {
diff --git a/notebooks/community/model_garden/model_garden_phi3_deployment.ipynb b/notebooks/community/model_garden/model_garden_phi3_deployment.ipynb
index 4837ef93d..4767d00b3 100644
--- a/notebooks/community/model_garden/model_garden_phi3_deployment.ipynb
+++ b/notebooks/community/model_garden/model_garden_phi3_deployment.ipynb
@@ -157,11 +157,9 @@
     "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
     "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
     "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
-    "else:\n",
+    "    ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n",    "else:\n",
     "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
-    "    bucket_region = shell_output[0].strip().lower()\n",
+    "    shell_output = ! gcloud storage buckets describe {BUCKET_NAME} --format=\"value(location)\"\n",    "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
     "            \"Bucket region %s is different from notebook region %s\"\n",
@@ -185,8 +183,8 @@
     "\n",
     "\n",
     "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
-    "\n",
+    "# Note: migrating gsutil iam ch is more complex than get or set: replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or gcloud storage buckets remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
     "\n",
     "! gcloud config set project $PROJECT_ID\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\""
@@ -890,8 +888,7 @@
     "\n",
     "delete_bucket = False # @param {type:\"boolean\"}\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
  }
 ],
 "metadata": {
diff --git a/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb b/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb
index 438c3d2e4..c7765a0a8 100644
--- a/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb
+++ b/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb
@@ -373,8 +373,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
-   ]
+    "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -641,14 +640,11 @@
     "        + \"/evaluation_metrics\"\n",
     "    )\n",
     "    if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
-    "        ! gsutil cat $EXECUTE_OUTPUT\n",
-    "        return EXECUTE_OUTPUT\n",
+    "        ! gcloud storage cat $EXECUTE_OUTPUT\n",    "        return EXECUTE_OUTPUT\n",
     "    elif tf.io.gfile.exists(GCP_RESOURCES):\n",
-    "        ! gsutil cat $GCP_RESOURCES\n",
-    "        return GCP_RESOURCES\n",
+    "        ! gcloud storage cat $GCP_RESOURCES\n",    "        return GCP_RESOURCES\n",
     "    elif tf.io.gfile.exists(EVAL_METRICS):\n",
-    "        ! gsutil cat $EVAL_METRICS\n",
-    "        return EVAL_METRICS\n",
+    "        ! gcloud storage cat $EVAL_METRICS\n",    "        return EVAL_METRICS\n",
     "\n",
     "    return None"
   ]
@@ -1470,8 +1466,7 @@
     "# delete bucket\n",
     "delete_bucket = False\n",
     "if os.getenv(\"IS_TESTING\") or delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI\n",
-    "\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",    "\n",
     "# Remove local resorces\n",
     "delete_local_resources = False\n",
     "if delete_local_resources:\n",
diff --git a/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb b/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb
index bfaad7c6b..c8de12888 100644
--- a/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb
+++ b/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb
@@ -284,8 +284,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $LOCATION $BUCKET_URI"
-   ]
+    "! gcloud storage buckets create --location $LOCATION $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -381,8 +380,7 @@
     "\n",
-    "# Copy images using gsutil commands directly\n",
+    "# Copy images using gcloud storage commands directly\n",
     "for src, dest in zip(df.iloc[:, 0], df[\"destination_path\"]):\n",
-    "    ! gsutil -m cp {src} {dest}\n",
-    "\n",
+    "    ! gcloud storage cp {src} {dest}\n",    "\n",
     "print(f\"Files copied to {BUCKET_URI}\")"
   ]
  },
@@ -462,12 +460,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n",    "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -675,10 +671,8 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls $model_package\n",
-    "# Download the model artifacts\n",
-    "! gsutil cp -r $model_package tflite\n",
-    "\n",
+    "! gcloud storage ls $model_package\n",    "# Download the model artifacts\n",
+    "! gcloud storage cp --recursive $model_package tflite\n",    "\n",
     "tflite_path = \"tflite/model.tflite\""
   ]
  },
@@ -736,8 +730,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = ! gsutil cat $IMPORT_FILE | head -n1\n",
-    "test_item = test_items[0].split(\",\")[0]\n",
+    "test_items = ! gcloud storage cat $IMPORT_FILE | head -n1\n",    "test_item = test_items[0].split(\",\")[0]\n",
     "\n",
     "with tf.io.gfile.GFile(test_item, \"rb\") as f:\n",
     "    content = f.read()\n",
@@ -824,8 +817,7 @@
     "dag.delete()\n",
     "\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_URI" ]
  }
 ],
 "metadata": {
diff --git a/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb b/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb
index ff97de8e7..f2f866aad 100644
--- a/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb
+++ b/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb
@@ -363,8 +363,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
-   ]
+    "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}" ]
   },
   {
    "cell_type": "markdown",
@@ -437,10 +436,10 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
-    "\n",
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
-   ]
+    "# Note: migrating gsutil iam ch is more complex than get or set: replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or gcloud storage buckets remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
+    "\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ]
  },
  {
   "cell_type": "markdown",
@@ -1362,8 +1361,7 @@
     "\n",
     "# delete the Cloud Storage bucket\n",
     "if delete_bucket and os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -r $BUCKET_URI"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_URI" ]
  }
 ],
 "metadata": {
diff --git a/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb b/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb
index 09c4b154c..64cd1db69 100644
--- a/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb
+++ b/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb
@@ -359,8 +359,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
-   ]
+    "! gcloud storage buckets create --location {LOCATION} --project {PROJECT_ID} {BUCKET_URI}" ]
   },
   {
    "cell_type": "markdown",
@@ -592,8 +591,7 @@
     "TRAINING_DATASET = f\"{INPUT_GS_PATH}/churn_bp_insample.csv\"\n",
     "TRAINING_DATASET_FORMAT = \"csv\"\n",
     "\n",
-    "! gsutil copy $PUBLIC_TRAINING_DATASET $TRAINING_DATASET"
-   ]
+    "! gcloud storage cp $PUBLIC_TRAINING_DATASET $TRAINING_DATASET" ]
  },
  {
   "cell_type": "markdown",
@@ -780,9 +779,7 @@
     "PREDICTION_STATS_GCS_PATH = STATS_GCS_FOLDER + PREDICTION_STATS_SUBPATH\n",
     "print(\"Looking up statistics from: \" + PREDICTION_STATS_GCS_PATH)\n",
     "\n",
-    "! gsutil cp $TRAINING_STATS_GCS_PATH ./training_stats.pb\n",
-    "! gsutil cp $PREDICTION_STATS_GCS_PATH ./prediction_stats.pb\n",
-    "\n",
+    "! gcloud storage cp $TRAINING_STATS_GCS_PATH ./training_stats.pb\n",    "! gcloud storage cp $PREDICTION_STATS_GCS_PATH ./prediction_stats.pb\n",    "\n",
     "\n",
     "# util function to load stats binary file from GCS\n",
     "def load_stats_binary(input_path):\n",
@@ -820,8 +817,7 @@
     "    STATS_GCS_FOLDER\n",
     "    + \"stats_and_anomalies/anomalies/training_prediction_skew_anomalies\"\n",
     ")\n",
-    "! gsutil cat $SKEW_GS_PATH"
-   ]
+    "! gcloud storage cat $SKEW_GS_PATH" ]
  },
 {
  "cell_type": "markdown",
@@ -879,8 +875,7 @@
     "# Delete Cloud Storage bucket\n",
     "delete_bucket = False\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI\n",
-    "\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",    "\n",
     "! rm -f ./training_stats.pb\n",
     "! rm -f ./prediction_stats.pb"
   ]
diff --git a/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb b/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb
index d11d272ca..8b918afaf 100644
--- a/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb
+++ b/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb
@@ -283,8 +283,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
-   ]
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ]
   },
   {
    "cell_type": "markdown",
@@ -532,9 +531,7 @@
     "def get_data():\n",
     "    logging.info(\"Downloading training data and labelsfrom: {}, {}\".format(args.dataset_data_url, args.dataset_labels_url))\n",
-    "    # gsutil outputs everything to stderr. Hence, the need to divert it to stdout.\n",
-    "    subprocess.check_call(['gsutil', 'cp', args.dataset_data_url, 'data.csv'], stderr=sys.stdout)\n",
-    "    subprocess.check_call(['gsutil', 'cp', args.dataset_labels_url, 'labels.csv'], stderr=sys.stdout)\n",
-    "\n",
+    "    # gcloud storage outputs everything to stderr. Hence, the need to divert it to stdout.\n",
+    "    subprocess.check_call(['gcloud', 'storage', 'cp', args.dataset_data_url, 'data.csv'], stderr=sys.stdout)\n",    "    subprocess.check_call(['gcloud', 'storage', 'cp', args.dataset_labels_url, 'labels.csv'], stderr=sys.stdout)\n",    "\n",
     "\n",
     "    # Load data into pandas, then use `.values` to get NumPy arrays\n",
     "    data = pd.read_csv('data.csv').values\n",
@@ -619,8 +616,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz"
-   ]
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz" ]
   },
   {
    "cell_type": "markdown",
@@ -922,8 +918,7 @@
     "# Fetch the best model\n",
     "BEST_MODEL_DIR = MODEL_DIR + \"/\" + best[0] + \"/model\"\n",
     "\n",
-    "! gsutil ls {BEST_MODEL_DIR}"
-   ]
+    "! gcloud storage ls {BEST_MODEL_DIR}" ]
   },
   {
    "cell_type": "markdown",
@@ -958,8 +953,7 @@
     "delete_bucket = False # Set True to delete the bucket\n",
     "\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI\n",
-    "\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",    "\n",
     "# Delete the locally generated files\n",
     "! rm -rf custom/\n",
     "! rm custom.tar.gz"