diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_binary_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_binary_classification_batch.ipynb
index 0916cfe2b..c1036733e 100644
--- a/notebooks/community/gapic/automl/showcase_automl_tabular_binary_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_tabular_binary_classification_batch.ipynb
@@ -421,8 +421,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -441,8 +440,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -809,14 +807,11 @@
 },
 "outputs": [],
 "source": [
- "count = ! gsutil cat $IMPORT_FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $IMPORT_FILE | head\n",
- "\n",
- "heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
- "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
+ "! gcloud storage cat $IMPORT_FILE | head\n", "\n",
+ "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
 "print(\"Label Column Name\", label_column)\n",
 "if label_column is None:\n",
 " raise Exception(\"label column missing\")"
@@ -1377,8 +1372,7 @@
 " f.write(str(INSTANCE_2) + \"\\n\")\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1646,8 +1640,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1664,10 +1657,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/prediction*.csv\n",
- "\n",
- " ! gsutil cat $folder/prediction*.csv\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.csv\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.csv\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1761,8 +1752,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch_explain.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch_explain.ipynb
index c82f245d7..9e7311729 100644
--- a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch_explain.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch_explain.ipynb
@@ -421,8 +421,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -441,8 +440,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -810,14 +808,11 @@
 },
 "outputs": [],
 "source": [
- "count = ! gsutil cat $IMPORT_FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $IMPORT_FILE | head\n",
- "\n",
- "heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
- "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
+ "! gcloud storage cat $IMPORT_FILE | head\n", "\n",
+ "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
 "print(\"Label Column Name\", label_column)\n",
 "if label_column is None:\n",
 " raise Exception(\"label column missing\")"
@@ -1363,8 +1358,7 @@
 " f.write(str(INSTANCE_2) + \"\\n\")\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1631,8 +1625,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1649,10 +1642,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/explanation*.csv\n",
- "\n",
- " ! gsutil cat $folder/explanation*.csv\n",
- " break\n",
+ " ! gcloud storage ls $folder/explanation*.csv\n", "\n",
+ " ! gcloud storage cat $folder/explanation*.csv\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1746,8 +1737,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_video_action_recognition_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_video_action_recognition_batch.ipynb
index 105558f05..95351fd79 100644
--- a/notebooks/community/gapic/automl/showcase_automl_video_action_recognition_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_video_action_recognition_batch.ipynb
@@ -420,8 +420,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -440,8 +439,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -888,12 +886,10 @@
 "else:\n",
 " FILE = IMPORT_FILE\n",
 "\n",
- "count = ! gsutil cat $FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $FILE | head"
- ]
+ "! gcloud storage cat $FILE | head" ]
 },
 {
 "cell_type": "markdown",
@@ -1310,8 +1306,7 @@
 "import json\n",
 "\n",
 "import_file = IMPORT_FILES[0]\n",
- "test_items = ! gsutil cat $import_file | head -n2\n",
- "\n",
+ "test_items = ! gcloud storage cat $import_file | head -n2\n", "\n",
 "cols = str(test_items[0]).split(',')\n",
 "test_item_1 = str(cols[0])\n",
 "test_label_1 = str(cols[-1])\n",
@@ -1360,8 +1355,7 @@
 " f.write(json.dumps(data) + '\\n')\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1637,8 +1631,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " ''' Get the latest prediction subfolder using the timestamp in the subfolder name'''\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split('/')[-2]\n",
 " if subfolder.startswith('prediction-'):\n",
@@ -1655,10 +1648,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1752,8 +1743,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and 'BUCKET_NAME' in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_video_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_video_classification_batch.ipynb
index 99cf3a921..b7f8996de 100644
--- a/notebooks/community/gapic/automl/showcase_automl_video_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_video_classification_batch.ipynb
@@ -420,8 +420,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -440,8 +439,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -885,12 +883,10 @@
 "else:\n",
 " FILE = IMPORT_FILE\n",
 "\n",
- "count = ! gsutil cat $FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $FILE | head"
- ]
+ "! gcloud storage cat $FILE | head" ]
 },
 {
 "cell_type": "markdown",
@@ -1301,8 +1297,7 @@
 },
 "outputs": [],
 "source": [
- "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
- "\n",
+ "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n",
 "if len(test_items[0]) == 5:\n",
 " _, test_item_1, test_label_1, _, _ = str(test_items[0]).split(',')\n",
 " _, test_item_2, test_label_2, _, _ = str(test_items[1]).split(',')\n",
@@ -1351,8 +1346,7 @@
 " f.write(json.dumps(data) + '\\n')\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1630,8 +1624,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " ''' Get the latest prediction subfolder using the timestamp in the subfolder name'''\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split('/')[-2]\n",
 " if subfolder.startswith('prediction-'):\n",
@@ -1648,10 +1641,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1745,8 +1736,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and 'BUCKET_NAME' in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_video_object_tracking_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_video_object_tracking_batch.ipynb
index b357ff76e..929d189d9 100644
--- a/notebooks/community/gapic/automl/showcase_automl_video_object_tracking_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_video_object_tracking_batch.ipynb
@@ -420,8 +420,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -440,8 +439,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -890,12 +888,10 @@
 "else:\n",
 " FILE = IMPORT_FILE\n",
 "\n",
- "count = ! gsutil cat $FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $FILE | head"
- ]
+ "! gcloud storage cat $FILE | head" ]
 },
 {
 "cell_type": "markdown",
@@ -1309,8 +1305,7 @@
 },
 "outputs": [],
 "source": [
- "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
- "\n",
+ "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n",
 "cols_1 = test_items[0].split(',')\n",
 "cols_2 = test_items[1].split(',')\n",
 "if len(cols_1) > 12:\n",
@@ -1365,8 +1360,7 @@
 " f.write(json.dumps(data) + '\\n')\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1638,8 +1632,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " ''' Get the latest prediction subfolder using the timestamp in the subfolder name'''\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split('/')[-2]\n",
 " if subfolder.startswith('prediction-'):\n",
@@ -1656,10 +1649,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1753,8 +1744,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and 'BUCKET_NAME' in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/migration/UJ2,12 legacy Custom Training Prebuilt Container TF Keras.ipynb b/notebooks/community/migration/UJ2,12 legacy Custom Training Prebuilt Container TF Keras.ipynb
index c1d395020..936430514 100644
--- a/notebooks/community/migration/UJ2,12 legacy Custom Training Prebuilt Container TF Keras.ipynb
+++ b/notebooks/community/migration/UJ2,12 legacy Custom Training Prebuilt Container TF Keras.ipynb
@@ -308,8 +308,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION gs://$BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -328,8 +327,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al gs://$BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -586,8 +584,7 @@
 "! rm -f cifar.tar cifar.tar.gz\n",
 "! tar cvf cifar.tar cifar\n",
 "! gzip cifar.tar\n",
- "! gsutil cp cifar.tar.gz gs://$BUCKET_NAME/trainer_cifar.tar.gz"
- ]
+ "! gcloud storage cp cifar.tar.gz gs://$BUCKET_NAME/trainer_cifar.tar.gz" ]
 },
 {
 "cell_type": "markdown",
@@ -1071,8 +1068,7 @@
 " b64str = base64.b64encode(bytes.numpy()).decode(\"utf-8\")\n",
 " f.write(json.dumps({\"key\": img, input_name: {\"b64\": b64str}}) + \"\\n\")\n",
 "\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1361,10 +1357,8 @@
 " break\n",
 " else:\n",
 " folder = response[\"predictionInput\"][\"outputPath\"][:-1]\n",
- " ! gsutil ls $folder/prediction*\n",
- "\n",
- " ! gsutil cat $folder/prediction*\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*\n", "\n",
+ " ! gcloud storage cat $folder/prediction*\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -2046,8 +2040,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r gs://$BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/ml_ops/stage7/get_started_with_model_monitoring_custom_tf_serving.ipynb b/notebooks/community/ml_ops/stage7/get_started_with_model_monitoring_custom_tf_serving.ipynb
index c9e6679ac..182305ba9 100644
--- a/notebooks/community/ml_ops/stage7/get_started_with_model_monitoring_custom_tf_serving.ipynb
+++ b/notebooks/community/ml_ops/stage7/get_started_with_model_monitoring_custom_tf_serving.ipynb
@@ -561,8 +561,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_URI"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
 },
 {
 "cell_type": "markdown",
@@ -581,8 +580,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_URI"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_URI" ]
 },
 {
 "cell_type": "markdown",
@@ -976,8 +974,7 @@
 "MODEL_ARTIFACT_URI = \"gs://mco-mm/churn\"\n",
 "MODEL_DIR = BUCKET_URI + \"/model/1\"\n",
 "\n",
- "! gsutil cp -r $MODEL_ARTIFACT_URI $MODEL_DIR"
- ]
+ "! gcloud storage cp --recursive $MODEL_ARTIFACT_URI $MODEL_DIR" ]
 },
 {
 "cell_type": "markdown",
@@ -1361,8 +1358,7 @@
 "with open(\"schema.yaml\", \"w\") as f:\n",
 " f.write(yaml)\n",
 "\n",
- "! gsutil cp schema.yaml {BUCKET_URI}/schema.yaml"
- ]
+ "! gcloud storage cp schema.yaml {BUCKET_URI}/schema.yaml" ]
 },
 {
 "cell_type": "markdown",
@@ -1849,8 +1845,7 @@
 "delete_bucket = False\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
- " ! gsutil rm -rf {BUCKET_URI}\n",
- "\n",
+ " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}\n", "\n",
 "! rm -f schema.yaml\n",
 "\n",
 "! bq rm -f {PROJECT_ID}.model_deployment_monitoring_{ENDPOINT_ID}"
diff --git a/notebooks/community/model_garden/model_garden_axolotl_qwen3_finetuning.ipynb b/notebooks/community/model_garden/model_garden_axolotl_qwen3_finetuning.ipynb
index 1ba1d9fb1..5c257c073 100644
--- a/notebooks/community/model_garden/model_garden_axolotl_qwen3_finetuning.ipynb
+++ b/notebooks/community/model_garden/model_garden_axolotl_qwen3_finetuning.ipynb
@@ -343,11 +343,10 @@
 "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
 " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
- " ! gsutil mb -l {REGION} {BUCKET_URI}\n",
- "else:\n",
+ " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
- " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
- " bucket_region = shell_output[0].strip().lower()\n",
+ " # Note: The format of the full listing output is different. gcloud storage uses title case for keys and will not display a field if its value is \"None\".\n",
+ " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",
@@ -371,8 +370,8 @@
 "\n",
 "\n",
 "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
- "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
- "\n",
+ "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or gcloud storage buckets remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
+ "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n",
 "! gcloud config set project $PROJECT_ID\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\""
@@ -713,8 +712,7 @@
 "! accelerate launch -m axolotl.cli.train $axolotl_args $local_config_path\n",
 "\n",
 "# @markdown 4. Check the output in the bucket.\n",
- "! gsutil ls $AXOLOTL_OUTPUT_GCS_URI"
- ]
+ "! gcloud storage ls $AXOLOTL_OUTPUT_GCS_URI" ]
 },
 {
 "cell_type": "code",
@@ -852,8 +850,7 @@
 "vertex_ai_config_path = AXOLOTL_CONFIG_PATH\n",
 "# Copy the config file to the bucket.\n",
 "if AXOLOTL_SOURCE == \"LOCAL\":\n",
- " ! gsutil -m cp $AXOLOTL_CONFIG_PATH $MODEL_BUCKET/config/\n",
- " vertex_ai_config_path = f\"{common_util.gcs_fuse_path(MODEL_BUCKET)}/config/{pathlib.Path(AXOLOTL_CONFIG_PATH).name}\"\n",
+ " ! gcloud storage cp $AXOLOTL_CONFIG_PATH $MODEL_BUCKET/config/\n", " vertex_ai_config_path = f\"{common_util.gcs_fuse_path(MODEL_BUCKET)}/config/{pathlib.Path(AXOLOTL_CONFIG_PATH).name}\"\n",
 "\n",
 "job_name = common_util.get_job_name_with_datetime(\"axolotl-train\")\n",
 "AXOLOTL_OUTPUT_GCS_URI = f\"{BASE_AXOLOTL_OUTPUT_GCS_URI}/{job_name}\"\n",
@@ -1381,8 +1378,7 @@
 "\n",
 "delete_bucket = False # @param {type:\"boolean\"}\n",
 "if delete_bucket:\n",
- " ! gsutil -m rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/vertex_endpoints/optimized_tensorflow_runtime/bert_optimized_online_prediction.ipynb b/notebooks/community/vertex_endpoints/optimized_tensorflow_runtime/bert_optimized_online_prediction.ipynb
index 336add9c1..67599a179 100644
--- a/notebooks/community/vertex_endpoints/optimized_tensorflow_runtime/bert_optimized_online_prediction.ipynb
+++ b/notebooks/community/vertex_endpoints/optimized_tensorflow_runtime/bert_optimized_online_prediction.ipynb
@@ -493,8 +493,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
- ]
+ "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI" ]
 },
 {
 "cell_type": "markdown",
@@ -513,8 +512,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_URI"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_URI" ]
 },
 {
 "cell_type": "markdown",
@@ -1148,8 +1146,7 @@
 },
 "outputs": [],
 "source": [
- "!gsutil rm -r $BUCKET_URI/*"
- ]
+ "!gcloud storage rm --recursive $BUCKET_URI/*" ]
 },
 {
 "cell_type": "code",
@@ -1159,8 +1156,7 @@
 },
 "outputs": [],
 "source": [
- "!gsutil cp -r $LOCAL_DIRECTORY_FULL/* $BUCKET_URI"
- ]
+ "!gcloud storage cp --recursive $LOCAL_DIRECTORY_FULL/* $BUCKET_URI" ]
 },
 {
 "cell_type": "markdown",
@@ -2104,8 +2100,7 @@
 "# Set this to true only if you'd like to delete your bucket\n",
 "delete_bucket = False\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
- " !gsutil rm -r $BUCKET_URI"
- ]
+ " !gcloud storage rm --recursive $BUCKET_URI" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/official/training/sdk_pytorch_torchrun_custom_container_training_imagenet.ipynb b/notebooks/official/training/sdk_pytorch_torchrun_custom_container_training_imagenet.ipynb
index 72a290e5f..88134e8ae 100644
--- a/notebooks/official/training/sdk_pytorch_torchrun_custom_container_training_imagenet.ipynb
+++ b/notebooks/official/training/sdk_pytorch_torchrun_custom_container_training_imagenet.ipynb
@@ -280,8 +280,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
- ]
+ "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ]
 },
 {
 "cell_type": "markdown",
@@ -690,14 +689,12 @@
 " HOST_IP=$(hostname -i)\n",
 " echo \"HOST_IP=\"$HOST_IP\n",
 " echo \"Writing host IP address to \"$gcsfilepath\n",
- " echo $HOST_IP| gsutil cp - $gcsfilepath\n",
- " setup_etcd $HOST_IP\n",
+ " echo $HOST_IP| gcloud storage cp - $gcsfilepath\n", " setup_etcd $HOST_IP\n",
 "else\n",
 " echo \"Wait 60s for the host server to come online\"\n",
 " sleep 60\n",
 " echo \"reading host IP address from \"$gcsfilepath\n",
- " HOST_IP=$(gsutil cat $gcsfilepath)\n",
- " echo \"HOST_IP=\"$HOST_IP\n",
+ " HOST_IP=$(gcloud storage cat $gcsfilepath)\n", " echo \"HOST_IP=\"$HOST_IP\n",
 "fi\n",
 "\n",
 "env=\"env://\"\n",
@@ -1812,8 +1809,7 @@
 "# Delete Cloud Storage objects that were created\n",
 "delete_bucket = False\n",
 "if delete_bucket:\n",
- " ! gsutil -m rm -r $BUCKET_URI"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_URI" ]
 }
 ],
 "metadata": {