diff --git a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_online.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_online.ipynb index 05a8b8425..5acdf4bf2 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_online.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_online.ipynb @@ -722,12 +722,10 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gsutil cat $FILE | wc -l\n", - "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $FILE | head" - ] + "! gcloud storage cat $FILE | head" ] }, { "cell_type": "markdown", @@ -1345,8 +1343,7 @@ }, "outputs": [], "source": [ - "test_items = !gsutil cat $IMPORT_FILE | head -n1\n", - "cols = str(test_items[0]).split(\",\")\n", + "test_items = !gcloud storage cat $IMPORT_FILE | head -n1\n", "cols = str(test_items[0]).split(\",\")\n", "if len(cols) == 11:\n", " test_item = str(cols[1])\n", " test_label = str(cols[2])\n", @@ -1574,8 +1571,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_regression_online_bq.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_regression_online_bq.ipynb index 3f88bc9f0..46c7e3f00 100644 --- a/notebooks/community/gapic/automl/showcase_automl_tabular_regression_online_bq.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_tabular_regression_online_bq.ipynb @@ -1614,8 +1614,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! 
gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/gapic/custom/showcase_local_tabular_regression_online.ipynb b/notebooks/community/gapic/custom/showcase_local_tabular_regression_online.ipynb index 7851bef83..7e011cec3 100644 --- a/notebooks/community/gapic/custom/showcase_local_tabular_regression_online.ipynb +++ b/notebooks/community/gapic/custom/showcase_local_tabular_regression_online.ipynb @@ -423,8 +423,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" - ] + "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -443,8 +442,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" - ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -1639,8 +1637,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/gapic/custom/showcase_local_text_binary_classification_online.ipynb b/notebooks/community/gapic/custom/showcase_local_text_binary_classification_online.ipynb index 12c88e50a..4c74027a7 100644 --- a/notebooks/community/gapic/custom/showcase_local_text_binary_classification_online.ipynb +++ b/notebooks/community/gapic/custom/showcase_local_text_binary_classification_online.ipynb @@ -423,8 +423,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" - ] + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -443,8 +442,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" - ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -1616,8 +1614,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! 
gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_batch.ipynb b/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_batch.ipynb index 7b66d5240..9b1f1c8de 100644 --- a/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_batch.ipynb +++ b/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_batch.ipynb @@ -487,8 +487,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" - ] + "! gcloud storage buckets create --location $REGION $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -507,8 +506,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" - ] + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -634,14 +632,11 @@ }, "outputs": [], "source": [ - "count = ! gsutil cat $IMPORT_FILE | wc -l\n", - "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $IMPORT_FILE | head\n", - "\n", - "heading = ! gsutil cat $IMPORT_FILE | head -n1\n", - "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n", + "! gcloud storage cat $IMPORT_FILE | head\n", "\n", + "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n", "print(\"Label Column Name\", label_column)\n", "if label_column is None:\n", " raise Exception(\"label column missing\")" @@ -913,15 +908,12 @@ }, "outputs": [], "source": [ - "! gsutil cat $IMPORT_FILE | head -n 1 > tmp.csv\n", - "! gsutil cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", - "\n", + "! gcloud storage cat $IMPORT_FILE | head -n 1 > tmp.csv\n", "! gcloud storage cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", "\n", "! 
cut -d, -f1-16 tmp.csv > batch.csv\n", "\n", "gcs_input_uri = BUCKET_URI + \"/test.csv\"\n", "\n", - "! gsutil cp batch.csv $gcs_input_uri" - ] + "! gcloud storage cp batch.csv $gcs_input_uri" ] }, { "cell_type": "markdown", @@ -1552,8 +1544,7 @@ "outputs": [], "source": [ "# Create test data w/o the label\n", - "output = ! gsutil cat $IMPORT_FILE | head -n 10\n", - "rows = []\n", + "output = ! gcloud storage cat $IMPORT_FILE | head -n 10\n", "rows = []\n", "for i in range(0, 10):\n", "    rows.append(output[i][:-2])\n", "\n", @@ -1831,8 +1822,7 @@ "delete_bucket = False\n", "\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - "    ! gsutil rm -rf {BUCKET_URI}" - ] + "    ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ] } ], "metadata": {
diff --git a/notebooks/community/model_garden/model_garden_axolotl_gemma3_finetuning.ipynb b/notebooks/community/model_garden/model_garden_axolotl_gemma3_finetuning.ipynb index a2d935f67..1d045fbb5 100644 --- a/notebooks/community/model_garden/model_garden_axolotl_gemma3_finetuning.ipynb +++ b/notebooks/community/model_garden/model_garden_axolotl_gemma3_finetuning.ipynb @@ -346,11 +346,10 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - "    ! gsutil mb -l {REGION} {BUCKET_URI}\n", - "else:\n", + "    ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", - "    bucket_region = shell_output[0].strip().lower()\n", + "# Note: The format of the full listing output is different. gcloud storage uses title case for keys and will not display a field if its value is \"None\".\n", + "    shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", "    bucket_region = shell_output[0].strip().lower()\n", "    if bucket_region != REGION:\n", "        raise ValueError(\n", "            \"Bucket region %s is different from notebook region %s\"\n", @@ -374,7 +373,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n", + "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or gcloud storage buckets remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", @@ -716,8 +716,7 @@ "! accelerate launch -m axolotl.cli.train $axolotl_args $local_config_path\n", "\n", "# @markdown 4. Check the output in the bucket.\n", - "! gsutil ls $AXOLOTL_OUTPUT_GCS_URI" - ] + "! gcloud storage ls $AXOLOTL_OUTPUT_GCS_URI" ] }, { "cell_type": "code", @@ -855,8 +854,7 @@ "vertex_ai_config_path = AXOLOTL_CONFIG_PATH\n", "# Copy the config file to the bucket.\n", "if AXOLOTL_SOURCE == \"LOCAL\":\n", - "    ! gsutil -m cp $AXOLOTL_CONFIG_PATH $MODEL_BUCKET/config/\n", - "    vertex_ai_config_path = f\"{common_util.gcs_fuse_path(MODEL_BUCKET)}/config/{pathlib.Path(AXOLOTL_CONFIG_PATH).name}\"\n", + "    ! 
gcloud storage cp $AXOLOTL_CONFIG_PATH $MODEL_BUCKET/config/\n", " vertex_ai_config_path = f\"{common_util.gcs_fuse_path(MODEL_BUCKET)}/config/{pathlib.Path(AXOLOTL_CONFIG_PATH).name}\"\n", "\n", "job_name = common_util.get_job_name_with_datetime(\"axolotl-train\")\n", "AXOLOTL_OUTPUT_GCS_URI = f\"{BASE_AXOLOTL_OUTPUT_GCS_URI}/{job_name}\"\n", @@ -1312,8 +1310,7 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/official/migration/sdk-custom-image-classification-custom-container.ipynb b/notebooks/official/migration/sdk-custom-image-classification-custom-container.ipynb index 080d2a9eb..67e3cd24d 100644 --- a/notebooks/official/migration/sdk-custom-image-classification-custom-container.ipynb +++ b/notebooks/official/migration/sdk-custom-image-classification-custom-container.ipynb @@ -331,8 +331,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI" - ] + "! gcloud storage buckets create --location $LOCATION --project $PROJECT_ID $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -892,8 +891,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz" - ] + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz" ] }, { "cell_type": "markdown", @@ -1300,9 +1298,7 @@ }, "outputs": [], "source": [ - "! gsutil cp tmp1.jpg $BUCKET_URI/tmp1.jpg\n", - "! gsutil cp tmp2.jpg $BUCKET_URI/tmp2.jpg\n", - "\n", + "! gcloud storage cp tmp1.jpg $BUCKET_URI/tmp1.jpg\n", "! 
gcloud storage cp tmp2.jpg $BUCKET_URI/tmp2.jpg\n", "\n", "test_item_1 = BUCKET_URI + \"/tmp1.jpg\"\n", "test_item_2 = BUCKET_URI + \"/tmp2.jpg\"" ] @@ -1695,8 +1691,7 @@ "# Delete Cloud Storage objects that were created\n", "delete_bucket = False # Set True for deletion\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_URI\n", - "\n", + " ! gcloud storage rm --recursive $BUCKET_URI\n", "\n", "# remove the local users and movies avro files\n", "! rm -rf custom\n", "! rm -f custom.tar.gz\n", diff --git a/notebooks/official/migration/sdk-custom-image-classification-prebuilt-container.ipynb b/notebooks/official/migration/sdk-custom-image-classification-prebuilt-container.ipynb index 67cef577b..b5321076a 100644 --- a/notebooks/official/migration/sdk-custom-image-classification-prebuilt-container.ipynb +++ b/notebooks/official/migration/sdk-custom-image-classification-prebuilt-container.ipynb @@ -297,8 +297,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" - ] + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] }, { "cell_type": "markdown", @@ -678,8 +677,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz" - ] + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz" ] }, { "cell_type": "markdown", @@ -1145,9 +1143,7 @@ }, "outputs": [], "source": [ - "! gsutil cp tmp1.jpg $BUCKET_URI/tmp1.jpg\n", - "! gsutil cp tmp2.jpg $BUCKET_URI/tmp2.jpg\n", - "\n", + "! gcloud storage cp tmp1.jpg $BUCKET_URI/tmp1.jpg\n", "! gcloud storage cp tmp2.jpg $BUCKET_URI/tmp2.jpg\n", "\n", "test_item_1 = BUCKET_URI + \"/tmp1.jpg\"\n", "test_item_2 = BUCKET_URI + \"/tmp2.jpg\"" ] @@ -1637,8 +1633,7 @@ "# delete cloud storage bucket\n", "delete_bucket = False # set True for deletion\n", "if delete_bucket:\n", - " ! gsutil rm -r $BUCKET_URI" - ] + " ! 
gcloud storage rm --recursive $BUCKET_URI" ] } ], "metadata": { diff --git a/notebooks/official/pipelines/rapid_prototyping_bqml_automl.ipynb b/notebooks/official/pipelines/rapid_prototyping_bqml_automl.ipynb index 05dc087fb..c866ba12c 100644 --- a/notebooks/official/pipelines/rapid_prototyping_bqml_automl.ipynb +++ b/notebooks/official/pipelines/rapid_prototyping_bqml_automl.ipynb @@ -394,8 +394,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" - ] + "! gcloud storage buckets create --location {LOCATION} --project {PROJECT_ID} {BUCKET_URI}" ] }, { "cell_type": "markdown", @@ -467,10 +466,11 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", - "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" - ] + "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or gcloud storage buckets remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "# Note: gsutil iam ch does not support modifying IAM policies that contain conditions. gcloud storage commands do support conditions.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" + ] }, { "cell_type": "markdown", @@ -602,7 +602,7 @@ }, "outputs": [], "source": [ - "! 
gsutil cp gs://cloud-samples-data/vertex-ai/community-content/datasets/abalone/abalone.data {RAW_INPUT_DATA}" + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/community-content/datasets/abalone/abalone.data {RAW_INPUT_DATA}" ] }, { @@ -1611,8 +1611,7 @@ "# Delete Cloud Storage bucket\n", "delete_bucket = True\n", "if delete_bucket:\n", - " ! gsutil rm -r $BUCKET_URI\n", - "\n", + " ! gcloud storage rm --recursive $BUCKET_URI\n", "\n", "# Delete the pipeline package file\n", "! rm PIPELINE_YAML_PKG_PATH" ] diff --git a/notebooks/official/workbench/demand_forecasting/forecasting-retail-demand.ipynb b/notebooks/official/workbench/demand_forecasting/forecasting-retail-demand.ipynb index e19b593c6..a84984fe2 100644 --- a/notebooks/official/workbench/demand_forecasting/forecasting-retail-demand.ipynb +++ b/notebooks/official/workbench/demand_forecasting/forecasting-retail-demand.ipynb @@ -275,8 +275,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $LOCATION $BUCKET_URI" - ] + "! gcloud storage buckets create --location=$LOCATION $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -2237,8 +2236,7 @@ "# Delete Cloud Storage objects that were created\n", "delete_bucket = True # Set True for deletion\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_URI" - ] + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], "metadata": {