diff --git a/notebooks/community/feature_store/get_started_vertex_feature_store.ipynb b/notebooks/community/feature_store/get_started_vertex_feature_store.ipynb index 18ebc9922..d6f0e33c8 100644 --- a/notebooks/community/feature_store/get_started_vertex_feature_store.ipynb +++ b/notebooks/community/feature_store/get_started_vertex_feature_store.ipynb @@ -471,8 +471,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" - ] + "! gcloud storage buckets create --location=$REGION $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -491,8 +490,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" - ] + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -511,8 +509,7 @@ }, "outputs": [], "source": [ - "! gsutil uniformbucketlevelaccess set on {BUCKET_URI}" - ] + "! gcloud storage buckets update --uniform-bucket-level-access {BUCKET_URI}" ] }, { "cell_type": "markdown", @@ -1910,9 +1907,7 @@ }, "outputs": [], "source": [ - "! gsutil -m rm -r $BUCKET_URI\n", - "! gsutil rb $BUCKET_URI" - ] + "! gcloud storage rm --recursive $BUCKET_URI\n", "! gcloud storage buckets delete $BUCKET_URI" ] } ], "metadata": { diff --git a/notebooks/community/gapic/automl/showcase_automl_image_segmentation_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_segmentation_batch.ipynb index c1c4d1085..8f04035a9 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_segmentation_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_segmentation_batch.ipynb @@ -421,8 +421,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" - ] + "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -441,8 +440,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" - ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -891,12 +889,10 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gsutil cat $FILE | wc -l\n", - "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $FILE | head" - ] + "! gcloud storage cat $FILE | head" ] }, { "cell_type": "markdown", @@ -1325,8 +1321,7 @@ "source": [ "import json\n", "\n", - "test_items = !gsutil cat $IMPORT_FILE | head -n2\n", - "test_data_1 = test_items[0].replace(\"'\", '\"')\n", + "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "test_data_1 = test_items[0].replace(\"'\", '\"')\n", "test_data_1 = json.loads(test_data_1)\n", "test_data_2 = test_items[0].replace(\"'\", '\"')\n", "test_data_2 = json.loads(test_data_2)\n", @@ -1367,9 +1362,8 @@ "file_1 = test_item_1.split(\"/\")[-1]\n", "file_2 = test_item_2.split(\"/\")[-1]\n", "\n", - "! gsutil cp $test_item_1 $BUCKET_NAME/$file_1\n", - "! gsutil cp $test_item_2 $BUCKET_NAME/$file_2\n", - "\n", + "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", + "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n", "test_item_1 = BUCKET_NAME + \"/\" + file_1\n", "test_item_2 = BUCKET_NAME + \"/\" + file_2" ] @@ -1412,8 +1406,7 @@ " f.write(json.dumps(data) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gsutil cat $gcs_input_uri" - ] + "! 
gcloud storage cat $gcs_input_uri" ] }, { "cell_type": "markdown", @@ -1681,8 +1674,7 @@ "source": [ "def get_latest_predictions(gcs_out_dir):\n", " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", - " latest = \"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1699,10 +1691,8 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gsutil ls $folder/prediction*.jsonl\n", - "\n", - " ! gsutil cat $folder/prediction*.jsonl\n", - " break\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", " time.sleep(60)" ] }, @@ -1796,8 +1786,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_forecasting_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_forecasting_batch.ipynb index 9f564f732..4f80389b8 100644 --- a/notebooks/community/gapic/automl/showcase_automl_tabular_forecasting_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_tabular_forecasting_batch.ipynb @@ -421,8 +421,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" - ] + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -441,8 +440,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" - ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -779,14 +777,11 @@ }, "outputs": [], "source": [ - "count = ! gsutil cat $IMPORT_FILE | wc -l\n", - "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $IMPORT_FILE | head\n", - "\n", - "heading = ! gsutil cat $IMPORT_FILE | head -n1\n", - "label_column = \"deaths\" # @param {type:\"string\"}\n", + "! gcloud storage cat $IMPORT_FILE | head\n", "\n", + "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = \"deaths\" # @param {type:\"string\"}\n", "time_column = \"date\" # @param {type:\"string\"}\n", "time_series_identifier_column = \"county\" # @param {type:\"string\"}\n", "print(\"Label Column Name\", label_column)\n", @@ -1358,8 +1353,7 @@ " f.write(str(INSTANCE_2) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gsutil cat $gcs_input_uri" - ] + "! gcloud storage cat $gcs_input_uri" ] }, { "cell_type": "markdown", @@ -1627,8 +1621,7 @@ "source": [ "def get_latest_predictions(gcs_out_dir):\n", " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", - " latest = \"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1645,10 +1638,8 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gsutil ls $folder/prediction*.csv\n", - "\n", - " ! gsutil cat $folder/prediction*.csv\n", - " break\n", + " ! 
gcloud storage ls $folder/prediction*.csv\n", "\n", + " ! gcloud storage cat $folder/prediction*.csv\n", " break\n", " time.sleep(60)" ] }, @@ -1742,8 +1733,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/gapic/custom/showcase_local_image_classification_online.ipynb b/notebooks/community/gapic/custom/showcase_local_image_classification_online.ipynb index 1eca1035c..65656203a 100644 --- a/notebooks/community/gapic/custom/showcase_local_image_classification_online.ipynb +++ b/notebooks/community/gapic/custom/showcase_local_image_classification_online.ipynb @@ -423,8 +423,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" - ] + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -443,8 +442,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" - ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -1718,8 +1716,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/ml_ops/stage2/get_started_vertex_distributed_training.ipynb b/notebooks/community/ml_ops/stage2/get_started_vertex_distributed_training.ipynb index 08eae0b39..5739d2f26 100644 --- a/notebooks/community/ml_ops/stage2/get_started_vertex_distributed_training.ipynb +++ b/notebooks/community/ml_ops/stage2/get_started_vertex_distributed_training.ipynb @@ -476,8 +476,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" - ] + "! gcloud storage buckets create --location $REGION $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -496,8 +495,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" - ] + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -1006,8 +1004,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" - ] + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" ] }, { "cell_type": "markdown", @@ -1977,8 +1974,7 @@ "delete_bucket = False\n", "\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil rm -r $BUCKET_URI" - ] + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], "metadata": { diff --git a/notebooks/community/ml_ops/stage2/get_started_vertex_training.ipynb b/notebooks/community/ml_ops/stage2/get_started_vertex_training.ipynb index 7431c813a..f79cf946f 100644 --- a/notebooks/community/ml_ops/stage2/get_started_vertex_training.ipynb +++ b/notebooks/community/ml_ops/stage2/get_started_vertex_training.ipynb @@ -462,8 +462,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" - ] + "! gcloud storage buckets create --location=$REGION $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -482,8 +481,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" - ] + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -819,8 +817,7 @@ "\n", "job.run(args=CMDARGS, replica_count=1, machine_type=TRAIN_COMPUTE, sync=True)\n", "\n", - "! gsutil cat {BUCKET_URI}/test.txt" - ] + "! gcloud storage cat {BUCKET_URI}/test.txt" ] }, { "cell_type": "markdown", @@ -1021,8 +1018,7 @@ "! 
rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" - ] + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" ] }, { "cell_type": "markdown", @@ -1047,8 +1043,7 @@ "\n", "job.run(args=CMDARGS, replica_count=1, machine_type=TRAIN_COMPUTE, sync=True)\n", "\n", - "! gsutil cat {BUCKET_URI}/test.txt" - ] + "! gcloud storage cat {BUCKET_URI}/test.txt" ] }, { "cell_type": "markdown", @@ -1418,8 +1413,7 @@ "\n", "job.run(args=CMDARGS, replica_count=1, machine_type=TRAIN_COMPUTE, sync=True)\n", "\n", - "! gsutil cat {BUCKET_URI}/test.txt" - ] + "! gcloud storage cat {BUCKET_URI}/test.txt" ] }, { "cell_type": "markdown", @@ -1623,8 +1617,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" - ] + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" ] }, { "cell_type": "markdown", @@ -1943,8 +1936,7 @@ " print(e)\n", "\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil rm -rf {BUCKET_URI}" - ] + " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ] } ], "metadata": { diff --git a/notebooks/community/ml_ops/stage6/get_started_with_matching_engine_swivel.ipynb b/notebooks/community/ml_ops/stage6/get_started_with_matching_engine_swivel.ipynb index 53619bf9b..e614f221a 100644 --- a/notebooks/community/ml_ops/stage6/get_started_with_matching_engine_swivel.ipynb +++ b/notebooks/community/ml_ops/stage6/get_started_with_matching_engine_swivel.ipynb @@ -548,8 +548,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" - ] + "! gcloud storage buckets create --location $REGION $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -568,8 +567,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" - ] + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -638,10 +636,11 @@ }, "outputs": [], "source": [ - "!gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_NAME\n", - "\n", - "!gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_NAME" - ] + "# Note: migrating gsutil iam ch is more complex than iam get or set: replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "# Unlike gsutil iam ch, these commands also support IAM policies that contain conditions.\n", + "!gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", + "\n", + "!gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { "cell_type": "markdown", @@ -769,8 +768,7 @@ }, "outputs": [], "source": [ - "! gsutil cp gs://cloud-samples-data/vertex-ai/matching-engine/swivel/pipeline/* ." - ] + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/matching-engine/swivel/pipeline/* ." 
] }, { "cell_type": "markdown", @@ -901,8 +899,7 @@ "outputs": [], "source": [ "# Copy the MovieLens sample dataset\n", - "! gsutil cp -r gs://cloud-samples-data/vertex-ai/matching-engine/swivel/movielens_25m/train/* {SOURCE_DATA_PATH}/movielens_25m" - ] + "! gcloud storage cp --recursive gs://cloud-samples-data/vertex-ai/matching-engine/swivel/movielens_25m/train/* {SOURCE_DATA_PATH}/movielens_25m" ] }, { "cell_type": "markdown", @@ -972,8 +969,7 @@ }, "outputs": [], "source": [ - "! gsutil -m cp -r gs://cloud-samples-data/vertex-ai/matching-engine/swivel/models/movielens/model {SOURCE_DATA_PATH}/movielens_model" - ] + "! gcloud storage cp --recursive gs://cloud-samples-data/vertex-ai/matching-engine/swivel/models/movielens/model {SOURCE_DATA_PATH}/movielens_model" ] }, { "cell_type": "code", @@ -1172,8 +1168,7 @@ }, "outputs": [], "source": [ - "! gsutil cp gs://cloud-samples-data/vertex-ai/matching-engine/swivel/movielens_25m/movies.csv ./movies.csv\n", - "\n", + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/matching-engine/swivel/movielens_25m/movies.csv ./movies.csv\n", "\n", "movies = pd.read_csv(\"movies.csv\")\n", "print(f\"Movie count: {len(movies.index)}\")\n", "movies.head()" ] }, { "cell_type": "markdown", @@ -1361,8 +1356,7 @@ " f.write(json.dumps(query) + \"\\n\")\n", "\n", "print(\"\\nNumber of embeddings: \")\n", - "! gsutil cat {QUERY_EMBEDDING_PATH} | wc -l" - ] + "! gcloud storage cat {QUERY_EMBEDDING_PATH} | wc -l" ] }, { "cell_type": "code", @@ -1372,8 +1366,7 @@ }, "outputs": [], "source": [ - "! gsutil cat {QUERY_EMBEDDING_PATH} | head" - ] + "! gcloud storage cat {QUERY_EMBEDDING_PATH} | head" ] }, { "cell_type": "markdown", @@ -1535,8 +1528,7 @@ "outputs": [], "source": [ "EMBEDDINGS_URI = f\"{BUCKET_URI}/embeddings/swivel/\"\n", - "! gsutil cp embeddings.json {EMBEDDINGS_URI}" - ] + "! gcloud storage cp embeddings.json {EMBEDDINGS_URI}" ] }, { "cell_type": "markdown", @@ -1894,8 +1886,7 @@ "# Delete Cloud Storage objects that were created\n", "delete_bucket = False\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil -m rm -r $OUTPUT_DIR" - ] + " ! gcloud storage rm --recursive $OUTPUT_DIR" ] } ], "metadata": { diff --git a/notebooks/community/model_registry/vertex_ai_model_registry_automl_model_versioning.ipynb b/notebooks/community/model_registry/vertex_ai_model_registry_automl_model_versioning.ipynb index 121c81c4f..3a0862003 100644 --- a/notebooks/community/model_registry/vertex_ai_model_registry_automl_model_versioning.ipynb +++ b/notebooks/community/model_registry/vertex_ai_model_registry_automl_model_versioning.ipynb @@ -527,8 +527,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI" - ] + "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -547,8 +546,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" - ] + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -617,9 +615,8 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" - ] + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", 
+ "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { "cell_type": "markdown", @@ -1133,8 +1130,7 @@ }, "outputs": [], "source": [ - "! gsutil cp $PREPARED_FILE_PATH $PREPARED_FILE_URI" - ] + "! gcloud storage cp $PREPARED_FILE_PATH $PREPARED_FILE_URI" ] }, { "cell_type": "markdown", @@ -1200,7 +1196,7 @@ }, "outputs": [], "source": [ - "!gsutil cp gs://spark-lib/bigquery/spark-bigquery-with-dependencies_2.12-0.22.2.jar $DATAPROC_IMAGE_BUILD_PATH\n", + "!gcloud storage cp gs://spark-lib/bigquery/spark-bigquery-with-dependencies_2.12-0.22.2.jar $DATAPROC_IMAGE_BUILD_PATH\n", "!wget -P $DATAPROC_IMAGE_BUILD_PATH https://s3.amazonaws.com/auxdata.johnsnowlabs.com/public/jars/spark-nlp-assembly-4.0.2.jar\n", "!wget -P $DATAPROC_IMAGE_BUILD_PATH https://repo.anaconda.com/miniconda/Miniconda3-py38_4.9.2-Linux-x86_64.sh" ] @@ -1317,9 +1313,9 @@ "source": [ "CLOUD_BUILD_SERVICE_ACCOUNT = f\"{PROJECT_NUMBER}@cloudbuild.gserviceaccount.com\"\n", "\n", - "! gsutil iam ch serviceAccount:{CLOUD_BUILD_SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", - "! gsutil iam ch serviceAccount:{CLOUD_BUILD_SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" - ] + "# Note: gsutil iam ch has no single gcloud storage equivalent; grant each role with its own gcloud storage buckets add-iam-policy-binding command.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{CLOUD_BUILD_SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{CLOUD_BUILD_SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { "cell_type": "code", @@ -1625,9 +1621,7 @@ }, "outputs": [], "source": [ - "!gsutil cp $SRC_PATH/__init__.py $MODULE_URI/__init__.py\n", - "!gsutil cp $SRC_PATH/preprocess.py $MODULE_URI/preprocess.py" - ] + "!gcloud storage cp $SRC_PATH/__init__.py $MODULE_URI/__init__.py\n", "!gcloud storage cp $SRC_PATH/preprocess.py $MODULE_URI/preprocess.py" ] }, { "cell_type": "markdown", @@ -1649,8 +1643,7 @@ "outputs": [], "source": [ "!wget https://raw.githubusercontent.com/mahavivo/vocabulary/master/lemmas/AntBNC_lemmas_ver_001.txt -O $LEMMA_DICTIONARY_PATH\n", - "!gsutil cp $LEMMA_DICTIONARY_PATH $LEMMA_DICTIONARY_URI" - ] + "!gcloud storage cp $LEMMA_DICTIONARY_PATH $LEMMA_DICTIONARY_URI" ] }, { "cell_type": "markdown", diff --git a/notebooks/community/sdk/sdk_custom_image_classification_batch_explain.ipynb b/notebooks/community/sdk/sdk_custom_image_classification_batch_explain.ipynb index 318e29846..9f9ff1646 100644 --- a/notebooks/community/sdk/sdk_custom_image_classification_batch_explain.ipynb +++ b/notebooks/community/sdk/sdk_custom_image_classification_batch_explain.ipynb @@ -488,8 +488,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" - ] + "! 
gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -508,8 +507,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" - ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -930,8 +928,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz" - ] + "! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz" ] }, { "cell_type": "markdown", @@ -1547,9 +1544,7 @@ }, "outputs": [], "source": [ - "! gsutil cp tmp1.jpg $BUCKET_NAME/tmp1.jpg\n", - "! gsutil cp tmp2.jpg $BUCKET_NAME/tmp2.jpg\n", - "\n", + "! gcloud storage cp tmp1.jpg $BUCKET_NAME/tmp1.jpg\n", "! gcloud storage cp tmp2.jpg $BUCKET_NAME/tmp2.jpg\n", "\n", "test_item_1 = BUCKET_NAME + \"/tmp1.jpg\"\n", "test_item_2 = BUCKET_NAME + \"/tmp2.jpg\"" ] @@ -1795,8 +1790,7 @@ " print(e)\n", "\n", " if \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/sdk/sdk_custom_image_classification_online_explain.ipynb b/notebooks/community/sdk/sdk_custom_image_classification_online_explain.ipynb index c7f49a4d3..788f4eed0 100644 --- a/notebooks/community/sdk/sdk_custom_image_classification_online_explain.ipynb +++ b/notebooks/community/sdk/sdk_custom_image_classification_online_explain.ipynb @@ -490,8 +490,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" - ] + "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -510,8 +509,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" - ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { "cell_type": "markdown", @@ -932,8 +930,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz" - ] + "! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz" ] }, { "cell_type": "markdown", @@ -1835,8 +1832,7 @@ " print(e)\n", "\n", " if \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": {