diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_online.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_online.ipynb
index ebbd7968f..e3021d2f7 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_online.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_online.ipynb
@@ -721,12 +721,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1342,8 +1340,7 @@
    },
    "outputs": [],
    "source": [
-    "test_item = !gsutil cat $IMPORT_FILE | head -n1\n",
-    "if len(str(test_item[0]).split(\",\")) == 3:\n",
+    "test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n", "if len(str(test_item[0]).split(\",\")) == 3:\n",
     "    _, test_item, test_label = str(test_item[0]).split(\",\")\n",
     "else:\n",
     "    test_item, test_label = str(test_item[0]).split(\",\")\n",
@@ -1568,8 +1565,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_image_segmentation_online.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_segmentation_online.ipynb
index 2b394f59a..605eefabd 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_segmentation_online.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_segmentation_online.ipynb
@@ -726,12 +726,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1338,8 +1336,7 @@
    "source": [
     "import json\n",
     "\n",
-    "test_items = !gsutil cat $IMPORT_FILE | head -n1\n",
-    "test_data = test_items[0].replace(\"'\", '\"')\n",
+    "test_items = !gcloud storage cat $IMPORT_FILE | head -n1\n", "test_data = test_items[0].replace(\"'\", '\"')\n",
     "test_data = json.loads(test_data)\n",
     "try:\n",
     "    test_item = test_data[\"image_gcs_uri\"]\n",
@@ -1555,8 +1552,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_online.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_online.ipynb
index e97fce9ba..3852200c4 100644
--- a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_online.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_online.ipynb
@@ -723,12 +723,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1544,8 +1542,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/community/matching_engine/matching_engine_for_indexing.ipynb b/notebooks/community/matching_engine/matching_engine_for_indexing.ipynb
index 59e25acd3..aa05b5f2e 100644
--- a/notebooks/community/matching_engine/matching_engine_for_indexing.ipynb
+++ b/notebooks/community/matching_engine/matching_engine_for_indexing.ipynb
@@ -477,8 +477,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -497,8 +496,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -579,8 +577,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ."
-   ]
+    "! gcloud storage cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ." ]
   },
   {
    "cell_type": "markdown",
@@ -661,8 +658,7 @@
    "source": [
     "# NOTE: Everything in this Google Cloud Storage directory will be DELETED before uploading the data\n",
     "\n",
-    "! gsutil rm -raf {BUCKET_NAME}/** 2> /dev/null || true"
-   ]
+    "! gcloud storage rm --recursive --all-versions --continue-on-error {BUCKET_NAME}/** 2> /dev/null || true" ]
   },
   {
    "cell_type": "code",
@@ -672,8 +668,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cp glove100.json {BUCKET_NAME}/glove100.json"
-   ]
+    "! gcloud storage cp glove100.json {BUCKET_NAME}/glove100.json" ]
   },
   {
    "cell_type": "code",
@@ -683,8 +678,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls {BUCKET_NAME}"
-   ]
+    "! gcloud storage ls {BUCKET_NAME}" ]
   },
   {
    "cell_type": "markdown",
@@ -966,8 +960,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cp glove100_incremental.json {BUCKET_NAME}/incremental/glove100.json"
-   ]
+    "! gcloud storage cp glove100_incremental.json {BUCKET_NAME}/incremental/glove100.json" ]
   },
   {
    "cell_type": "markdown",
@@ -2049,8 +2042,7 @@
     "\n",
     "delete_bucket = False\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil -m rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/community/migration/UJ14 AutoML for vision with Vertex AI Video Classification.ipynb b/notebooks/community/migration/UJ14 AutoML for vision with Vertex AI Video Classification.ipynb
index ba7f031e4..f0cb6e509 100644
--- a/notebooks/community/migration/UJ14 AutoML for vision with Vertex AI Video Classification.ipynb
+++ b/notebooks/community/migration/UJ14 AutoML for vision with Vertex AI Video Classification.ipynb
@@ -325,8 +325,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -345,8 +344,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -542,8 +540,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE | head -n 10"
-   ]
+    "! gcloud storage cat $IMPORT_FILE | head -n 10" ]
   },
   {
    "cell_type": "markdown",
@@ -1435,8 +1432,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
-    "\n",
+    "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n",
     "cols = str(test_items[0]).split(\",\")\n",
     "test_item_1 = str(cols[0])\n",
     "test_label_1 = str(cols[1])\n",
@@ -1510,8 +1506,7 @@
     "\n",
     "print(gcs_input_uri)\n",
     "\n",
-    "!gsutil cat $gcs_input_uri"
-   ]
+    "!gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1842,8 +1837,7 @@
    "source": [
     "def get_latest_predictions(gcs_out_dir):\n",
     "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
     "    for folder in folders:\n",
     "        subfolder = folder.split(\"/\")[-2]\n",
     "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1862,10 +1856,8 @@
     "        folder = get_latest_predictions(\n",
     "            response.output_config.gcs_destination.output_uri_prefix\n",
     "        )\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
     "    time.sleep(60)"
    ]
   },
@@ -1941,8 +1933,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r gs://$BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/community/ml_ops/stage1/get_started_vertex_datasets.ipynb b/notebooks/community/ml_ops/stage1/get_started_vertex_datasets.ipynb
index 8cd166618..83567447f 100644
--- a/notebooks/community/ml_ops/stage1/get_started_vertex_datasets.ipynb
+++ b/notebooks/community/ml_ops/stage1/get_started_vertex_datasets.ipynb
@@ -489,8 +489,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
-   ]
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -509,8 +508,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -1300,14 +1298,12 @@
     "\n",
     "! bq --location=us extract --destination_format CSV $BQ_PROJECT_DATASET_TABLE $BUCKET_URI/mydata*.csv\n",
     "\n",
-    "IMPORT_FILES = ! gsutil ls $BUCKET_URI/mydata*.csv\n",
-    "\n",
+    "IMPORT_FILES = ! gcloud storage ls $BUCKET_URI/mydata*.csv\n", "\n",
     "print(IMPORT_FILES)\n",
     "\n",
     "EXAMPLE_FILE = IMPORT_FILES[0]\n",
     "\n",
-    "! gsutil cat $EXAMPLE_FILE | head"
-   ]
+    "! gcloud storage cat $EXAMPLE_FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1453,8 +1449,7 @@
     "EXPORTED_DIR = f\"{BUCKET_URI}/exported\"\n",
     "exported_files = dataset.export_data(output_dir=EXPORTED_DIR)\n",
     "\n",
-    "! gsutil ls $EXPORTED_DIR"
-   ]
+    "! gcloud storage ls $EXPORTED_DIR" ]
   },
   {
    "cell_type": "markdown",
@@ -1483,8 +1478,7 @@
    "source": [
     "jsonl_index = exported_files[0]\n",
     "\n",
-    "! gsutil cat $jsonl_index | head"
-   ]
+    "! gcloud storage cat $jsonl_index | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1695,8 +1689,7 @@
     "    if n % 10 == 0:\n",
     "        print(n, image)\n",
     "\n",
-    "listing = ! gsutil ls -la $GCS_TFRECORD_URI\n",
-    "print(\"TFRecord File\", listing)"
+    "listing = ! gcloud storage ls --long --all-versions $GCS_TFRECORD_URI\n", "print(\"TFRecord File\", listing)"
    ]
   },
   {
@@ -1733,8 +1726,7 @@
     "\n",
     "# Delete the bucket\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -r $BUCKET_URI"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_URI" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/community/sdk/sdk_custom_image_classification_online_custom_training_container.ipynb b/notebooks/community/sdk/sdk_custom_image_classification_online_custom_training_container.ipynb
index 5df1b129f..36524a63f 100644
--- a/notebooks/community/sdk/sdk_custom_image_classification_online_custom_training_container.ipynb
+++ b/notebooks/community/sdk/sdk_custom_image_classification_online_custom_training_container.ipynb
@@ -475,8 +475,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -495,8 +494,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -1057,8 +1055,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz"
-   ]
+    "! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz" ]
   },
   {
    "cell_type": "markdown",
@@ -1679,8 +1676,7 @@
     "        print(e)\n",
     "\n",
     "    if \"BUCKET_NAME\" in globals():\n",
-    "        ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "        ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/community/sdk/sdk_custom_tabular_regression_batch_explain.ipynb b/notebooks/community/sdk/sdk_custom_tabular_regression_batch_explain.ipynb
index 154ad197c..42db4671c 100644
--- a/notebooks/community/sdk/sdk_custom_tabular_regression_batch_explain.ipynb
+++ b/notebooks/community/sdk/sdk_custom_tabular_regression_batch_explain.ipynb
@@ -474,8 +474,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -494,8 +493,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -920,8 +918,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz"
-   ]
+    "! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz" ]
   },
   {
    "cell_type": "markdown",
@@ -1463,15 +1460,12 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE | head -n 1 > tmp.csv\n",
-    "! gsutil cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n",
-    "\n",
+    "! gcloud storage cat $IMPORT_FILE | head -n 1 > tmp.csv\n", "! gcloud storage cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", "\n",
     "! cut -d, -f1-16 tmp.csv > batch.csv\n",
     "\n",
     "gcs_input_uri = BUCKET_NAME + \"/test.csv\"\n",
     "\n",
-    "! gsutil cp batch.csv $gcs_input_uri"
-   ]
+    "! gcloud storage cp batch.csv $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1669,8 +1663,7 @@
     "        print(e)\n",
     "\n",
     "    if \"BUCKET_NAME\" in globals():\n",
-    "        ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "        ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
  "metadata": {
diff --git a/notebooks/official/explainable_ai/sdk_custom_image_classification_online_explain.ipynb b/notebooks/official/explainable_ai/sdk_custom_image_classification_online_explain.ipynb
index 425ec25bd..0ffb19a01 100644
--- a/notebooks/official/explainable_ai/sdk_custom_image_classification_online_explain.ipynb
+++ b/notebooks/official/explainable_ai/sdk_custom_image_classification_online_explain.ipynb
@@ -300,8 +300,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI"
-   ]
+    "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -733,8 +732,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz"
-   ]
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz" ]
   },
   {
    "cell_type": "markdown",
@@ -1573,8 +1571,7 @@
     "# Delete the Cloud Storage bucket\n",
     "delete_bucket = False  # Set True for deletion\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI\n",
-    "\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n", "\n",
     "# Delete locally generated files\n",
     "! rm -rf custom/\n",
     "! rm custom.tar.gz\n",
diff --git a/notebooks/official/training/get_started_vertex_training_lightgbm.ipynb b/notebooks/official/training/get_started_vertex_training_lightgbm.ipynb
index e5c2d0aa3..7b968e9c7 100644
--- a/notebooks/official/training/get_started_vertex_training_lightgbm.ipynb
+++ b/notebooks/official/training/get_started_vertex_training_lightgbm.ipynb
@@ -295,8 +295,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
-   ]
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ]
   },
   {
    "cell_type": "markdown",
@@ -732,8 +731,7 @@
     "# Create a zip file\n",
     "! gzip custom.tar\n",
     "# Copy the package to Cloud Storage bucket\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz"
-   ]
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz" ]
   },
   {
    "cell_type": "markdown",
@@ -894,8 +892,7 @@
     "print(f\"Model path with trained model artifacts {model_path_to_deploy}\")\n",
     "\n",
     "# List the contents of the model folder\n",
-    "! gsutil ls $model_path_to_deploy"
-   ]
+    "! gcloud storage ls $model_path_to_deploy" ]
   },
   {
    "cell_type": "markdown",
@@ -1158,8 +1155,7 @@
     "MODEL_DIR=$1\n",
     "PORT=$2\n",
     "mkdir -p ./serve/model/\n",
-    "gsutil cp $MODEL_DIR/model/* ./serve/model/\n",
-    "\n",
+    "gcloud storage cp $MODEL_DIR/model/* ./serve/model/\n", "\n",
     "cat > ./serve/Dockerfile <