diff --git a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
index 82638488f..027d9c6ca 100644
--- a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
+++ b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
@@ -472,8 +471,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -492,8 +491,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -565,8 +563,7 @@
    "outputs": [],
    "source": [
     "# Copy the sample data into your DATA_PATH\n",
-    "! gsutil cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH"
-   ]
+    "! gcloud storage cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH" ]
   },
   {
    "cell_type": "code",
@@ -1784,8 +1781,7 @@
     "! gcloud ai models delete $model.name --quiet\n",
     "\n",
     "# Delete Cloud Storage objects that were created\n",
-    "! gsutil -m rm -r $ARTIFACTS_DIR"
-   ]
+    "! gcloud storage rm --recursive $ARTIFACTS_DIR" ]
   }
  ],
  "metadata": {
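The tf_agents notebook above sets the pattern for every file in this patch: each gsutil invocation is swapped for its gcloud storage equivalent with spelled-out flags. As a reference, the recurring substitutions (taken directly from the hunks in this patch) are:

    # gsutil form                          ->  gcloud storage form used in this patch
    ! gsutil mb -l $REGION $BUCKET_NAME    #  ! gcloud storage buckets create --location $REGION $BUCKET_NAME
    ! gsutil ls -al $BUCKET_NAME           #  ! gcloud storage ls --all-versions --long $BUCKET_NAME
    ! gsutil cp SRC DST                    #  ! gcloud storage cp SRC DST
    ! gsutil -m rm -r $ARTIFACTS_DIR       #  ! gcloud storage rm --recursive $ARTIFACTS_DIR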
diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
index 747b149c0..a25e4b9e9 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
@@ -421,8 +420,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -441,8 +440,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -886,12 +884,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1329,8 +1325,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = !gsutil cat $IMPORT_FILE | head -n2\n",
-    "if len(str(test_items[0]).split(\",\")) == 3:\n",
+    "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "if len(str(test_items[0]).split(\",\")) == 3:\n",
     "    _, test_item_1, test_label_1 = str(test_items[0]).split(\",\")\n",
     "    _, test_item_2, test_label_2 = str(test_items[1]).split(\",\")\n",
     "else:\n",
@@ -1363,9 +1358,7 @@
     "file_1 = test_item_1.split(\"/\")[-1]\n",
     "file_2 = test_item_2.split(\"/\")[-1]\n",
     "\n",
-    "! gsutil cp $test_item_1 $BUCKET_NAME/$file_1\n",
-    "! gsutil cp $test_item_2 $BUCKET_NAME/$file_2\n",
-    "\n",
+    "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n",
     "test_item_1 = BUCKET_NAME + \"/\" + file_1\n",
     "test_item_2 = BUCKET_NAME + \"/\" + file_2"
   ]
@@ -1408,8 +1401,7 @@
     "        f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1693,8 +1685,7 @@
   "source": [
    "def get_latest_predictions(gcs_out_dir):\n",
    "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
    "    for folder in folders:\n",
    "        subfolder = folder.split(\"/\")[-2]\n",
    "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1711,10 +1702,8 @@
    "        raise Exception(\"Batch Job Failed\")\n",
    "    else:\n",
    "        folder = get_latest_predictions(predictions)\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
    "    time.sleep(60)"
   ]
  },
@@ -1808,8 +1797,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
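The `count = ! gcloud storage cat $FILE | wc -l` cells above shell out twice for a simple line count. Where that round-trip is unwanted, the count can be computed in-process; a minimal sketch, assuming the google-cloud-storage client library is installed and the object is small enough to hold in memory:

    from google.cloud import storage

    def gcs_line_count(gcs_uri: str) -> int:
        # Split gs://bucket/path into bucket and object name.
        bucket_name, _, blob_name = gcs_uri[len("gs://"):].partition("/")
        blob = storage.Client().bucket(bucket_name).blob(blob_name)
        # Count newline-terminated records, like `wc -l`.
        return blob.download_as_bytes().count(b"\n")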
diff --git a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb
index 96c79f565..be3cddc91 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb
@@ -421,8 +420,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -441,8 +440,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -887,12 +885,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1333,8 +1329,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = !gsutil cat $IMPORT_FILE | head -n2\n",
-    "cols_1 = str(test_items[0]).split(\",\")\n",
+    "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "cols_1 = str(test_items[0]).split(\",\")\n",
     "cols_2 = str(test_items[1]).split(\",\")\n",
     "if len(cols_1) == 11:\n",
     "    test_item_1 = str(cols_1[1])\n",
@@ -1373,9 +1368,7 @@
     "file_1 = test_item_1.split(\"/\")[-1]\n",
     "file_2 = test_item_2.split(\"/\")[-1]\n",
     "\n",
-    "! gsutil cp $test_item_1 $BUCKET_NAME/$file_1\n",
-    "! gsutil cp $test_item_2 $BUCKET_NAME/$file_2\n",
-    "\n",
+    "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n",
     "test_item_1 = BUCKET_NAME + \"/\" + file_1\n",
     "test_item_2 = BUCKET_NAME + \"/\" + file_2"
   ]
@@ -1418,8 +1411,7 @@
     "        f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1705,8 +1697,7 @@
   "source": [
    "def get_latest_predictions(gcs_out_dir):\n",
    "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
    "    for folder in folders:\n",
    "        subfolder = folder.split(\"/\")[-2]\n",
    "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1723,10 +1714,8 @@
    "        raise Exception(\"Batch Job Failed\")\n",
    "    else:\n",
    "        folder = get_latest_predictions(predictions)\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
    "    time.sleep(60)"
   ]
  },
@@ -1820,8 +1809,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
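A note on the `prediction*.jsonl` cells above: gcloud storage expands `*` wildcards inside gs:// URLs itself, just as gsutil did, and an unmatched pattern is normally passed through to it untouched by the notebook shell. Quoting the argument keeps the local shell out of it entirely; a sketch with a hypothetical output folder (in the notebooks the value comes from get_latest_predictions()):

    folder = "gs://example-bucket/batch-output"  # hypothetical value for illustration
    ! gcloud storage ls "$folder/prediction*.jsonl"
    ! gcloud storage cat "$folder/prediction*.jsonl"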
diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb
index b8e0039f0..928769361 100644
--- a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb
@@ -735,14 +735,11 @@
    },
    "outputs": [],
    "source": [
-    "count = ! gsutil cat $IMPORT_FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $IMPORT_FILE | head\n",
-    "\n",
-    "heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
-    "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
+    "! gcloud storage cat $IMPORT_FILE | head\n", "\n",
+    "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
     "print(\"Label Column Name\", label_column)\n",
     "if label_column is None:\n",
     "    raise Exception(\"label column missing\")"
@@ -1820,8 +1817,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
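The header parse above (`label_column = str(heading).split(",")[-1].split("'")[0]`) works by stringifying IPython's captured line list and trimming the trailing quote character. Indexing the first captured line is more direct; a sketch, assuming a one-line comma-separated header with no quoted field names:

    heading = ! gcloud storage cat $IMPORT_FILE | head -n1
    label_column = heading[0].split(",")[-1].strip()  # last column of the header row
    print("Label Column Name", label_column)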
diff --git a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb
index 6c9fec862..9f1f6a33c 100644
--- a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb
@@ -421,8 +420,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -441,8 +440,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -888,12 +886,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1377,8 +1373,7 @@
     "        f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1653,8 +1648,7 @@
   "source": [
    "def get_latest_predictions(gcs_out_dir):\n",
    "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
    "    for folder in folders:\n",
    "        subfolder = folder.split(\"/\")[-2]\n",
    "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1671,10 +1665,8 @@
    "        raise Exception(\"Batch Job Failed\")\n",
    "    else:\n",
    "        folder = get_latest_predictions(predictions)\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
    "    time.sleep(60)"
   ]
  },
@@ -1768,8 +1760,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
diff --git a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb
index 809951b31..ada16ce66 100644
--- a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb
+++ b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb
@@ -325,8 +324,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -345,8 +344,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -545,8 +543,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE | head -n 10"
-   ]
+    "! gcloud storage cat $IMPORT_FILE | head -n 10" ]
   },
   {
    "cell_type": "markdown",
@@ -1493,15 +1490,12 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE | head -n 1 > tmp.csv\n",
-    "! gsutil cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n",
-    "\n",
+    "! gcloud storage cat $IMPORT_FILE | head -n 1 > tmp.csv\n", "! gcloud storage cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", "\n",
     "! cut -d, -f1-16 tmp.csv > batch.csv\n",
     "\n",
     "gcs_input_uri = \"gs://\" + BUCKET_NAME + \"/test.csv\"\n",
     "\n",
-    "! gsutil cp batch.csv $gcs_input_uri"
-   ]
+    "! gcloud storage cp batch.csv $gcs_input_uri" ]
   },
   {
    "cell_type": "code",
@@ -1511,8 +1505,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1819,8 +1812,7 @@
   "source": [
    "def get_latest_predictions(gcs_out_dir):\n",
    "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
    "    for folder in folders:\n",
    "        subfolder = folder.split(\"/\")[-2]\n",
    "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1839,10 +1831,8 @@
    "        folder = get_latest_predictions(\n",
    "            response.output_config.gcs_destination.output_uri_prefix\n",
    "        )\n",
-    "        ! gsutil ls $folder/prediction*\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*\n", "        break\n",
    "    time.sleep(60)"
   ]
  },
@@ -2452,8 +2442,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r gs://$BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
   }
  ],
 "metadata": {
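The tmp.csv/batch.csv pipeline above (head for the header, tail for ten rows, cut for the first 16 columns) can also be written without the shell; a sketch, assuming pandas and gcsfs are available in the kernel so pd.read_csv can open gs:// URIs directly (note pandas may reformat numeric values where the cut pipeline copies bytes verbatim):

    import pandas as pd

    df = pd.read_csv(IMPORT_FILE)       # IMPORT_FILE is defined earlier in the notebook
    batch = df.tail(10).iloc[:, :16]    # last 10 rows, first 16 columns: tail + cut -d, -f1-16
    batch.to_csv("batch.csv", index=False)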
diff --git a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb
index 5474e8ec7..988c3914d 100644
--- a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb
+++ b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb
@@ -325,8 +324,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -345,8 +344,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -542,8 +540,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE | head -n 10"
-   ]
+    "! gcloud storage cat $IMPORT_FILE | head -n 10" ]
   },
   {
    "cell_type": "markdown",
@@ -1428,17 +1425,14 @@
    },
    "outputs": [],
    "source": [
-    "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
-    "\n",
+    "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n",
     "test_item_1, test_label_1 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n",
     "test_item_2, test_label_2 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n",
     "\n",
     "file_1 = test_item_1.split(\"/\")[-1]\n",
     "file_2 = test_item_2.split(\"/\")[-1]\n",
     "\n",
-    "! gsutil cp $test_item_1 gs://$BUCKET_NAME/$file_1\n",
-    "! gsutil cp $test_item_2 gs://$BUCKET_NAME/$file_2\n",
-    "\n",
+    "! gcloud storage cp $test_item_1 gs://$BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 gs://$BUCKET_NAME/$file_2\n", "\n",
     "test_item_1 = \"gs://\" + BUCKET_NAME + \"/\" + file_1\n",
     "test_item_2 = \"gs://\" + BUCKET_NAME + \"/\" + file_2\n",
     "\n",
@@ -1478,8 +1472,7 @@
     "    data = {\"content\": test_item_2, \"mime_type\": \"image/jpeg\"}\n",
     "    f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
-    "!gsutil cat $gcs_input_uri"
-   ]
+    "!gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1800,8 +1793,7 @@
   "source": [
    "def get_latest_predictions(gcs_out_dir):\n",
    "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
    "    for folder in folders:\n",
    "        subfolder = folder.split(\"/\")[-2]\n",
    "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1820,9 +1812,8 @@
    "        folder = get_latest_predictions(\n",
    "            response.output_config.gcs_destination.output_uri_prefix\n",
    "        )\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n",
    "        break\n",
    "    time.sleep(60)"
   ]
@@ -2172,8 +2163,7 @@
     "\n",
     "import tensorflow as tf\n",
     "\n",
-    "single_file = ! gsutil cat $IMPORT_FILE | head -n 1\n",
-    "single_file = single_file[0].split(\",\")[1]\n",
+    "single_file = ! gcloud storage cat $IMPORT_FILE | head -n 1\n", "single_file = single_file[0].split(\",\")[1]\n",
     "\n",
     "with tf.io.gfile.GFile(single_file, \"rb\") as f:\n",
     "    content = f.read()\n",
@@ -2443,8 +2433,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r gs://$BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
   }
  ],
 "metadata": {
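The UJ5 notebook above already reads objects through tf.io.gfile (see the -2172 hunk), so the `single_file = ! gcloud storage cat $IMPORT_FILE | head -n 1` capture could use the same API instead of piping through cat and head; a sketch under that assumption, with IMPORT_FILE as the gs:// URI defined earlier in the notebook:

    import tensorflow as tf

    with tf.io.gfile.GFile(IMPORT_FILE, "r") as f:
        first_line = f.readline().rstrip("\n")
    single_file = first_line.split(",")[1]  # second CSV column holds the item URI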
diff --git a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb
index df9e2fdd0..d86a33bcc 100644
--- a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb
+++ b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb
@@ -325,8 +324,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -345,8 +344,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al gs://$BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -543,8 +541,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE | head -n 10"
-   ]
+    "! gcloud storage cat $IMPORT_FILE | head -n 10" ]
   },
   {
    "cell_type": "markdown",
@@ -1552,8 +1549,7 @@
    },
    "outputs": [],
    "source": [
-    "test_item = ! gsutil cat $IMPORT_FILE | head -n1\n",
-    "test_item, test_label = str(test_item[0]).split(\",\")\n",
+    "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n",
     "\n",
     "print(test_item, test_label)"
   ]
@@ -1614,9 +1610,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $gcs_input_uri\n",
-    "! gsutil cat $test_item_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri\n", "! gcloud storage cat $test_item_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1917,8 +1911,7 @@
   "source": [
    "def get_latest_predictions(gcs_out_dir):\n",
    "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
    "    for folder in folders:\n",
    "        subfolder = folder.split(\"/\")[-2]\n",
    "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1937,10 +1930,8 @@
    "        folder = get_latest_predictions(\n",
    "            response.output_config.gcs_destination.output_uri_prefix\n",
    "        )\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
    "    time.sleep(60)"
   ]
  },
@@ -2260,8 +2251,7 @@
    },
    "outputs": [],
    "source": [
-    "test_item = ! gsutil cat $IMPORT_FILE | head -n1\n",
-    "test_item, test_label = str(test_item[0]).split(\",\")\n",
+    "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n",
     "\n",
     "instances_list = [{\"content\": test_item}]\n",
     "instances = [json_format.ParseDict(s, Value()) for s in instances_list]\n",
@@ -2510,8 +2500,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r gs://$BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
   }
  ],
 "metadata": {
diff --git a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
index 4bb16bb38..190873fdc 100644
--- a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
+++ b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
@@ -360,8 +359,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
-   ]
+    "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}" ]
   },
   {
    "cell_type": "markdown",
@@ -1431,8 +1430,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -l {train_experiment_uri}"
-   ]
+    "! gcloud storage ls --long {train_experiment_uri}" ]
   },
   {
    "cell_type": "markdown",
@@ -1501,8 +1499,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil -q cp -r {train_experiment_uri}/* {experiments_path}"
-   ]
+    "! gcloud storage cp --recursive {train_experiment_uri}/* {experiments_path}" ]
   },
   {
    "cell_type": "markdown",
@@ -1896,8 +1893,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil -q cp -r {models_path} {MODELS_PATH}"
-   ]
+    "! gcloud storage cp --recursive {models_path} {MODELS_PATH}" ]
   },
   {
    "cell_type": "markdown",
@@ -2207,8 +2203,7 @@
     "\n",
     "# Delete Cloud Storage objects that were created\n",
     "if delete_bucket:\n",
-    "    ! gsutil -q -m rm -r {BUCKET_URI}\n",
-    "\n",
+    "    ! gcloud storage rm --recursive {BUCKET_URI}\n", "\n",
     "# Delete tutorial folder\n",
     "if delete_tutorial:\n",
     "    shutil.rmtree(tutorial_path)"
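In the Gemma notebook above, the dropped gsutil flags have no one-to-one spellings: gcloud storage parallelizes transfers by default (so -m simply goes away), and -q corresponds to gcloud's global output property rather than a storage flag. If the quiet behavior matters, it can be restored explicitly; a sketch:

    # Parallelism is the default in gcloud storage; --no-user-output-enabled is the
    # global gcloud flag that suppresses status output, roughly like gsutil -q.
    ! gcloud storage rm --recursive --no-user-output-enabled {BUCKET_URI}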
diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index 9c2ad6759..ef096d9fc 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -229,11 +227,9 @@
    "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
    "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
    "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
-    "else:\n",
+    "    ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
    "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
-    "    bucket_region = shell_output[0].strip().lower()\n",
+    "    shell_output = ! gcloud storage buckets describe {BUCKET_NAME} --format=\"value(location)\"\n", "    bucket_region = shell_output[0].strip().lower()\n",
    "    if bucket_region != REGION:\n",
    "        raise ValueError(\n",
    "            \"Bucket region %s is different from notebook region %s\"\n",
@@ -257,8 +255,7 @@
    "\n",
    "\n",
    "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
-    "\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n",
    "! gcloud config set project $PROJECT_ID\n",
    "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
    "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\""
@@ -1259,8 +1256,7 @@
    "\n",
    "delete_bucket = False  # @param {type:\"boolean\"}\n",
    "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
  },
  {
   "cell_type": "markdown",