diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_online_proxy.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_online_proxy.ipynb
index e39aff40e..6be4bda57 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_online_proxy.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_online_proxy.ipynb
@@ -721,12 +721,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1342,8 +1340,7 @@
    },
    "outputs": [],
    "source": [
-    "test_item = !gsutil cat $IMPORT_FILE | head -n1\n",
-    "if len(str(test_item[0]).split(\",\")) == 3:\n",
+    "test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n", "if len(str(test_item[0]).split(\",\")) == 3:\n",
     "    _, test_item, test_label = str(test_item[0]).split(\",\")\n",
     "else:\n",
     "    test_item, test_label = str(test_item[0]).split(\",\")\n",
@@ -1747,8 +1744,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch.ipynb
index 0fb7fb6ed..93dd52360 100644
--- a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_batch.ipynb
@@ -421,8 +421,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -441,8 +440,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -810,14 +808,11 @@
    },
    "outputs": [],
    "source": [
-    "count = ! gsutil cat $IMPORT_FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $IMPORT_FILE | head\n",
-    "\n",
-    "heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
-    "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
+    "! gcloud storage cat $IMPORT_FILE | head\n", "\n",
+    "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
     "print(\"Label Column Name\", label_column)\n",
     "if label_column is None:\n",
     "    raise Exception(\"label column missing\")"
@@ -1363,8 +1358,7 @@
     "    f.write(str(INSTANCE_2) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1632,8 +1626,7 @@
    "source": [
     "def get_latest_predictions(gcs_out_dir):\n",
     "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
     "    for folder in folders:\n",
     "        subfolder = folder.split(\"/\")[-2]\n",
     "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1650,10 +1643,8 @@
     "            raise Exception(\"Batch Job Failed\")\n",
     "    else:\n",
     "        folder = get_latest_predictions(predictions)\n",
-    "        ! gsutil ls $folder/prediction*.csv\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.csv\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.csv\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.csv\n", "        break\n",
     "    time.sleep(60)"
    ]
   },
@@ -1747,8 +1738,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_text_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_classification_batch.ipynb
index 8c9d39911..31cab51f5 100644
--- a/notebooks/community/gapic/automl/showcase_automl_text_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_text_classification_batch.ipynb
@@ -421,8 +421,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -441,8 +440,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -881,12 +879,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1314,8 +1310,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
-    "if len(test_items[0]) == 3:\n",
+    "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "if len(test_items[0]) == 3:\n",
     "    _, test_item_1, test_label_1 = str(test_items[0]).split(\",\")\n",
     "    _, test_item_2, test_label_2 = str(test_items[1]).split(\",\")\n",
     "else:\n",
@@ -1371,8 +1366,7 @@
     "    f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1644,8 +1638,7 @@
    "source": [
     "def get_latest_predictions(gcs_out_dir):\n",
     "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
     "    for folder in folders:\n",
     "        subfolder = folder.split(\"/\")[-2]\n",
     "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1662,10 +1655,8 @@
     "            raise Exception(\"Batch Job Failed\")\n",
     "    else:\n",
     "        folder = get_latest_predictions(predictions)\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
     "    time.sleep(60)"
    ]
   },
@@ -1759,8 +1750,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_text_multi-label_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_multi-label_classification_batch.ipynb
index 4b14a9eb9..4427c07fa 100644
--- a/notebooks/community/gapic/automl/showcase_automl_text_multi-label_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_text_multi-label_classification_batch.ipynb
@@ -421,8 +421,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -441,8 +440,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -881,12 +879,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1314,8 +1310,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
-    "\n",
+    "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n",
     "cols_1 = str(test_items[0]).split(\",\")\n",
     "cols_2 = str(test_items[1]).split(\",\")\n",
     "test_item_1 = cols_1[0]\n",
@@ -1372,8 +1367,7 @@
     "    f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1645,8 +1639,7 @@
    "source": [
     "def get_latest_predictions(gcs_out_dir):\n",
     "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
     "    for folder in folders:\n",
     "        subfolder = folder.split(\"/\")[-2]\n",
     "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1663,10 +1656,8 @@
     "            raise Exception(\"Batch Job Failed\")\n",
     "    else:\n",
     "        folder = get_latest_predictions(predictions)\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
     "    time.sleep(60)"
    ]
   },
@@ -1760,8 +1751,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_text_sentiment_analysis_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_sentiment_analysis_batch.ipynb
index 1fc2fce91..7f6597d06 100644
--- a/notebooks/community/gapic/automl/showcase_automl_text_sentiment_analysis_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_text_sentiment_analysis_batch.ipynb
@@ -421,8 +421,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_NAME"
-   ]
+    "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -441,8 +440,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_NAME"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
   },
   {
    "cell_type": "markdown",
@@ -883,12 +881,10 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
-    "print(\"Number of Examples\", int(count[0]))\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
-   ]
+    "! gcloud storage cat $FILE | head" ]
   },
   {
    "cell_type": "markdown",
@@ -1316,8 +1312,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
-    "\n",
+    "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n",
     "if len(test_items[0]) == 4:\n",
     "    _, test_item_1, test_label_1, _ = str(test_items[0]).split(\",\")\n",
     "    _, test_item_2, test_label_2, _ = str(test_items[1]).split(\",\")\n",
@@ -1375,8 +1370,7 @@
     "    f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
-   ]
+    "! gcloud storage cat $gcs_input_uri" ]
   },
   {
    "cell_type": "markdown",
@@ -1643,8 +1637,7 @@
    "source": [
     "def get_latest_predictions(gcs_out_dir):\n",
     "    \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-    "    folders = !gsutil ls $gcs_out_dir\n",
-    "    latest = \"\"\n",
+    "    folders = !gcloud storage ls $gcs_out_dir\n", "    latest = \"\"\n",
     "    for folder in folders:\n",
     "        subfolder = folder.split(\"/\")[-2]\n",
     "        if subfolder.startswith(\"prediction-\"):\n",
@@ -1661,10 +1654,8 @@
     "            raise Exception(\"Batch Job Failed\")\n",
     "    else:\n",
     "        folder = get_latest_predictions(predictions)\n",
-    "        ! gsutil ls $folder/prediction*.jsonl\n",
-    "\n",
-    "        ! gsutil cat $folder/prediction*.jsonl\n",
-    "        break\n",
+    "        ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+    "        ! gcloud storage cat $folder/prediction*.jsonl\n", "        break\n",
     "    time.sleep(60)"
    ]
   },
@@ -1758,8 +1749,7 @@
     "    print(e)\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r $BUCKET_NAME"
-   ]
+    "    ! gcloud storage rm --recursive $BUCKET_NAME" ]
   }
  ],
 "metadata": {
diff --git a/notebooks/community/ml_ops/stage2/get_started_vertex_training_lightgbm.ipynb b/notebooks/community/ml_ops/stage2/get_started_vertex_training_lightgbm.ipynb
index 24c4f7395..818200bf6 100644
--- a/notebooks/community/ml_ops/stage2/get_started_vertex_training_lightgbm.ipynb
+++ b/notebooks/community/ml_ops/stage2/get_started_vertex_training_lightgbm.ipynb
@@ -485,8 +485,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
-   ]
+    "! gcloud storage buckets create --location $REGION $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -505,8 +504,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
-   ]
+    "! gcloud storage ls --all-versions --long $BUCKET_URI" ]
   },
   {
    "cell_type": "markdown",
@@ -934,8 +932,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz"
-   ]
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz" ]
   },
   {
    "cell_type": "markdown",
@@ -1139,8 +1136,7 @@
    "source": [
     "print(f\"Model path with trained model artifacts {model_path_to_deploy}\")\n",
     "\n",
-    "! gsutil ls $model_path_to_deploy"
-   ]
+    "! gcloud storage ls $model_path_to_deploy" ]
   },
   {
    "cell_type": "markdown",
@@ -1362,8 +1358,7 @@
     "MODEL_DIR=$1\n",
     "\n",
     "mkdir -p ./serve/model/\n",
-    "gsutil cp -r ${MODEL_DIR}/model/ ./serve/model/ \n",
-    "\n",
+    "gcloud storage cp --recursive ${MODEL_DIR}/model/ ./serve/model/ \n", "\n",
     "cat > ./serve/Dockerfile <