
Sync change #3556

Draft · wants to merge 1 commit into base: main
1,377 changes: 689 additions & 688 deletions notebooks/community/model_garden/model_garden_camp_zipnerf.ipynb

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -30,18 +30,18 @@
},
"source": [
"# Vertex AI Model Garden - CamP ZipNeRF (Jax) Gradio Notebook\n",
"<table><tbody><tr>\n",
" <td style=\"text-align: center\">\n",
" <a href=\"https://console.cloud.google.com/vertex-ai/colab/import/https:%2F%2Fraw.githubusercontent.com%2FGoogleCloudPlatform%2Fvertex-ai-samples%2Fmain%2Fnotebooks%2Fcommunity%2Fmodel_garden%2Fmodel_garden_camp_zipnerf_gradio.ipynb\">\n",
" <img alt=\"Google Cloud Colab Enterprise logo\" src=\"https://lh3.googleusercontent.com/JmcxdQi-qOpctIvWKgPtrzZdJJK-J3sWE1RsfjZNwshCFgE_9fULcNpuXYTilIR2hjwN\" width=\"32px\"><br> Run in Colab Enterprise\n",
" </a>\n",
" </td>\n",
" <td style=\"text-align: center\">\n",
" <a href=\"https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/main/notebooks/community/model_garden/model_garden_camp_zipnerf_gradio.ipynb\">\n",
" <img alt=\"GitHub logo\" src=\"https://cloud.google.com/ml-engine/images/github-logo-32px.png\" width=\"32px\"><br> View on GitHub\n",
" </a>\n",
" </td>\n",
"</tr></tbody></table>"
"\u003ctable\u003e\u003ctbody\u003e\u003ctr\u003e\n",
" \u003ctd style=\"text-align: center\"\u003e\n",
" \u003ca href=\"https://console.cloud.google.com/vertex-ai/colab/import/https:%2F%2Fraw.githubusercontent.com%2FGoogleCloudPlatform%2Fvertex-ai-samples%2Fmain%2Fnotebooks%2Fcommunity%2Fmodel_garden%2Fmodel_garden_camp_zipnerf_gradio.ipynb\"\u003e\n",
" \u003cimg alt=\"Google Cloud Colab Enterprise logo\" src=\"https://lh3.googleusercontent.com/JmcxdQi-qOpctIvWKgPtrzZdJJK-J3sWE1RsfjZNwshCFgE_9fULcNpuXYTilIR2hjwN\" width=\"32px\"\u003e\u003cbr\u003e Run in Colab Enterprise\n",
" \u003c/a\u003e\n",
" \u003c/td\u003e\n",
" \u003ctd style=\"text-align: center\"\u003e\n",
" \u003ca href=\"https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/main/notebooks/community/model_garden/model_garden_camp_zipnerf_gradio.ipynb\"\u003e\n",
" \u003cimg alt=\"GitHub logo\" src=\"https://cloud.google.com/ml-engine/images/github-logo-32px.png\" width=\"32px\"\u003e\u003cbr\u003e View on GitHub\n",
" \u003c/a\u003e\n",
" \u003c/td\u003e\n",
"\u003c/tr\u003e\u003c/tbody\u003e\u003c/table\u003e"
]
},
{
@@ -421,7 +421,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "j1evctm2h34g"
},
"outputs": [],
@@ -431,8 +430,8 @@
"\n",
"import concurrent.futures\n",
"import glob\n",
"import hashlib\n",
"import logging\n",
"import hashlib\n",
"import mimetypes\n",
"import os\n",
"import re\n",
@@ -480,11 +479,9 @@
"GCS_API_ENDPOINT = \"https://storage.cloud.google.com/\"\n",
"\n",
"# Configure logging\n",
"logging.basicConfig(\n",
" level=logging.INFO, format=\"%(asctime)s - %(levelname)s - %(message)s\"\n",
")\n",
"logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')\n",
"\n",
"# track unique experiments\n",
"# Track unique experiments\n",
"unique_experiments = set()\n",
"\n",
"# Define dataset and table IDs\n",
@@ -525,11 +522,11 @@
" return None\n",
"\n",
"\n",
"def get_job_name_with_datetime(prefix: str) -> str:\n",
"def get_job_name_with_datetime(prefix: str) -\u003e str:\n",
" return prefix + datetime.now().strftime(\"_%Y%m%d_%H%M%S\")\n",
"\n",
"\n",
"def get_vertex_ai_job_status(job_id: str) -> str:\n",
"def get_vertex_ai_job_status(job_id: str) -\u003e str:\n",
" job = aiplatform.CustomJob.get(job_id)\n",
" return job.state\n",
"\n",
@@ -548,17 +545,17 @@
" return link\n",
"\n",
"\n",
"def get_vertex_ai_pipeline_job_status(job_id: str) -> str:\n",
"def get_vertex_ai_pipeline_job_status(job_id: str) -\u003e str:\n",
" job = aiplatform.PipelineJob.get(job_id)\n",
" return job.state\n",
"\n",
"\n",
"def get_bucket_and_blob_name(filepath: str) -> tuple:\n",
"def get_bucket_and_blob_name(filepath: str) -\u003e tuple:\n",
" gs_suffix = filepath.split(\"gs://\", 1)[1]\n",
" return tuple(gs_suffix.split(\"/\", 1))\n",
"\n",
"\n",
"def get_bucket_and_blob_name_https(filepath: str) -> tuple:\n",
"def get_bucket_and_blob_name_https(filepath: str) -\u003e tuple:\n",
" gs_suffix = filepath.split(\"https://\", 1)[1]\n",
" return tuple(gs_suffix.split(\"/\", 1))\n",
"\n",
@@ -567,7 +564,7 @@
" return bigquery.Client()\n",
"\n",
"\n",
"def is_gcs_path(input_path: str) -> bool:\n",
"def is_gcs_path(input_path: str) -\u003e bool:\n",
" \"\"\"Checks if the input path is a Google Cloud Storage (GCS) path.\n",
"\n",
" Args:\n",
@@ -685,7 +682,7 @@
" client.download_blob_to_file(gcs_uri, f)\n",
"\n",
"\n",
"def list_gcs_bucket_contents() -> dict:\n",
"def list_gcs_bucket_contents() -\u003e dict:\n",
" client = storage.Client()\n",
" bucket_name = get_bucket_and_blob_name(BUCKET_NAME)[0]\n",
" bucket = client.get_bucket(bucket_name)\n",
@@ -747,12 +744,12 @@
" return f\"Images uploaded successfully to {gcs_dir_path}.\"\n",
"\n",
"\n",
"def fetch_job_times(job_id: str) -> tuple:\n",
"def fetch_job_times(job_id: str) -\u003e tuple:\n",
" job = aiplatform.CustomJob.get(job_id)\n",
" return job.create_time, job.start_time, job.end_time\n",
"\n",
"\n",
"def fetch_pipeline_job_times(job_id: str) -> tuple:\n",
"def fetch_pipeline_job_times(job_id: str) -\u003e tuple:\n",
" pipeline_job = aiplatform.PipelineJob.get(job_id)\n",
" while True:\n",
" try:\n",
@@ -768,7 +765,7 @@
" return create_time, start_time, update_time\n",
"\n",
"\n",
"def list_bq_folder_contents_colmap(table_id: str) -> dict:\n",
"def list_bq_folder_contents_colmap(table_id: str) -\u003e dict:\n",
" client = get_bigquery_client()\n",
" query = f\"\"\"\n",
" SELECT Experiment_ID, Scene_Name, Job_Status, Image_Count, Colmap_Job_ID, Created_Time, Start_Time, End_Time,\n",
@@ -823,7 +820,7 @@
" return folder_counts\n",
"\n",
"\n",
"def list_bq_folder_contents_training(table_id: str) -> dict:\n",
"def list_bq_folder_contents_training(table_id: str) -\u003e dict:\n",
" client = get_bigquery_client()\n",
" query = f\"\"\"\n",
" SELECT Experiment_ID, Scene_Name, Job_Status, Image_Count, Colmap_Job_ID, Training_Job_ID, Training_Job_Name, Created_Time, Start_Time, End_Time,\n",
@@ -875,7 +872,7 @@
" return folder_counts\n",
"\n",
"\n",
"def list_bq_folder_contents_rendering(table_id: str) -> dict:\n",
"def list_bq_folder_contents_rendering(table_id: str) -\u003e dict:\n",
" client = get_bigquery_client()\n",
" query = f\"\"\"\n",
" SELECT Experiment_ID, Scene_Name, Job_Status, Image_Count, Colmap_Job_ID, Training_Job_ID, Training_Job_Name, Rendering_Job_ID, Rendering_Job_Name, Created_Time, Start_Time, End_Time,\n",
@@ -937,7 +934,7 @@
" return folder_counts\n",
"\n",
"\n",
"def get_bq_folders_dataframe_colmap(table_id: str) -> pd.DataFrame:\n",
"def get_bq_folders_dataframe_colmap(table_id: str) -\u003e pd.DataFrame:\n",
" try:\n",
" folder_counts = list_bq_folder_contents_colmap(table_id)\n",
" data = [\n",
@@ -959,11 +956,11 @@
" ]\n",
" return pd.DataFrame(data).sort_values(by=\"Experiment ID\").reset_index(drop=True)\n",
" except Exception as e:\n",
" logging.info(f\"Please upload a dataset to obtain an {e}.\", exc_info=True)\n",
" logging.info(f\"Exception encountered in {e}.\", exc_info=True)\n",
" return pd.DataFrame()\n",
"\n",
"\n",
"def get_bq_folders_dataframe_training(table_id: str) -> pd.DataFrame:\n",
"def get_bq_folders_dataframe_training(table_id: str) -\u003e pd.DataFrame:\n",
" try:\n",
" folder_counts = list_bq_folder_contents_training(table_id)\n",
" data = [\n",
@@ -985,11 +982,11 @@
" ]\n",
" return pd.DataFrame(data).sort_values(by=\"Experiment ID\").reset_index(drop=True)\n",
" except Exception as e:\n",
" logging.info(f\"Please upload a dataset to obtain an {e}.\", exc_info=True)\n",
" logging.info(f\"Exception encountered in {e}.\", exc_info=True)\n",
" return pd.DataFrame()\n",
"\n",
"\n",
"def get_bq_folders_dataframe_rendering(table_id: str) -> pd.DataFrame:\n",
"def get_bq_folders_dataframe_rendering(table_id: str) -\u003e pd.DataFrame:\n",
" try:\n",
" folder_counts = list_bq_folder_contents_rendering(table_id)\n",
" data = [\n",
@@ -1020,7 +1017,7 @@
" ]\n",
" return pd.DataFrame(data).sort_values(by=\"Experiment ID\").reset_index(drop=True)\n",
" except Exception as e:\n",
" logging.info(f\"Please upload a dataset to obtain an {e}.\", exc_info=True)\n",
" logging.info(f\"Exception encountered in {e}.\", exc_info=True)\n",
" return pd.DataFrame()\n",
"\n",
"\n",
@@ -1444,10 +1441,6 @@
" frame_rate=1,\n",
" progress=gr.Progress(),\n",
" ):\n",
" if not scene_name:\n",
" gr.Warning(\"Please provide a name for the scene. This field is required.\")\n",
" return get_bq_folders_dataframe_colmap(colmap_table_id)\n",
"\n",
" if gcs_folder:\n",
" prepare_instance_images_from_gcs(\n",
" scene_name, experiment_name, gcs_folder, progress\n",
@@ -1650,7 +1643,7 @@
"\n",
" def on_row_select(folders_df, evt: gr.SelectData):\n",
" row_index = evt.index[0]\n",
" if 0 <= row_index < len(folders_df):\n",
" if 0 \u003c= row_index \u003c len(folders_df):\n",
" selected_value = folders_df.iloc[row_index, 0]\n",
" colmap_job_id = folders_df[\n",
" folders_df[\"Experiment ID\"] == selected_value\n",
@@ -1901,9 +1894,9 @@
" data_training_job_name = get_job_name_with_datetime(\"cloudnerf_gradio_training\")\n",
" unique_experiments.add(selected_folder)\n",
"\n",
" colmap_job_status = colmap_df[colmap_df[\"Experiment ID\"] == selected_folder][\n",
" \"Job Status\"\n",
" ].iloc[0]\n",
" colmap_job_status = colmap_df[\n",
" colmap_df[\"Experiment ID\"] == selected_folder\n",
" ][\"Job Status\"].iloc[0]\n",
" if colmap_job_status != \"SUCCEEDED\":\n",
" gr.Warning(\"Please wait until the colmap job is finished.\")\n",
" return get_bq_folders_dataframe_training(training_table_id)\n",
@@ -2077,7 +2070,7 @@
"\n",
" def on_row_select(training_df, evt: gr.SelectData):\n",
" row_index = evt.index[0]\n",
" if 0 <= row_index < len(training_df):\n",
" if 0 \u003c= row_index \u003c len(training_df):\n",
" selected_value = training_df.iloc[row_index, 0]\n",
" training_job_id = training_df[\n",
" training_df[\"Experiment ID\"] == selected_value\n",
@@ -2229,6 +2222,7 @@
" rendering_df = get_bq_folders_dataframe_rendering(rendering_table_id)\n",
" training_df = get_bq_folders_dataframe_training(training_table_id)\n",
"\n",
"\n",
" training_job_status = training_df[\n",
" training_df[\"Experiment ID\"] == selected_folder\n",
" ][\"Job Status\"].iloc[0]\n",
@@ -2460,7 +2454,7 @@
"\n",
" def on_row_select(rendering_df, evt: gr.SelectData):\n",
" row_index = evt.index[0]\n",
" if 0 <= row_index < len(rendering_df):\n",
" if 0 \u003c= row_index \u003c len(rendering_df):\n",
" selected_value = rendering_df.iloc[row_index, 0]\n",
" rendering_job_status = rendering_df[\n",
" rendering_df[\"Experiment ID\"] == selected_value\n",
@@ -2902,7 +2896,7 @@
"\n",
" def on_row_select(folders_df, evt: gr.SelectData):\n",
" row_index = evt.index[0]\n",
" if 0 <= row_index < len(folders_df):\n",
" if 0 \u003c= row_index \u003c len(folders_df):\n",
" selected_value = folders_df.iloc[row_index, 0]\n",
" colmap_job_id = folders_df[\n",
" folders_df[\"Experiment ID\"] == selected_value\n",
@@ -3175,6 +3169,7 @@
"metadata": {
"colab": {
"name": "model_garden_camp_zipnerf_gradio.ipynb",
"provenance": [],
"toc_visible": true
},
"kernelspec": {
Original file line number Diff line number Diff line change
@@ -651,7 +651,7 @@
"jax_owl2_endpoint = jax_owl2_model.deploy(\n",
" deployed_model_display_name=\"jax_owl2_deployed\",\n",
" traffic_split={\"0\": 100},\n",
" machine_type=\"n1-highmem-16\",\n",
" machine_type=\"n1-highmem-64\",\n",
" min_replica_count=1,\n",
" max_replica_count=1,\n",
")"
Original file line number Diff line number Diff line change
@@ -72,9 +72,7 @@
"source": [
"## Overview\n",
"\n",
"This notebook demonstrates finetuning a [JAX ViT-B16 model](https://github.com/google-research/vision_transformer#available-vit-models) for image classification task on GPU and deploying them on Vertex AI for online prediction.\n",
"\n",
"Learn more about [Generative AI Support in Vertex AI](https://cloud.google.com/blog/products/ai-machine-learning/vertex-ai-model-garden-and-generative-ai-studio)."
"This notebook demonstrates finetuning a [JAX ViT-B16 model](https://github.com/google-research/vision_transformer#available-vit-models) for image classification task on GPU and deploying them on Vertex AI for online prediction."
]
},
{
@@ -85,21 +83,18 @@
"source": [
"### Objective\n",
"\n",
"In this tutorial, you learn how fine-tune, deploy and predict with a Vertex AI pretrained JAX Vision Transformer based model.\n",
"In this tutorial, you learn how to:\n",
"\n",
"- Finetune a JAX Vision Transformer based model.\n",
"- Upload the model to [Model Registry](https://cloud.google.com/vertex-ai/docs/model-registry/introduction).\n",
"- Deploy the model on [Endpoint](https://cloud.google.com/vertex-ai/docs/predictions/using-private-endpoints).\n",
"- Run online predictions for image classification.\n",
"\n",
"This tutorial uses the following Google Cloud ML services and resources:\n",
"\n",
"- Vertex AI Model Garden\n",
"- Vertex AI Training\n",
"- Vertex AI Model Registry\n",
"- Vertex AI Online Prediction\n",
"\n",
"The steps performed are:\n",
"\n",
"- Finetune a JAX Vision Transformer based model.\n",
"- Upload the model to [Model Registry](https://cloud.google.com/vertex-ai/docs/model-registry/introduction).\n",
"- Deploy the model on [Endpoint](https://cloud.google.com/vertex-ai/docs/predictions/using-private-endpoints).\n",
"- Run online predictions for image classification.\n"
"- Vertex AI Online Prediction"
]
},
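The four steps listed above correspond to a short sequence of Vertex AI SDK calls. A minimal sketch of the upload, deploy, and predict steps, assuming a fine-tuned model has already been exported as a TF SavedModel; the bucket path, display name, and serving image below are illustrative placeholders, not values taken from this notebook:

```python
from google.cloud import aiplatform

aiplatform.init(project="your-project-id", location="us-central1")

# Upload the exported SavedModel to Vertex AI Model Registry.
model = aiplatform.Model.upload(
    display_name="jax-vit-finetuned",  # placeholder display name
    artifact_uri="gs://your-bucket/vit-finetuned-savedmodel/",  # assumed export path
    # Illustrative prebuilt TF2 serving image; the notebook itself uses an
    # optimized TF runtime image defined later.
    serving_container_image_uri="us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-12:latest",
)

# Deploy the model to an endpoint for online prediction.
endpoint = model.deploy(
    machine_type="n1-standard-8",
    min_replica_count=1,
    max_replica_count=1,
)

# Request an online prediction; the instance format depends on the model's
# serving signature (a base64-encoded image is assumed here).
response = endpoint.predict(instances=[{"image_bytes": {"b64": "..."}}])
print(response.predictions)
```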
{
@@ -170,10 +165,10 @@
"outputs": [],
"source": [
"# Automatically restart kernel after installs so that your environment can access the new packages.\n",
"# import IPython\n",
"import IPython\n",
"\n",
"# app = IPython.Application.instance()\n",
"# app.kernel.do_shutdown(True)"
"app = IPython.Application.instance()\n",
"app.kernel.do_shutdown(True)"
]
},
{
@@ -219,7 +214,7 @@
},
"outputs": [],
"source": [
"PROJECT_ID = \"[your-project-id]\" # @param {type:\"string\"}\n",
"PROJECT_ID = \"your-project-id\" # @param {type:\"string\"}\n",
"\n",
"# Set the project id\n",
"! gcloud config set project {PROJECT_ID}"
@@ -434,9 +429,11 @@
"outputs": [],
"source": [
"# The pre-built training docker image.\n",
"TRAIN_DOCKER_URI = \"us-docker.pkg.dev/vertex-ai-restricted/vertex-vision-model-garden-dockers/jax-vit-train-gpu\"\n",
"TRAIN_DOCKER_URI = (\n",
" \"us-docker.pkg.dev/vertex-ai/vertex-vision-model-garden-dockers/jax-vit-train-gpu\"\n",
")\n",
"# The pre-built TF SavedModel conversion docker image.\n",
"MODEL_CONVERSION_DOCKER_URI = \"us-docker.pkg.dev/vertex-ai-restricted/vertex-vision-model-garden-dockers/jax-vit-model-conversion\"\n",
"MODEL_CONVERSION_DOCKER_URI = \"us-docker.pkg.dev/vertex-ai/vertex-vision-model-garden-dockers/jax-vit-model-conversion\"\n",
"# The pre-built prediction docker image.\n",
"OPTIMIZED_TF_RUNTIME_IMAGE_URI = (\n",
" \"us-docker.pkg.dev/vertex-ai-restricted/prediction/tf_opt-gpu.nightly:latest\"\n",
@@ -516,7 +513,7 @@
"### Prepare dataset\n",
"\n",
"If you are not using [TensorFlow Datasets](https://www.tensorflow.org/datasets/catalog/overview#all_datasets), then you need to prepare your dataset and store it on Cloud Storage. The following example shows\n",
"how to do this for the [tf_flowers dataset](https://www.tensorflow.org/datasets/catalog/tf_flowers). If using TensorFlow Datasets, you pass\n",
"how to do this for the [tf_flowers dataset](https://www.tensorflow.org/datasets/catalog/tf_flowers). If using TensorFlow Datasets, you can just pass\n",
"the dataset name such as `tf_flowers` to the `--config.dataset` flag and bypass this section."
]
},
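For the TensorFlow Datasets path described above, the catalog name goes straight into the training job's arguments. A minimal sketch, assuming `aiplatform.init(...)` has already been called with a staging bucket and that `TRAIN_DOCKER_URI` is the pre-built training image defined earlier in the notebook; the display name and machine settings are illustrative:

```python
from google.cloud import aiplatform

# Fine-tune on a TFDS catalog dataset, bypassing manual dataset preparation.
job = aiplatform.CustomContainerTrainingJob(
    display_name="jax-vit-finetune-tf-flowers",  # placeholder display name
    container_uri=TRAIN_DOCKER_URI,  # pre-built training image from this notebook
)
job.run(
    args=["--config.dataset=tf_flowers"],  # TFDS name instead of a GCS path
    replica_count=1,
    machine_type="n1-standard-8",  # illustrative machine shape
    accelerator_type="NVIDIA_TESLA_V100",  # illustrative GPU choice
    accelerator_count=1,
)
```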
@@ -864,7 +861,7 @@
"jax_vit_model.delete()\n",
"\n",
"# Delete Cloud Storage objects that were created.\n",
"delete_bucket = True\n",
"delete_bucket = False\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" ! gsutil -m rm -r $BUCKET_URI"
]