From a29af56ae3c31f07115cb938bcf3f0f77241b725 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 17 May 2025 21:30:28 +0530 Subject: [PATCH 01/31] feat: add throughput_mode to UpdateDatabaseDdlRequest to be used by Spanner Migration Tool. See https://github.com/GoogleCloudPlatform/spanner-migration-tool (#2304) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add throughput_mode to UpdateDatabaseDdlRequest to be used by Spanner Migration Tool. See https://github.com/GoogleCloudPlatform/spanner-migration-tool PiperOrigin-RevId: 759735605 Source-Link: https://github.com/googleapis/googleapis/commit/cbc536a4cd772caa221fdedec950f3bd3c874a74 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a5d1878f4f0bf01eb9e88c0c8c1692a238fe6e4c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTVkMTg3OGY0ZjBiZjAxZWI5ZTg4YzBjOGMxNjkyYTIzOGZlNmU0YyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../database/v1/spanner_database_admin.proto | 5 ++++ protos/protos.d.ts | 6 +++++ protos/protos.js | 23 +++++++++++++++++++ protos/protos.json | 7 ++++++ src/v1/database_admin_client.ts | 4 ++++ 5 files changed, 45 insertions(+) diff --git a/protos/google/spanner/admin/database/v1/spanner_database_admin.proto b/protos/google/spanner/admin/database/v1/spanner_database_admin.proto index 084f98c68..36e06f1e1 100644 --- a/protos/google/spanner/admin/database/v1/spanner_database_admin.proto +++ b/protos/google/spanner/admin/database/v1/spanner_database_admin.proto @@ -813,6 +813,11 @@ message UpdateDatabaseDdlRequest { // For more details, see protobuffer [self // description](https://developers.google.com/protocol-buffers/docs/techniques#self-description). bytes proto_descriptors = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. This field is exposed to be used by the Spanner Migration Tool. + // For more details, see + // [SMT](https://github.com/GoogleCloudPlatform/spanner-migration-tool). + bool throughput_mode = 5 [(google.api.field_behavior) = OPTIONAL]; } // Action information extracted from a DDL statement. This proto is used to diff --git a/protos/protos.d.ts b/protos/protos.d.ts index e07ab8c2b..18501fd1f 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -11486,6 +11486,9 @@ export namespace google { /** UpdateDatabaseDdlRequest protoDescriptors */ protoDescriptors?: (Uint8Array|Buffer|string|null); + + /** UpdateDatabaseDdlRequest throughputMode */ + throughputMode?: (boolean|null); } /** Represents an UpdateDatabaseDdlRequest. */ @@ -11509,6 +11512,9 @@ export namespace google { /** UpdateDatabaseDdlRequest protoDescriptors. */ public protoDescriptors: (Uint8Array|Buffer|string); + /** UpdateDatabaseDdlRequest throughputMode. */ + public throughputMode: boolean; + /** * Creates a new UpdateDatabaseDdlRequest instance using the specified properties. 
* @param [properties] Properties to set diff --git a/protos/protos.js b/protos/protos.js index 225e78fc9..0e2ea8d79 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -29635,6 +29635,7 @@ * @property {Array.|null} [statements] UpdateDatabaseDdlRequest statements * @property {string|null} [operationId] UpdateDatabaseDdlRequest operationId * @property {Uint8Array|null} [protoDescriptors] UpdateDatabaseDdlRequest protoDescriptors + * @property {boolean|null} [throughputMode] UpdateDatabaseDdlRequest throughputMode */ /** @@ -29685,6 +29686,14 @@ */ UpdateDatabaseDdlRequest.prototype.protoDescriptors = $util.newBuffer([]); + /** + * UpdateDatabaseDdlRequest throughputMode. + * @member {boolean} throughputMode + * @memberof google.spanner.admin.database.v1.UpdateDatabaseDdlRequest + * @instance + */ + UpdateDatabaseDdlRequest.prototype.throughputMode = false; + /** * Creates a new UpdateDatabaseDdlRequest instance using the specified properties. * @function create @@ -29718,6 +29727,8 @@ writer.uint32(/* id 3, wireType 2 =*/26).string(message.operationId); if (message.protoDescriptors != null && Object.hasOwnProperty.call(message, "protoDescriptors")) writer.uint32(/* id 4, wireType 2 =*/34).bytes(message.protoDescriptors); + if (message.throughputMode != null && Object.hasOwnProperty.call(message, "throughputMode")) + writer.uint32(/* id 5, wireType 0 =*/40).bool(message.throughputMode); return writer; }; @@ -29772,6 +29783,10 @@ message.protoDescriptors = reader.bytes(); break; } + case 5: { + message.throughputMode = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -29823,6 +29838,9 @@ if (message.protoDescriptors != null && message.hasOwnProperty("protoDescriptors")) if (!(message.protoDescriptors && typeof message.protoDescriptors.length === "number" || $util.isString(message.protoDescriptors))) return "protoDescriptors: buffer expected"; + if (message.throughputMode != null && message.hasOwnProperty("throughputMode")) + if (typeof message.throughputMode !== "boolean") + return "throughputMode: boolean expected"; return null; }; @@ -29854,6 +29872,8 @@ $util.base64.decode(object.protoDescriptors, message.protoDescriptors = $util.newBuffer($util.base64.length(object.protoDescriptors)), 0); else if (object.protoDescriptors.length >= 0) message.protoDescriptors = object.protoDescriptors; + if (object.throughputMode != null) + message.throughputMode = Boolean(object.throughputMode); return message; }; @@ -29882,6 +29902,7 @@ if (options.bytes !== Array) object.protoDescriptors = $util.newBuffer(object.protoDescriptors); } + object.throughputMode = false; } if (message.database != null && message.hasOwnProperty("database")) object.database = message.database; @@ -29894,6 +29915,8 @@ object.operationId = message.operationId; if (message.protoDescriptors != null && message.hasOwnProperty("protoDescriptors")) object.protoDescriptors = options.bytes === String ? $util.base64.encode(message.protoDescriptors, 0, message.protoDescriptors.length) : options.bytes === Array ? 
Array.prototype.slice.call(message.protoDescriptors) : message.protoDescriptors; + if (message.throughputMode != null && message.hasOwnProperty("throughputMode")) + object.throughputMode = message.throughputMode; return object; }; diff --git a/protos/protos.json b/protos/protos.json index f12164254..c178e9765 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -3322,6 +3322,13 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "throughputMode": { + "type": "bool", + "id": 5, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } }, diff --git a/src/v1/database_admin_client.ts b/src/v1/database_admin_client.ts index 4572ad618..d0e38d49d 100644 --- a/src/v1/database_admin_client.ts +++ b/src/v1/database_admin_client.ts @@ -2814,6 +2814,10 @@ export class DatabaseAdminClient { * ``` * For more details, see protobuffer [self * description](https://developers.google.com/protocol-buffers/docs/techniques#self-description). + * @param {boolean} [request.throughputMode] + * Optional. This field is exposed to be used by the Spanner Migration Tool. + * For more details, see + * [SMT](https://github.com/GoogleCloudPlatform/spanner-migration-tool). * @param {object} [options] * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. * @returns {Promise} - The promise which resolves to an array. From 63573a1dc7d73c50c211111bc35e20204a83a7dd Mon Sep 17 00:00:00 2001 From: surbhigarg92 Date: Wed, 21 May 2025 21:53:57 +0530 Subject: [PATCH 02/31] chore: updated the maxcommitdelay sample to be compatible with other lang (#2307) --- samples/max-commit-delay.js | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/samples/max-commit-delay.js b/samples/max-commit-delay.js index 56419c6b3..3dbbabcea 100644 --- a/samples/max-commit-delay.js +++ b/samples/max-commit-delay.js @@ -23,7 +23,6 @@ function main( projectId = 'my-project-id', ) { // [START spanner_set_max_commit_delay] - // Imports the Google Cloud client library. const {Spanner, protos} = require('@google-cloud/spanner'); /** @@ -38,8 +37,7 @@ function main( projectId: projectId, }); - async function spannerSetMaxCommitDelay() { - // Gets a reference to a Cloud Spanner instance and database. + async function setMaxCommitDelay() { const instance = spanner.instance(instanceId); const database = instance.database(databaseId); @@ -62,11 +60,9 @@ function main( ); await transaction.commit({ - // The maximum amount of time to delay the transaction to improve - // throughput. 
maxCommitDelay: protos.google.protobuf.Duration({ seconds: 0, // 0 seconds - nanos: 100000000, // 100,000,000 nanoseconds = 100 milliseconds + nanos: 100000000, // 100 milliseconds }), }); } catch (err) { @@ -77,7 +73,7 @@ function main( } }); } - spannerSetMaxCommitDelay(); + setMaxCommitDelay(); // [END spanner_set_max_commit_delay] } process.on('unhandledRejection', err => { From e0ef6d8f61aafd5a8cd108256a36ea8727aa3938 Mon Sep 17 00:00:00 2001 From: aksharauke <126752897+aksharauke@users.noreply.github.com> Date: Thu, 22 May 2025 10:45:51 +0530 Subject: [PATCH 03/31] chore: disable splits integration tests (#2306) Co-authored-by: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> --- samples/system-test/spanner.test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/system-test/spanner.test.js b/samples/system-test/spanner.test.js index f50a0a6a9..a3fa9b558 100644 --- a/samples/system-test/spanner.test.js +++ b/samples/system-test/spanner.test.js @@ -2494,8 +2494,8 @@ describe('Autogenerated Admin Clients', () => { ); }); - // add split points - it('should add split points', async () => { + // add split points, enable when drop database automatically reclaims quota + it.skip('should add split points', async () => { const output = execSync( `node database-add-split-points.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); From 3c3db1318906a99a33e10d61838b1ac7ef113429 Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Fri, 23 May 2025 05:15:21 +0000 Subject: [PATCH 04/31] chore: add sample for transaction timeout configuration (#2308) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: add sample for transaction timeout configuration * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- README.md | 1 + samples/README.md | 18 +++++++ samples/system-test/spanner.test.js | 9 ++++ samples/transaction-timeout.js | 77 +++++++++++++++++++++++++++++ 4 files changed, 105 insertions(+) create mode 100644 samples/transaction-timeout.js diff --git a/README.md b/README.md index ed4c06756..cff2046b8 100644 --- a/README.md +++ b/README.md @@ -208,6 +208,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-spanner/tre | Drops a foreign key constraint with delete cascade action | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/table-drop-foreign-key-constraint-delete-cascade.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/table-drop-foreign-key-constraint-delete-cascade.js,samples/README.md) | | Timestamp | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/timestamp.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/timestamp.js,samples/README.md) | | Executes a read/write transaction with transaction and request tags | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/transaction-tag.js) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/transaction-tag.js,samples/README.md) | +| Executes a read/write transaction with transaction timeout | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/transaction-timeout.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/transaction-timeout.js,samples/README.md) | | Transaction | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/transaction.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/transaction.js,samples/README.md) | | Updates a backup schedule | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/update-backup-schedule.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/update-backup-schedule.js,samples/README.md) | | Updates an instance. | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/update-instance-default-backup-schedule-type.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/update-instance-default-backup-schedule-type.js,samples/README.md) | diff --git a/samples/README.md b/samples/README.md index bf09c546a..b91aad776 100644 --- a/samples/README.md +++ b/samples/README.md @@ -133,6 +133,7 @@ and automatic, synchronous replication for high availability. * [Drops a foreign key constraint with delete cascade action](#drops-a-foreign-key-constraint-with-delete-cascade-action) * [Timestamp](#timestamp) * [Executes a read/write transaction with transaction and request tags](#executes-a-read/write-transaction-with-transaction-and-request-tags) + * [Executes a read/write transaction with transaction timeout](#executes-a-read/write-transaction-with-transaction-timeout) * [Transaction](#transaction) * [Updates a backup schedule](#updates-a-backup-schedule) * [Updates an instance.](#updates-an-instance.) @@ -2175,6 +2176,23 @@ __Usage:__ +### Executes a read/write transaction with transaction timeout + +View the [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/transaction-timeout.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/transaction-timeout.js,samples/README.md) + +__Usage:__ + + +`node transaction-timeout.js ` + + +----- + + + + ### Transaction View the [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/transaction.js). 
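
Editor's note: the essence of the new sample documented above is passing a `timeout` value (in milliseconds) as the options argument to `database.runTransactionAsync`. The following is a condensed, hedged sketch of that pattern; the project, instance, and database IDs are placeholders, and the complete sample (`samples/transaction-timeout.js`) appears in full later in this patch.

```js
// Condensed sketch of the transaction-timeout pattern added in this patch.
// Placeholder IDs below are assumptions; see samples/transaction-timeout.js for the full sample.
const {Spanner} = require('@google-cloud/spanner');

async function runWithTimeout() {
  const spanner = new Spanner({projectId: 'my-project-id'});
  const database = spanner.instance('my-instance').database('my-database');
  try {
    // The timeout option (milliseconds) bounds the entire read/write transaction.
    await database.runTransactionAsync({timeout: 60000}, async tx => {
      const [rowCount] = await tx.runUpdate(
        "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES (100, 'George', 'Washington')",
      );
      console.log(`${rowCount} record inserted.`);
      await tx.commit();
    });
  } catch (err) {
    console.error('ERROR:', err);
  } finally {
    await database.close();
  }
}
runWithTimeout();
```

As the system test added in this patch shows, the sample script itself is invoked with an instance ID, database ID, and project ID on the command line.
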
diff --git a/samples/system-test/spanner.test.js b/samples/system-test/spanner.test.js index a3fa9b558..ef54d3bba 100644 --- a/samples/system-test/spanner.test.js +++ b/samples/system-test/spanner.test.js @@ -36,6 +36,7 @@ const rpcPriorityQueryPartitionsCommand = 'node rpc-priority-query-partitions.js'; const transactionCmd = 'node transaction.js'; const transactionTagCommand = 'node transaction-tag.js'; +const transactionTimeoutCommand = 'node transaction-timeout.js'; const requestTagCommand = 'node request-tag.js'; const timestampCmd = 'node timestamp.js'; const structCmd = 'node struct.js'; @@ -1237,6 +1238,14 @@ describe('Autogenerated Admin Clients', () => { assert.include(output, 'Inserted new outdoor venue'); }); + // read_write_transaction with transaction timeout + it('should execute a read/write transaction with a transaction timeout of 60 seconds', async () => { + const output = execSync( + `${transactionTimeoutCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, + ); + assert.include(output, '1 record inserted.'); + }); + // add_json_column it('should add a VenueDetails column to Venues example table', async () => { const output = execSync( diff --git a/samples/transaction-timeout.js b/samples/transaction-timeout.js new file mode 100644 index 000000000..e51c94113 --- /dev/null +++ b/samples/transaction-timeout.js @@ -0,0 +1,77 @@ +/** + * Copyright 2025 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Executes a read/write transaction with transaction timeout +// usage: node transaction-timeout.js + +'use strict'; + +function main(instanceId, databaseId, projectId) { + // [START spanner_transaction_timeout] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function executeTransactionWithTimeout() { + // Gets a reference to a Cloud Spanner instance and database. 
+ const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + const options = { + timeout: 60000, // 60 seconds timeout + }; + + try { + await database.runTransactionAsync(options, async tx => { + const [results] = await tx.run( + 'SELECT SingerId, FirstName, LastName FROM Singers ORDER BY LastName, FirstName', + ); + results.forEach(result => { + console.log( + `${result[0].name}: ${result[0].value.value}, ${result[1].name}: ${result[1].value}, ${result[2].name}: ${result[2].value}`, + ); + }); + const sql = + "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES (100, 'George', 'Washington')"; + const [rowCount] = await tx.runUpdate(sql); + console.log(`${rowCount} record inserted.`); + await tx.commit(); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + await database.close(); + } + } + executeTransactionWithTimeout(); + // [END spanner_transaction_timeout] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); From 610d1b989ba186c0758791343deaa7f683c4bd26 Mon Sep 17 00:00:00 2001 From: Lester Szeto Date: Fri, 23 May 2025 02:21:27 -0700 Subject: [PATCH 05/31] Feat: Add Custom OpenTelemetry Exporter in for Service Metrics (#2272) * chore: Add Custom OpenTelemetry Exporter in for Service Metrics * fix: lint errors * chore: migrate metrics service API from googleapis to @google-cloud/monitoring * fix: correct gfe latencies metric name * chore: add batch unit test and update gauge double handling --------- Co-authored-by: Ravjot Brar <83892020+ravjotbrar@users.noreply.github.com> Co-authored-by: Jeremy Parr-Pearson <94406158+jeremyprime@users.noreply.github.com> Co-authored-by: surbhigarg92 --- package.json | 4 + src/metrics/README.md | 19 + src/metrics/constants.ts | 62 ++++ src/metrics/external-types.ts | 37 ++ src/metrics/spanner-metrics-exporter.ts | 138 ++++++++ src/metrics/transform.ts | 307 ++++++++++++++++ test/metrics/spanner-metrics-exporter.ts | 270 ++++++++++++++ test/metrics/transform.ts | 432 +++++++++++++++++++++++ 8 files changed, 1269 insertions(+) create mode 100644 src/metrics/README.md create mode 100644 src/metrics/constants.ts create mode 100644 src/metrics/external-types.ts create mode 100644 src/metrics/spanner-metrics-exporter.ts create mode 100644 src/metrics/transform.ts create mode 100644 test/metrics/spanner-metrics-exporter.ts create mode 100644 test/metrics/transform.ts diff --git a/package.json b/package.json index 9906df3c9..1a50cfc37 100644 --- a/package.json +++ b/package.json @@ -55,13 +55,17 @@ }, "dependencies": { "@google-cloud/common": "^6.0.0", + "@google-cloud/monitoring": "^5.0.0", + "@google-cloud/opentelemetry-resource-util": "^2.4.0", "@google-cloud/precise-date": "^5.0.0", "@google-cloud/projectify": "^5.0.0", "@google-cloud/promisify": "^5.0.0", + "@grpc/grpc-js": "^1.13.2", "@grpc/proto-loader": "^0.7.13", "@opentelemetry/api": "^1.9.0", "@opentelemetry/context-async-hooks": "^2.0.0", "@opentelemetry/core": "^2.0.0", + "@opentelemetry/sdk-metrics": "^1.30.1", "@opentelemetry/semantic-conventions": "^1.30.0", "@types/big.js": "^6.2.2", "@types/stack-trace": "^0.0.33", diff --git a/src/metrics/README.md b/src/metrics/README.md new file mode 100644 index 000000000..ca78af31a --- /dev/null +++ b/src/metrics/README.md @@ -0,0 +1,19 @@ +# Custom Metric Exporter +The custom metric exporter, as defined in [spanner-metrics-exporter.ts](./spanner-metrics-exporter.ts), is designed to work in conjunction 
with OpenTelemetry and the Spanner client. It converts data into its protobuf equivalent and sends it to Google Cloud Monitoring. + +## Filtering Criteria +The exporter filters metrics based on the following conditions, utilizing values defined in [constants.ts](./constants.ts): + +* Metrics with a scope set to `spanner-nodejs`. +* Metrics with one of the following predefined names: + * `attempt_latencies` + * `attempt_count` + * `operation_latencies` + * `operation_count` + * `gfe_latencies` + * `gfe_connectivity_error_count` + +## Service Endpoint +The exporter sends metrics to the Google Cloud Monitoring [service endpoint](https://cloud.google.com/python/docs/reference/monitoring/latest/google.cloud.monitoring_v3.services.metric_service.MetricServiceClient#google_cloud_monitoring_v3_services_metric_service_MetricServiceClient_create_service_time_series), distinct from the regular client endpoint. This service endpoint operates under a different quota limit than the user endpoint and features an additional server-side filter that only permits a predefined set of metrics to pass through. + +When introducing new service metrics, it is essential to ensure they are allowed through by the server-side filter as well. \ No newline at end of file diff --git a/src/metrics/constants.ts b/src/metrics/constants.ts new file mode 100644 index 000000000..64d740409 --- /dev/null +++ b/src/metrics/constants.ts @@ -0,0 +1,62 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +export const SPANNER_METER_NAME = 'spanner-nodejs'; +export const CLIENT_METRICS_PREFIX = 'spanner.googleapis.com/internal/client'; +export const SPANNER_RESOURCE_TYPE = 'spanner_instance_client'; + +// Monitored resource labels +export const MONITORED_RES_LABEL_KEY_PROJECT = 'project_id'; +export const MONITORED_RES_LABEL_KEY_INSTANCE = 'instance_id'; +export const MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG = 'instance_config'; +export const MONITORED_RES_LABEL_KEY_LOCATION = 'location'; +export const MONITORED_RES_LABEL_KEY_CLIENT_HASH = 'client_hash'; +export const MONITORED_RESOURCE_LABELS = new Set([ + MONITORED_RES_LABEL_KEY_PROJECT, + MONITORED_RES_LABEL_KEY_INSTANCE, + MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG, + MONITORED_RES_LABEL_KEY_LOCATION, + MONITORED_RES_LABEL_KEY_CLIENT_HASH, +]); + +// Metric labels +export const METRIC_LABEL_KEY_CLIENT_UID = 'client_uid'; +export const METRIC_LABEL_KEY_CLIENT_NAME = 'client_name'; +export const METRIC_LABEL_KEY_DATABASE = 'database'; +export const METRIC_LABEL_KEY_METHOD = 'method'; +export const METRIC_LABEL_KEY_STATUS = 'status'; +export const METRIC_LABELS = new Set([ + METRIC_LABEL_KEY_CLIENT_UID, + METRIC_LABEL_KEY_CLIENT_NAME, + METRIC_LABEL_KEY_DATABASE, + METRIC_LABEL_KEY_METHOD, + METRIC_LABEL_KEY_STATUS, +]); + +// Metric names +export const METRIC_NAME_OPERATION_LATENCIES = 'operation_latencies'; +export const METRIC_NAME_ATTEMPT_LATENCIES = 'attempt_latencies'; +export const METRIC_NAME_OPERATION_COUNT = 'operation_count'; +export const METRIC_NAME_ATTEMPT_COUNT = 'attempt_count'; +export const METRIC_NAME_GFE_LATENCIES = 'gfe_latencies'; +export const METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT = + 'gfe_connectivity_error_count'; +export const METRIC_NAMES = new Set([ + METRIC_NAME_OPERATION_LATENCIES, + METRIC_NAME_ATTEMPT_LATENCIES, + METRIC_NAME_GFE_LATENCIES, + METRIC_NAME_OPERATION_COUNT, + METRIC_NAME_ATTEMPT_COUNT, + METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, +]); diff --git a/src/metrics/external-types.ts b/src/metrics/external-types.ts new file mode 100644 index 000000000..07c08f9b0 --- /dev/null +++ b/src/metrics/external-types.ts @@ -0,0 +1,37 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {GoogleAuth} from 'google-auth-library'; + +export interface ExporterOptions { + /** + * Optional authentication options for Google services. + */ + auth: GoogleAuth; +} + +export enum MetricKind { + UNSPECIFIED = 'METRIC_KIND_UNSPECIFIED', + GAUGE = 'GAUGE', + DELTA = 'DELTA', + CUMULATIVE = 'CUMULATIVE', +} + +/** The value type of a metric. 
*/ +export enum ValueType { + VALUE_TYPE_UNSPECIFIED = 'VALUE_TYPE_UNSPECIFIED', + INT64 = 'INT64', + DOUBLE = 'DOUBLE', + DISTRIBUTION = 'DISTRIBUTION', +} diff --git a/src/metrics/spanner-metrics-exporter.ts b/src/metrics/spanner-metrics-exporter.ts new file mode 100644 index 000000000..98d291933 --- /dev/null +++ b/src/metrics/spanner-metrics-exporter.ts @@ -0,0 +1,138 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {PushMetricExporter, ResourceMetrics} from '@opentelemetry/sdk-metrics'; +import {ExportResult, ExportResultCode} from '@opentelemetry/core'; +import {ExporterOptions} from './external-types'; +import {MetricServiceClient} from '@google-cloud/monitoring'; +import {transformResourceMetricToTimeSeriesArray} from './transform'; +import {status} from '@grpc/grpc-js'; + +// Stackdriver Monitoring v3 only accepts up to 200 TimeSeries per +// CreateTimeSeries call. +export const MAX_BATCH_EXPORT_SIZE = 200; + +/** + * Format and sends metrics information to Google Cloud Monitoring. + */ +export class CloudMonitoringMetricsExporter implements PushMetricExporter { + private _projectId: string | void | Promise; + + private readonly _client: MetricServiceClient; + + constructor({auth}: ExporterOptions) { + this._client = new MetricServiceClient({auth: auth}); + + // Start this async process as early as possible. It will be + // awaited on the first export because constructors are synchronous + this._projectId = auth.getProjectId().catch(err => { + console.error(err); + }); + } + + /** + * Implementation for {@link PushMetricExporter.export}. + * Calls the async wrapper method {@link _exportAsync} and + * assures no rejected promises bubble up to the caller. + * + * @param metrics Metrics to be sent to the Google Cloud Monitoring backend + * @param resultCallback result callback to be called on finish + */ + export( + metrics: ResourceMetrics, + resultCallback: (result: ExportResult) => void, + ): void { + this._exportAsync(metrics).then(resultCallback, err => { + console.error(err.message); + resultCallback({code: ExportResultCode.FAILED, error: err}); + }); + } + + async shutdown(): Promise {} + async forceFlush(): Promise {} + + /** + * Asnyc wrapper for the {@link export} implementation. + * Writes the current values of all exported {@link MetricRecord}s + * to the Google Cloud Monitoring backend. 
+ * + * @param resourceMetrics Metrics to be sent to the Google Cloud Monitoring backend + */ + private async _exportAsync( + resourceMetrics: ResourceMetrics, + ): Promise { + if (this._projectId instanceof Promise) { + this._projectId = await this._projectId; + } + + if (!this._projectId) { + const error = new Error('expecting a non-blank ProjectID'); + console.error(error.message); + return {code: ExportResultCode.FAILED, error}; + } + + const timeSeriesList = + transformResourceMetricToTimeSeriesArray(resourceMetrics); + + let failure: {sendFailed: false} | {sendFailed: true; error: Error} = { + sendFailed: false, + }; + await Promise.all( + this._partitionList(timeSeriesList, MAX_BATCH_EXPORT_SIZE).map( + async batchedTimeSeries => this._sendTimeSeries(batchedTimeSeries), + ), + ).catch(e => { + const error = e as {code: number}; + if (error.code === status.PERMISSION_DENIED) { + console.warn( + `Need monitoring metric writer permission on project ${this._projectId}. Follow https://cloud.google.com/spanner/docs/view-manage-client-side-metrics#access-client-side-metrics to set up permissions`, + ); + } + const err = asError(e); + err.message = `Send TimeSeries failed: ${err.message}`; + failure = {sendFailed: true, error: err}; + console.error(`ERROR: ${err.message}`); + }); + + return failure.sendFailed + ? { + code: ExportResultCode.FAILED, + error: (failure as {sendFailed: boolean; error: Error}).error, + } + : {code: ExportResultCode.SUCCESS}; + } + + private async _sendTimeSeries(timeSeries) { + if (timeSeries.length === 0) { + return Promise.resolve(); + } + + // TODO: Use createServiceTimeSeries when it is available + await this._client.createTimeSeries({ + name: `projects/${this._projectId}`, + timeSeries: timeSeries, + }); + } + + /** Returns the minimum number of arrays of max size chunkSize, partitioned from the given array. */ + private _partitionList(list, chunkSize: number) { + return Array.from({length: Math.ceil(list.length / chunkSize)}, (_, i) => + list.slice(i * chunkSize, (i + 1) * chunkSize), + ); + } +} + +function asError(error: unknown): Error { + return error instanceof Error ? error : new Error(String(error)); +} diff --git a/src/metrics/transform.ts b/src/metrics/transform.ts new file mode 100644 index 000000000..4cba27f23 --- /dev/null +++ b/src/metrics/transform.ts @@ -0,0 +1,307 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { + Histogram, + MetricData, + DataPoint, + DataPointType, + ExponentialHistogram, + ResourceMetrics, +} from '@opentelemetry/sdk-metrics'; +import {MonitoredResource} from '@google-cloud/opentelemetry-resource-util'; +import * as path from 'path'; +import {MetricKind, ValueType} from './external-types'; +import { + SPANNER_METER_NAME, + CLIENT_METRICS_PREFIX, + SPANNER_RESOURCE_TYPE, + METRIC_LABELS, + MONITORED_RESOURCE_LABELS, + METRIC_NAMES, +} from './constants'; + +/** Transforms a OpenTelemetry instrument type to a GCM MetricKind. 
*/ +function _transformMetricKind(metric: MetricData): MetricKind { + switch (metric.dataPointType) { + case DataPointType.SUM: + return metric.isMonotonic ? MetricKind.CUMULATIVE : MetricKind.GAUGE; + case DataPointType.GAUGE: + return MetricKind.GAUGE; + case DataPointType.HISTOGRAM: + case DataPointType.EXPONENTIAL_HISTOGRAM: + return MetricKind.CUMULATIVE; + default: + exhaust(metric); + // No logging needed as it will be done in transformPoints() + return MetricKind.UNSPECIFIED; + } +} + +/** Transforms resource to Google Cloud Monitoring monitored resource */ +function _transformResource(labels: { + [key: string]: string; +}): MonitoredResource { + return { + type: SPANNER_RESOURCE_TYPE, + labels: labels, + } as MonitoredResource; +} + +/** Transforms a OpenTelemetry ValueType to a GCM ValueType. */ +function _transformValueType(metric: MetricData): ValueType { + const { + dataPointType, + descriptor: {name}, + } = metric; + + if ( + dataPointType === DataPointType.HISTOGRAM || + dataPointType === DataPointType.EXPONENTIAL_HISTOGRAM + ) { + return ValueType.DISTRIBUTION; + } else if (dataPointType === DataPointType.SUM) { + return ValueType.INT64; + } else if (dataPointType === DataPointType.GAUGE) { + return ValueType.DOUBLE; + } + console.warn('Encountered unexpected metric %s', name); + return ValueType.VALUE_TYPE_UNSPECIFIED; +} + +/** + * Convert the metrics data to a list of Google Cloud Monitoring time series. + */ +export function transformResourceMetricToTimeSeriesArray({ + scopeMetrics, +}: ResourceMetrics) { + if (!scopeMetrics) return []; + + return ( + scopeMetrics + // Only keep those whose scope.name matches 'spanner-nodejs'. + .filter(({scope: {name}}) => name === SPANNER_METER_NAME) + // Takes each metric array and flattens it into one array + .flatMap(({metrics}) => + // Only keeps metrics that match our spanner metric names + metrics.filter(metric => METRIC_NAMES.has(metric.descriptor.name)), + ) + // Flatmap the data points in each metric to create a TimeSeries for each point + .flatMap(metric => + metric.dataPoints.flatMap(dataPoint => + _createTimeSeries(metric, dataPoint), + ), + ) + ); +} +/** + * Creates a GCM TimeSeries. + */ +function _createTimeSeries(metric: MetricData, dataPoint: DataPoint) { + const type = path.posix.join(CLIENT_METRICS_PREFIX, metric.descriptor.name); + const {metricLabels: labels, monitoredResourceLabels} = + _extractLabels(dataPoint); + const transformedMetric = { + type, + labels, + }; + + return { + metric: transformedMetric, + resource: _transformResource(monitoredResourceLabels), + metricKind: _transformMetricKind(metric), + valueType: _transformValueType(metric), + points: [_transformPoint(metric, dataPoint)], + unit: metric.descriptor.unit, + }; +} + +/** + * Transform timeseries's point, so that metric can be uploaded to GCM. + */ +function _transformPoint(metric: MetricData, dataPoint: DataPoint) { + switch (metric.dataPointType) { + case DataPointType.SUM: + case DataPointType.GAUGE: + return { + value: _transformNumberValue( + _transformValueType(metric), + dataPoint.value as number, + ), + interval: { + // Add start time for non-gauge points + ...(metric.dataPointType === DataPointType.SUM && metric.isMonotonic + ? 
{ + startTime: _formatHrTimeToGcmTime(dataPoint.startTime), + } + : null), + endTime: _formatHrTimeToGcmTime(dataPoint.endTime), + }, + }; + case DataPointType.HISTOGRAM: + return { + value: _transformHistogramValue(dataPoint.value as Histogram), + interval: { + startTime: _formatHrTimeToGcmTime(dataPoint.startTime), + endTime: _formatHrTimeToGcmTime(dataPoint.endTime), + }, + }; + case DataPointType.EXPONENTIAL_HISTOGRAM: + return { + value: _transformExponentialHistogramValue( + dataPoint.value as ExponentialHistogram, + ), + interval: { + startTime: _formatHrTimeToGcmTime(dataPoint.startTime), + endTime: _formatHrTimeToGcmTime(dataPoint.endTime), + }, + }; + default: + exhaust(metric); + return { + value: dataPoint.value, + interval: { + endTime: _formatHrTimeToGcmTime(dataPoint.endTime), + }, + }; + } +} + +/** Extracts metric and monitored resource labels from data point */ +function _extractLabels({attributes = {}}: DataPoint) { + return Object.entries(attributes).reduce( + (result, [key, value]) => { + const normalizedKey = _normalizeLabelKey(key); + const val = value?.toString(); + + if (METRIC_LABELS.has(key)) result.metricLabels[normalizedKey] = val; + + if (MONITORED_RESOURCE_LABELS.has(key)) + result.monitoredResourceLabels[normalizedKey] = val; + + return result; + }, + {metricLabels: {}, monitoredResourceLabels: {}}, + ); +} + +function _normalizeLabelKey(key: string): string { + // Replace characters which are not Letter or Decimal_Number unicode category with "_", see + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Unicode_Property_Escapes + // + // Reimplementation of reference impl in Go: + // https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/blob/e955c204f4f2bfdc92ff0ad52786232b975efcc2/exporter/metric/metric.go#L595-L604 + let sanitized = key.replace(/[^\p{Letter}\p{Decimal_Number}_]/gu, '_'); + + if (sanitized[0].match(/\p{Decimal_Number}/u)) { + sanitized = 'key_' + sanitized; + } + return sanitized; +} + +/** Transforms a OpenTelemetry Point's value to a GCM Point value. */ +function _transformNumberValue(valueType: ValueType, value: number) { + if (valueType === ValueType.INT64) { + return {int64Value: Math.round(value).toString()}; + } else if (valueType === ValueType.DOUBLE) { + const doubleString = Number.isInteger(value) + ? `${value}.0` + : value.toString(); + return {doubleValue: doubleString}; + } + throw Error(`unsupported value type: ${valueType}`); +} + +function _transformHistogramValue(value: Histogram) { + return { + distributionValue: { + // sumOfSquaredDeviation param not aggregated + count: value.count.toString(), + mean: value.count && value.sum ? 
value.sum / value.count : 0, + bucketOptions: { + explicitBuckets: {bounds: value.buckets.boundaries}, + }, + bucketCounts: value.buckets.counts.map(value => value.toString()), + }, + }; +} + +function _transformExponentialHistogramValue(value: ExponentialHistogram) { + // Adapated from reference impl in Go which has more explanatory comments + // https://github.com/GoogleCloudPlatform/opentelemetry-operations-go/blob/v1.8.0/exporter/collector/metrics.go#L582 + const underflow = + value.zeroCount + + value.negative.bucketCounts.reduce((prev, current) => prev + current, 0); + const bucketCounts = [ + underflow, + ...value.positive.bucketCounts, + 0, // overflow bucket is always empty + ]; + + let bucketOptions; + if (value.positive.bucketCounts.length === 0) { + bucketOptions = { + explicitBuckets: {bounds: []}, + }; + } else { + const growthFactor = Math.pow(2, Math.pow(2, -value.scale)); //exp2(exp2(-value.scale)); + const scale = Math.pow(growthFactor, value.positive.offset); + bucketOptions = { + exponentialBuckets: { + growthFactor, + scale, + numFiniteBuckets: bucketCounts.length - 2, + }, + }; + } + + const mean = + value.sum === undefined || value.count === 0 ? 0 : value.sum / value.count; + + return { + distributionValue: { + // sumOfSquaredDeviation param not aggregated + count: value.count.toString(), + mean, + bucketOptions, + bucketCounts: bucketCounts.map(value => value.toString()), + }, + }; +} + +/** Transforms an OpenTelemetry time value to a GCM time value. */ +function _formatHrTimeToGcmTime(hrTime) { + return { + seconds: hrTime[0], + nanos: hrTime[1], + }; +} + +/** + * Assert switch case is exhaustive + */ +function exhaust(switchValue: never) { + return switchValue; +} + +export const _TEST_ONLY = { + _normalizeLabelKey, + _transformMetricKind, + _extractLabels, + _formatHrTimeToGcmTime, + _transformResource, + _transformPoint, + _transformValueType, + transformResourceMetricToTimeSeriesArray, +}; diff --git a/test/metrics/spanner-metrics-exporter.ts b/test/metrics/spanner-metrics-exporter.ts new file mode 100644 index 000000000..8e01f915d --- /dev/null +++ b/test/metrics/spanner-metrics-exporter.ts @@ -0,0 +1,270 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {MeterProvider, MetricReader} from '@opentelemetry/sdk-metrics'; +import {GoogleAuth} from 'google-auth-library'; +import { + CloudMonitoringMetricsExporter, + MAX_BATCH_EXPORT_SIZE, +} from '../../src/metrics/spanner-metrics-exporter'; +import { + SPANNER_METER_NAME, + METRIC_NAME_ATTEMPT_COUNT, + METRIC_NAME_ATTEMPT_LATENCIES, + METRIC_NAME_OPERATION_COUNT, + METRIC_NAME_OPERATION_LATENCIES, + METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + METRIC_NAME_GFE_LATENCIES, +} from '../../src/metrics/constants'; +import {Counter, Meter, Histogram} from '@opentelemetry/api'; +import {ExportResult, ExportResultCode} from '@opentelemetry/core'; + +const PROJECT_ID = 'test-project'; +const INSTANCE_ID = 'test-instance'; +const DATABASE_ID = 'test-db'; +const LOCATION = 'test-location'; + +const auth = new GoogleAuth(); +auth.getProjectId = sinon.stub().resolves(PROJECT_ID); + +// Ensure custom exporter is valid +describe('CustomExporter', () => { + it('should construct an exporter', () => { + const exporter = new CloudMonitoringMetricsExporter({auth}); + assert.ok(typeof exporter.export === 'function'); + assert.ok(typeof exporter.shutdown === 'function'); + }); + + it('should construct an exporter with credentials', () => { + const auth = new GoogleAuth({ + credentials: { + client_email: 'fake', + private_key: '', + }, + }); + auth.getProjectId = sinon.stub().resolves(PROJECT_ID); + const exporter = new CloudMonitoringMetricsExporter({auth}); + + assert(exporter); + return (exporter['_projectId'] as Promise).then(id => { + assert.deepStrictEqual(id, PROJECT_ID); + }); + }); + + it('should be able to shutdown', async () => { + const exporter = new CloudMonitoringMetricsExporter({auth}); + await assert.doesNotReject(exporter.shutdown()); + }); + + it('should be able to force flush', async () => { + const exporter = new CloudMonitoringMetricsExporter({auth}); + await assert.doesNotReject(exporter.forceFlush()); + }); +}); + +// Verify that the export call will convert and send the requests out. 
+describe('Export', () => { + class InMemoryMetricReader extends MetricReader { + protected async onForceFlush(): Promise {} + protected async onShutdown(): Promise {} + } + let reader: MetricReader; + let meterProvider: MeterProvider; + let meter: Meter; + let attempt_counter: Counter; + let operation_counter: Counter; + let gfe_connectivity_error_count: Counter; + let attempt_latency: Histogram; + let operation_latency: Histogram; + let gfe_latency: Histogram; + let metricAttributes: {[key: string]: string}; + let exporter: CloudMonitoringMetricsExporter; + + beforeEach(() => { + exporter = new CloudMonitoringMetricsExporter({auth}); + reader = new InMemoryMetricReader(); + meterProvider = new MeterProvider({ + readers: [reader], + }); + meter = meterProvider.getMeter(SPANNER_METER_NAME); + metricAttributes = { + project_id: PROJECT_ID, + instance_id: INSTANCE_ID, + instance_config: 'test_config', + location: LOCATION, + client_hash: 'test_hash', + client_uid: 'test_uid', + client_name: 'test_name', + database: DATABASE_ID, + method: 'test_method', + status: 'test_status', + other: 'ignored', + }; + + attempt_counter = meter.createCounter(METRIC_NAME_ATTEMPT_COUNT, { + description: 'Count of attempts', + unit: '1', + }); + + operation_counter = meter.createCounter(METRIC_NAME_OPERATION_COUNT, { + description: 'Count of operations', + unit: '1', + }); + + gfe_connectivity_error_count = meter.createCounter( + METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + { + description: 'Count of missing headers', + unit: '1', + }, + ); + + attempt_latency = meter.createHistogram(METRIC_NAME_ATTEMPT_LATENCIES, { + description: 'Test attempt latencies in ms', + unit: 'ms', + }); + + operation_latency = meter.createHistogram(METRIC_NAME_OPERATION_LATENCIES, { + description: 'Test operation latencies in ms', + unit: 'ms', + }); + + gfe_latency = meter.createHistogram(METRIC_NAME_GFE_LATENCIES, { + description: 'Test GFE latencies in ms', + unit: 'ms', + }); + }); + + it('should export GCM metrics', async () => { + attempt_counter.add(10, metricAttributes); + operation_counter.add(25, metricAttributes); + gfe_connectivity_error_count.add(12, metricAttributes); + attempt_latency.record(30, metricAttributes); + operation_latency.record(45, metricAttributes); + gfe_latency.record(22, metricAttributes); + + const {errors, resourceMetrics} = await reader.collect(); + if (errors.length !== 0) { + throw errors; + } + + const sendTimeSeriesStub = sinon + .stub(exporter as any, '_sendTimeSeries') + .resolves(); + + await new Promise(resolve => { + exporter.export(resourceMetrics, result => { + if (result.error) { + console.error(result.error); + } + resolve(result); + }); + }); + + assert(sendTimeSeriesStub.calledOnce); + + const [timeseries] = sendTimeSeriesStub.getCall(0).args; + + assert.strictEqual(timeseries.length, 6); + }); + + it('should exit early if resource metrics are empty', async () => { + const {errors, resourceMetrics} = await reader.collect(); + + if (errors.length !== 0) { + throw errors; + } + const sendTimeSeriesStub = sinon + .stub(exporter as any, '_sendTimeSeries') + .resolves(); + + await new Promise(resolve => { + exporter.export(resourceMetrics, result => { + if (result.error) { + console.error(result.error); + } + resolve(result); + }); + }); + + assert(sendTimeSeriesStub.notCalled); + }); + + it('should handle failed send during time series export with callback', async () => { + const sendTimeSeriesStub = sinon + .stub(exporter as any, '_sendTimeSeries') + .rejects(new Error('Network 
error')); + + attempt_counter.add(10, metricAttributes); + + const {resourceMetrics} = await reader.collect(); + + const resultCallbackSpy = sinon.spy(); + + exporter.export(resourceMetrics, resultCallbackSpy); + + await new Promise(resolve => setImmediate(resolve)); + + const callbackResult = resultCallbackSpy.getCall(0).args[0]; + assert.strictEqual(callbackResult.code, ExportResultCode.FAILED); + assert.strictEqual( + callbackResult.error.message, + 'Send TimeSeries failed: Network error', + ); + + assert(sendTimeSeriesStub.calledOnce); + }); + + it('should batch exports into multiple calls', async () => { + // Create metircs larger than the batch size + const numberOfDistinctMetrics = MAX_BATCH_EXPORT_SIZE * 2 + 1; + for (let i = 0; i < numberOfDistinctMetrics; i++) { + attempt_counter.add(1, {...metricAttributes, testId: `batch-test-${i}`}); + } + + const {resourceMetrics} = await reader.collect(); + + const sendTimeSeriesStub = sinon + .stub(exporter as any, '_sendTimeSeries') + .resolves(); + const resultCallbackSpy = sinon.spy(); + + exporter.export(resourceMetrics, resultCallbackSpy); + + await new Promise(resolve => setImmediate(resolve)); + + // Confirm number of metrics for each batch + const expectedNumberOfCalls = Math.ceil( + numberOfDistinctMetrics / MAX_BATCH_EXPORT_SIZE, + ); + assert.strictEqual(sendTimeSeriesStub.callCount, expectedNumberOfCalls); + assert.strictEqual( + sendTimeSeriesStub.getCall(0).args[0].length, + MAX_BATCH_EXPORT_SIZE, + ); + assert.strictEqual( + sendTimeSeriesStub.getCall(1).args[0].length, + MAX_BATCH_EXPORT_SIZE, + ); + assert.strictEqual( + sendTimeSeriesStub.getCall(2).args[0].length, + numberOfDistinctMetrics % MAX_BATCH_EXPORT_SIZE, + ); + + const callbackResult = resultCallbackSpy.getCall(0).args[0]; + assert.strictEqual(callbackResult.code, ExportResultCode.SUCCESS); + }); +}); diff --git a/test/metrics/transform.ts b/test/metrics/transform.ts new file mode 100644 index 000000000..223829812 --- /dev/null +++ b/test/metrics/transform.ts @@ -0,0 +1,432 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import * as assert from 'assert'; +import {_TEST_ONLY} from '../../src/metrics/transform'; +import { + AggregationTemporality, + DataPoint, + DataPointType, + ExponentialHistogramMetricData, + GaugeMetricData, + HistogramMetricData, + SumMetricData, + Histogram, + ExponentialHistogram, + MeterProvider, + MetricReader, +} from '@opentelemetry/sdk-metrics'; +import { + Attributes, + Counter, + Meter, + ValueType as OTValueType, +} from '@opentelemetry/api'; +import { + SPANNER_RESOURCE_TYPE, + SPANNER_METER_NAME, + METRIC_NAME_ATTEMPT_COUNT, +} from '../../src/metrics/constants'; +import {MetricKind, ValueType} from '../../src/metrics/external-types'; + +const { + _normalizeLabelKey, + _transformMetricKind, + _extractLabels, + _formatHrTimeToGcmTime, + _transformResource, + _transformValueType, + _transformPoint, + transformResourceMetricToTimeSeriesArray, +} = _TEST_ONLY; + +describe('transform', () => { + let reader: MetricReader; + let meterProvider: MeterProvider; + let attributes: Attributes; + let metricSum: SumMetricData; + let metricGauge: GaugeMetricData; + let metricHistogram: HistogramMetricData; + let metricExponentialHistogram: ExponentialHistogramMetricData; + let metricUnknown; + let sumDataPoint: DataPoint; + let gaugeDataPoint: DataPoint; + let histogramDataPoint: DataPoint; + let exponentialHistogramDataPoint: DataPoint; + + class InMemoryMetricReader extends MetricReader { + protected async onShutdown(): Promise {} + protected async onForceFlush(): Promise {} + } + + before(() => { + reader = new InMemoryMetricReader(); + meterProvider = new MeterProvider({ + readers: [reader], + }); + attributes = { + project_id: 'project_id', + instance_id: 'instance_id', + instance_config: 'test_config', + location: 'test_location', + client_hash: 'test_hash', + client_uid: 'test_uid', + client_name: 'test_name', + database: 'database_id', + method: 'test_method', + status: 'test_status', + other: 'ignored', + } as Attributes; + + metricSum = { + dataPoints: [], + aggregationTemporality: AggregationTemporality.DELTA, + isMonotonic: true, + dataPointType: DataPointType.SUM, + descriptor: {valueType: OTValueType.INT, name: 'some_count'} as any, + }; + + metricGauge = { + dataPoints: [], + aggregationTemporality: '' as any, + dataPointType: DataPointType.GAUGE, + descriptor: {valueType: OTValueType.DOUBLE, name: 'a_count'} as any, + }; + + metricHistogram = { + dataPoints: [], + aggregationTemporality: '' as any, + dataPointType: DataPointType.HISTOGRAM, + descriptor: {} as any, + }; + + metricExponentialHistogram = { + dataPoints: [], + aggregationTemporality: '' as any, + dataPointType: DataPointType.EXPONENTIAL_HISTOGRAM, + descriptor: {} as any, + }; + + metricUnknown = { + dataPoints: [], + aggregationTemporality: '' as any, + dataPointType: 'UNKNOWN_TYPE' as any, + descriptor: {name: ''} as any, + }; + + sumDataPoint = { + attributes, + value: 0, + startTime: process.hrtime(), + endTime: process.hrtime(), + }; + + gaugeDataPoint = { + attributes, + value: 0.0, + startTime: process.hrtime(), + endTime: process.hrtime(), + }; + + histogramDataPoint = { + attributes, + startTime: process.hrtime(), + endTime: process.hrtime(), + value: { + count: 1, + buckets: { + boundaries: [ + 0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, + 10000, + ], + counts: [0, 0, 0, 0, 1, 0], + }, + }, + }; + + exponentialHistogramDataPoint = { + attributes: {}, + startTime: [1687103020, 679000000], + endTime: [1687103020, 680000000], + value: { + count: 7, + sum: 12.5, + scale: -1, + 
zeroCount: 1, + positive: { + offset: -1, + bucketCounts: [1, 3, 1], + }, + negative: { + bucketCounts: [1], + offset: 0, + }, + }, + }; + }); + + it('normalizes label keys', () => { + [ + ['valid_key_1', 'valid_key_1'], + ['hellø', 'hellø'], + ['123', 'key_123'], + ['key!321', 'key_321'], + ['hyphens-dots.slashes/', 'hyphens_dots_slashes_'], + ['non_letters_:£¢$∞', 'non_letters______'], + ].map(([key, expected]) => { + assert.strictEqual(_normalizeLabelKey(key), expected); + }); + }); + + it('should convert metric types to GCM metric kinds', () => { + assert.strictEqual(_transformMetricKind(metricSum), MetricKind.CUMULATIVE); + + const nonMonotonicMetricSum = { + dataPoints: [], + aggregationTemporality: '' as any, + isMonotonic: false, + dataPointType: DataPointType.SUM, + descriptor: {} as any, + } as SumMetricData; + + assert.strictEqual( + _transformMetricKind(nonMonotonicMetricSum), + MetricKind.GAUGE, + ); + + assert.strictEqual(_transformMetricKind(metricGauge), MetricKind.GAUGE); + + assert.strictEqual( + _transformMetricKind(metricHistogram), + MetricKind.CUMULATIVE, + ); + + assert.strictEqual( + _transformMetricKind(metricExponentialHistogram), + MetricKind.CUMULATIVE, + ); + + assert.strictEqual( + _transformMetricKind(metricUnknown), + MetricKind.UNSPECIFIED, + ); + }); + + it('should extract metric and resource labels', () => { + const {metricLabels, monitoredResourceLabels} = + _extractLabels(sumDataPoint); + + // Metric Labels + assert.strictEqual(metricLabels['client_uid'], 'test_uid'); + assert.strictEqual(metricLabels['client_name'], 'test_name'); + assert.strictEqual(metricLabels['database'], 'database_id'); + assert.strictEqual(metricLabels['method'], 'test_method'); + assert.strictEqual(metricLabels['status'], 'test_status'); + + // Resource Labels + assert.strictEqual(monitoredResourceLabels['project_id'], 'project_id'); + assert.strictEqual(monitoredResourceLabels['instance_id'], 'instance_id'); + assert.strictEqual( + monitoredResourceLabels['instance_config'], + 'test_config', + ); + assert.strictEqual(monitoredResourceLabels['location'], 'test_location'); + assert.strictEqual(monitoredResourceLabels['client_hash'], 'test_hash'); + + // Other Labels + assert(!('other' in metricLabels)); + assert(!('other' in monitoredResourceLabels)); + }); + + it('should transform otel value types to GCM value types', () => { + assert.strictEqual(_transformValueType(metricSum), ValueType.INT64); + + assert.strictEqual(_transformValueType(metricGauge), ValueType.DOUBLE); + + assert.strictEqual( + _transformValueType(metricHistogram), + ValueType.DISTRIBUTION, + ); + + assert.strictEqual( + _transformValueType(metricExponentialHistogram), + ValueType.DISTRIBUTION, + ); + + assert.strictEqual( + _transformValueType(metricUnknown), + ValueType.VALUE_TYPE_UNSPECIFIED, + ); + }); + + it('should tranform the datapoint to a GCM point type', () => { + const sumExpectation = { + value: { + int64Value: '0', + }, + interval: { + startTime: _formatHrTimeToGcmTime(sumDataPoint.startTime), + endTime: _formatHrTimeToGcmTime(sumDataPoint.endTime), + }, + }; + assert.deepStrictEqual( + _transformPoint(metricSum, sumDataPoint), + sumExpectation, + ); + + const gaugeExpectation = { + value: { + doubleValue: '0.0', + }, + interval: { + endTime: _formatHrTimeToGcmTime(gaugeDataPoint.endTime), + }, + }; + + assert.deepStrictEqual( + _transformPoint(metricGauge, gaugeDataPoint), + gaugeExpectation, + ); + + const histogramExpectation = { + value: { + distributionValue: { + count: '1', + mean: 0, + 
bucketOptions: { + explicitBuckets: { + bounds: [ + 0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, + 7500, 10000, + ], + }, + }, + bucketCounts: ['0', '0', '0', '0', '1', '0'], + }, + }, + interval: { + startTime: _formatHrTimeToGcmTime(histogramDataPoint.startTime), + endTime: _formatHrTimeToGcmTime(histogramDataPoint.endTime), + }, + }; + + assert.deepStrictEqual( + _transformPoint(metricHistogram, histogramDataPoint), + histogramExpectation, + ); + + const exponentialHistogramExpectation = { + interval: { + startTime: _formatHrTimeToGcmTime( + exponentialHistogramDataPoint.startTime, + ), + endTime: _formatHrTimeToGcmTime(exponentialHistogramDataPoint.endTime), + }, + value: { + distributionValue: { + bucketCounts: ['2', '1', '3', '1', '0'], + bucketOptions: { + exponentialBuckets: { + growthFactor: 4, + numFiniteBuckets: 3, + scale: 0.25, + }, + }, + count: '7', + mean: 1.7857142857142858, + }, + }, + }; + + assert.deepStrictEqual( + _transformPoint( + metricExponentialHistogram, + exponentialHistogramDataPoint, + ), + exponentialHistogramExpectation, + ); + }); + + it('should create a MonitoredResource with spanner type', () => { + const labels = {}; + const resource = _transformResource(labels); + assert(resource); + assert.strictEqual(resource.type, SPANNER_RESOURCE_TYPE); + }); + + it('should convert otel metrics to GCM TimeSeries', async () => { + const meter: Meter = meterProvider.getMeter(SPANNER_METER_NAME); + + const attemptCounter: Counter = meter.createCounter( + METRIC_NAME_ATTEMPT_COUNT, + { + description: 'Count of attempts', + unit: 'count', + }, + ); + + attemptCounter.add(1, {}); + attemptCounter.add(2, {}); + + const {errors, resourceMetrics} = await reader.collect(); + if (errors.length !== 0) { + throw errors; + } + const timeseries = + transformResourceMetricToTimeSeriesArray(resourceMetrics); + assert.strictEqual(timeseries.length, 1); + + // Verify the contents of the TimeSeries + const ts = timeseries[0]; + + assert.strictEqual(ts.valueType, 'INT64'); + + assert.strictEqual(ts.points?.length, 1); + + assert.strictEqual( + (ts.points[0].value as {int64Value: string})?.int64Value, + '3', + ); + }); + + it('should filter out metrics without spanner-nodejs scope', async () => { + reader = new InMemoryMetricReader(); + meterProvider = new MeterProvider({ + readers: [reader], + }); + + const meter: Meter = meterProvider.getMeter('wrong_scope'); + + const attemptCounter: Counter = meter.createCounter( + METRIC_NAME_ATTEMPT_COUNT, + { + description: 'Count of attempts', + unit: 'count', + }, + ); + + attemptCounter.add(1, {}); + attemptCounter.add(2, {}); + + const {errors, resourceMetrics} = await reader.collect(); + + if (errors.length !== 0) { + throw errors; + } + const timeseries = + transformResourceMetricToTimeSeriesArray(resourceMetrics); + + assert.strictEqual(timeseries.length, 0); + }); +}); From d45855b53ac334d566c3aef636603923dd88c4d3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 12:55:47 +0530 Subject: [PATCH 06/31] build: ensure there's only a single service config file for the Spanner Admin Instances API (#2309) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build: ensure there's only a single service config file for the Spanner Admin Instances API PiperOrigin-RevId: 763646865 Source-Link: https://github.com/googleapis/googleapis/commit/0a4ce50a6664cce6eaae3dfb4deb0135155027ec Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/88e635519594e1a159a1f811d14958c55cfa8a85 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODhlNjM1NTE5NTk0ZTFhMTU5YTFmODExZDE0OTU4YzU1Y2ZhOGE4NSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- src/v1/instance_admin_client.ts | 289 +++++++++++++++++++++++++++-- test/gapic_instance_admin_v1.ts | 315 ++++++++++++++++++++++++++++++++ 2 files changed, 592 insertions(+), 12 deletions(-) diff --git a/src/v1/instance_admin_client.ts b/src/v1/instance_admin_client.ts index 4ec8d9979..a1d1adb7e 100644 --- a/src/v1/instance_admin_client.ts +++ b/src/v1/instance_admin_client.ts @@ -282,31 +282,72 @@ export class InstanceAdminClient { lroOptions.protoJson = protoFilesRoot; lroOptions.httpRules = [ { - selector: 'google.longrunning.Operations.GetOperation', - get: '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + selector: 'google.longrunning.Operations.CancelOperation', + post: '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', additional_bindings: [ - {get: '/v1/{name=projects/*/instances/*/operations/*}'}, + {post: '/v1/{name=projects/*/instances/*/operations/*}:cancel'}, + { + post: '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + post: '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', + }, + { + post: '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + { + post: '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', + }, ], }, { - selector: 'google.longrunning.Operations.ListOperations', - get: '/v1/{name=projects/*/instances/*/databases/*/operations}', + selector: 'google.longrunning.Operations.DeleteOperation', + delete: '/v1/{name=projects/*/instances/*/databases/*/operations/*}', additional_bindings: [ - {get: '/v1/{name=projects/*/instances/*/operations}'}, + {delete: '/v1/{name=projects/*/instances/*/operations/*}'}, + { + delete: + '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + delete: + '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + {delete: '/v1/{name=projects/*/instanceConfigs/*/operations/*}'}, + { + delete: + '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, ], }, { - selector: 'google.longrunning.Operations.CancelOperation', - post: '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + selector: 'google.longrunning.Operations.GetOperation', + get: '/v1/{name=projects/*/instances/*/databases/*/operations/*}', additional_bindings: [ - {post: '/v1/{name=projects/*/instances/*/operations/*}:cancel'}, + {get: '/v1/{name=projects/*/instances/*/operations/*}'}, + {get: '/v1/{name=projects/*/instances/*/backups/*/operations/*}'}, + { + get: '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + {get: '/v1/{name=projects/*/instanceConfigs/*/operations/*}'}, + { + get: '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, ], }, { - selector: 'google.longrunning.Operations.DeleteOperation', - delete: '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + selector: 'google.longrunning.Operations.ListOperations', + get: '/v1/{name=projects/*/instances/*/databases/*/operations}', additional_bindings: [ - {delete: '/v1/{name=projects/*/instances/*/operations/*}'}, + {get: '/v1/{name=projects/*/instances/*/operations}'}, + {get: 
'/v1/{name=projects/*/instances/*/backups/*/operations}'}, + { + get: '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', + }, + {get: '/v1/{name=projects/*/instanceConfigs/*/operations}'}, + { + get: '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', + }, ], }, ]; @@ -4853,6 +4894,230 @@ export class InstanceAdminClient { callSettings, ) as AsyncIterable; } + /** + * Gets the latest state of a long-running operation. Clients can use this + * method to poll the operation result at intervals as recommended by the API + * service. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} + * for the details. + * @param {function(?Error, ?Object)=} callback + * The function which will be called with the result of the API call. + * + * The second parameter to the callback is an object representing + * {@link google.longrunning.Operation | google.longrunning.Operation}. + * @return {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * {@link google.longrunning.Operation | google.longrunning.Operation}. + * The promise has a method named "cancel" which cancels the ongoing API call. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * const name = ''; + * const [response] = await client.getOperation({name}); + * // doThingsWith(response) + * ``` + */ + getOperation( + request: protos.google.longrunning.GetOperationRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protos.google.longrunning.Operation, + protos.google.longrunning.GetOperationRequest, + {} | null | undefined + >, + callback?: Callback< + protos.google.longrunning.Operation, + protos.google.longrunning.GetOperationRequest, + {} | null | undefined + >, + ): Promise<[protos.google.longrunning.Operation]> { + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: request.name ?? '', + }); + return this.operationsClient.getOperation(request, options, callback); + } + /** + * Lists operations that match the specified filter in the request. If the + * server doesn't support this method, it returns `UNIMPLEMENTED`. Returns an iterable object. + * + * For-await-of syntax is used with the iterable to recursively get response element on-demand. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation collection. + * @param {string} request.filter - The standard list filter. + * @param {number=} request.pageSize - + * The maximum number of resources contained in the underlying API + * response. If page streaming is performed per-resource, this + * parameter does not affect the return value. If page streaming is + * performed per-page, this determines the maximum number of + * resources in a page. 
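+   *   For example, a pageSize of 100 only caps how many operations each + *   underlying ListOperations response carries; the returned iterable still + *   yields operations one at a time.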
+ * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} for the + * details. + * @returns {Object} + * An iterable Object that conforms to {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | iteration protocols}. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * for await (const response of client.listOperationsAsync(request)); + * // doThingsWith(response) + * ``` + */ + listOperationsAsync( + request: protos.google.longrunning.ListOperationsRequest, + options?: gax.CallOptions, + ): AsyncIterable { + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: request.name ?? '', + }); + return this.operationsClient.listOperationsAsync(request, options); + } + /** + * Starts asynchronous cancellation on a long-running operation. The server + * makes a best effort to cancel the operation, but success is not + * guaranteed. If the server doesn't support this method, it returns + * `google.rpc.Code.UNIMPLEMENTED`. Clients can use + * {@link Operations.GetOperation} or + * other methods to check whether the cancellation succeeded or whether the + * operation completed despite cancellation. On successful cancellation, + * the operation is not deleted; instead, it becomes an operation with + * an {@link Operation.error} value with a {@link google.rpc.Status.code} of + * 1, corresponding to `Code.CANCELLED`. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource to be cancelled. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} for the + * details. + * @param {function(?Error)=} callback + * The function which will be called with the result of the API call. + * @return {Promise} - The promise which resolves when API call finishes. + * The promise has a method named "cancel" which cancels the ongoing API + * call. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * await client.cancelOperation({name: ''}); + * ``` + */ + cancelOperation( + request: protos.google.longrunning.CancelOperationRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protos.google.longrunning.CancelOperationRequest, + protos.google.protobuf.Empty, + {} | undefined | null + >, + callback?: Callback< + protos.google.longrunning.CancelOperationRequest, + protos.google.protobuf.Empty, + {} | undefined | null + >, + ): Promise { + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: request.name ?? 
'', + }); + return this.operationsClient.cancelOperation(request, options, callback); + } + + /** + * Deletes a long-running operation. This method indicates that the client is + * no longer interested in the operation result. It does not cancel the + * operation. If the server doesn't support this method, it returns + * `google.rpc.Code.UNIMPLEMENTED`. + * + * @param {Object} request - The request object that will be sent. + * @param {string} request.name - The name of the operation resource to be deleted. + * @param {Object=} options + * Optional parameters. You can override the default settings for this call, + * e.g, timeout, retries, paginations, etc. See {@link + * https://googleapis.github.io/gax-nodejs/global.html#CallOptions | gax.CallOptions} + * for the details. + * @param {function(?Error)=} callback + * The function which will be called with the result of the API call. + * @return {Promise} - The promise which resolves when API call finishes. + * The promise has a method named "cancel" which cancels the ongoing API + * call. + * + * @example + * ``` + * const client = longrunning.operationsClient(); + * await client.deleteOperation({name: ''}); + * ``` + */ + deleteOperation( + request: protos.google.longrunning.DeleteOperationRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< + protos.google.protobuf.Empty, + protos.google.longrunning.DeleteOperationRequest, + {} | null | undefined + >, + callback?: Callback< + protos.google.protobuf.Empty, + protos.google.longrunning.DeleteOperationRequest, + {} | null | undefined + >, + ): Promise<protos.google.protobuf.Empty> { + let options: gax.CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as gax.CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers['x-goog-request-params'] = + this._gaxModule.routingHeader.fromParams({ + name: request.name ?? 
'', + }); + return this.operationsClient.deleteOperation(request, options, callback); + } + // -------------------- // -- Path templates -- // -------------------- diff --git a/test/gapic_instance_admin_v1.ts b/test/gapic_instance_admin_v1.ts index 627f8ece6..8c3f7b596 100644 --- a/test/gapic_instance_admin_v1.ts +++ b/test/gapic_instance_admin_v1.ts @@ -4593,6 +4593,321 @@ describe('v1.InstanceAdminClient', () => { ); }); }); + describe('getOperation', () => { + it('invokes getOperation without error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.GetOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new operationsProtos.google.longrunning.Operation(), + ); + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const response = await client.getOperation(request); + assert.deepStrictEqual(response, [expectedResponse]); + assert( + (client.operationsClient.getOperation as SinonStub) + .getCall(0) + .calledWith(request), + ); + }); + it('invokes getOperation without error using callback', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.GetOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new operationsProtos.google.longrunning.Operation(), + ); + client.operationsClient.getOperation = sinon + .stub() + .callsArgWith(2, null, expectedResponse); + const promise = new Promise((resolve, reject) => { + client.operationsClient + .getOperation( + request, + undefined, + ( + err?: Error | null, + result?: operationsProtos.google.longrunning.Operation | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ) + .catch(err => { + throw err; + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + it('invokes getOperation with error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.GetOperationRequest(), + ); + const expectedError = new Error('expected'); + client.operationsClient.getOperation = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects(async () => { + await client.getOperation(request); + }, expectedError); + assert( + (client.operationsClient.getOperation as SinonStub) + .getCall(0) + .calledWith(request), + ); + }); + }); + describe('cancelOperation', () => { + it('invokes cancelOperation without error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.CancelOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty(), + ); + client.operationsClient.cancelOperation = + stubSimpleCall(expectedResponse); + const response = await 
client.cancelOperation(request); + assert.deepStrictEqual(response, [expectedResponse]); + assert( + (client.operationsClient.cancelOperation as SinonStub) + .getCall(0) + .calledWith(request), + ); + }); + it('invokes cancelOperation without error using callback', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.CancelOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty(), + ); + client.operationsClient.cancelOperation = sinon + .stub() + .callsArgWith(2, null, expectedResponse); + const promise = new Promise((resolve, reject) => { + client.operationsClient + .cancelOperation( + request, + undefined, + ( + err?: Error | null, + result?: protos.google.protobuf.Empty | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ) + .catch(err => { + throw err; + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.operationsClient.cancelOperation as SinonStub).getCall(0)); + }); + it('invokes cancelOperation with error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.CancelOperationRequest(), + ); + const expectedError = new Error('expected'); + client.operationsClient.cancelOperation = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects(async () => { + await client.cancelOperation(request); + }, expectedError); + assert( + (client.operationsClient.cancelOperation as SinonStub) + .getCall(0) + .calledWith(request), + ); + }); + }); + describe('deleteOperation', () => { + it('invokes deleteOperation without error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.DeleteOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty(), + ); + client.operationsClient.deleteOperation = + stubSimpleCall(expectedResponse); + const response = await client.deleteOperation(request); + assert.deepStrictEqual(response, [expectedResponse]); + assert( + (client.operationsClient.deleteOperation as SinonStub) + .getCall(0) + .calledWith(request), + ); + }); + it('invokes deleteOperation without error using callback', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.DeleteOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty(), + ); + client.operationsClient.deleteOperation = sinon + .stub() + .callsArgWith(2, null, expectedResponse); + const promise = new Promise((resolve, reject) => { + client.operationsClient + .deleteOperation( + request, + undefined, + ( + err?: Error | null, + result?: protos.google.protobuf.Empty | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ) + 
.catch(err => { + throw err; + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.operationsClient.deleteOperation as SinonStub).getCall(0)); + }); + it('invokes deleteOperation with error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.DeleteOperationRequest(), + ); + const expectedError = new Error('expected'); + client.operationsClient.deleteOperation = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects(async () => { + await client.deleteOperation(request); + }, expectedError); + assert( + (client.operationsClient.deleteOperation as SinonStub) + .getCall(0) + .calledWith(request), + ); + }); + }); + describe('listOperationsAsync', () => { + it('uses async iteration with listOperations without error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.ListOperationsRequest(), + ); + const expectedResponse = [ + generateSampleMessage( + new operationsProtos.google.longrunning.ListOperationsResponse(), + ), + generateSampleMessage( + new operationsProtos.google.longrunning.ListOperationsResponse(), + ), + generateSampleMessage( + new operationsProtos.google.longrunning.ListOperationsResponse(), + ), + ]; + client.operationsClient.descriptor.listOperations.asyncIterate = + stubAsyncIterationCall(expectedResponse); + const responses: operationsProtos.google.longrunning.IOperation[] = []; + const iterable = client.operationsClient.listOperationsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + ( + client.operationsClient.descriptor.listOperations + .asyncIterate as SinonStub + ).getCall(0).args[1], + request, + ); + }); + it('uses async iteration with listOperations with error', async () => { + const client = new instanceadminModule.v1.InstanceAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new operationsProtos.google.longrunning.ListOperationsRequest(), + ); + const expectedError = new Error('expected'); + client.operationsClient.descriptor.listOperations.asyncIterate = + stubAsyncIterationCall(undefined, expectedError); + const iterable = client.operationsClient.listOperationsAsync(request); + await assert.rejects(async () => { + const responses: operationsProtos.google.longrunning.IOperation[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + ( + client.operationsClient.descriptor.listOperations + .asyncIterate as SinonStub + ).getCall(0).args[1], + request, + ); + }); + }); describe('Path templates', () => { describe('instance', async () => { From 57d67be2e3b6d6ac2a8a903acf8613b27a049c3b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Jun 2025 10:15:23 +0530 Subject: [PATCH 07/31] feat(spanner): add new change_stream.proto (#2315) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 
feat(spanner): add new change_stream.proto PiperOrigin-RevId: 766241102 Source-Link: https://github.com/googleapis/googleapis/commit/2bea1fccad5117e9f026488570a4eb533df17b7c Source-Link: https://github.com/googleapis/googleapis-gen/commit/f429e2a86492fe37754079ff0236cbac3be1bfba Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjQyOWUyYTg2NDkyZmUzNzc1NDA3OWZmMDIzNmNiYWMzYmUxYmZiYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- protos/google/spanner/v1/change_stream.proto | 451 +++ protos/protos.d.ts | 1276 +++++++ protos/protos.js | 3342 ++++++++++++++++++ protos/protos.json | 252 +- src/v1/spanner_proto_list.json | 1 + 5 files changed, 5321 insertions(+), 1 deletion(-) create mode 100644 protos/google/spanner/v1/change_stream.proto diff --git a/protos/google/spanner/v1/change_stream.proto b/protos/google/spanner/v1/change_stream.proto new file mode 100644 index 000000000..53c71078d --- /dev/null +++ b/protos/google/spanner/v1/change_stream.proto @@ -0,0 +1,451 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.spanner.v1; + +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/spanner/v1/type.proto"; + +option csharp_namespace = "Google.Cloud.Spanner.V1"; +option go_package = "cloud.google.com/go/spanner/apiv1/spannerpb;spannerpb"; +option java_multiple_files = true; +option java_outer_classname = "ChangeStreamProto"; +option java_package = "com.google.spanner.v1"; +option php_namespace = "Google\\Cloud\\Spanner\\V1"; +option ruby_package = "Google::Cloud::Spanner::V1"; + +// Spanner Change Streams enable customers to capture and stream out changes to +// their Spanner databases in real-time. A change stream +// can be created with option partition_mode='IMMUTABLE_KEY_RANGE' or +// partition_mode='MUTABLE_KEY_RANGE'. +// +// This message is only used in Change Streams created with the option +// partition_mode='MUTABLE_KEY_RANGE'. Spanner automatically creates a special +// Table-Valued Function (TVF) along with each Change Streams. The function +// provides access to the change stream's records. The function is named +// READ_ (where is the +// name of the change stream), and it returns a table with only one column +// called ChangeRecord. +message ChangeStreamRecord { + // A data change record contains a set of changes to a table with the same + // modification type (insert, update, or delete) committed at the same commit + // timestamp in one change stream partition for the same transaction. Multiple + // data change records can be returned for the same transaction across + // multiple change stream partitions. + message DataChangeRecord { + // Metadata for a column. + message ColumnMetadata { + // Name of the column. + string name = 1; + + // Type of the column. 
+ Type type = 2; + + // Indicates whether the column is a primary key column. + bool is_primary_key = 3; + + // Ordinal position of the column based on the original table definition + // in the schema starting with a value of 1. + int64 ordinal_position = 4; + } + + // Returns the value and associated metadata for a particular field of the + // [Mod][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod]. + message ModValue { + // Index within the repeated + // [column_metadata][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.column_metadata] + // field, to obtain the column metadata for the column that was modified. + int32 column_metadata_index = 1; + + // The value of the column. + google.protobuf.Value value = 2; + } + + // A mod describes all data changes in a watched table row. + message Mod { + // Returns the value of the primary key of the modified row. + repeated ModValue keys = 1; + + // Returns the old values before the change for the modified columns. + // Always empty for + // [INSERT][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.INSERT], + // or if old values are not being captured specified by + // [value_capture_type][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType]. + repeated ModValue old_values = 2; + + // Returns the new values after the change for the modified columns. + // Always empty for + // [DELETE][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.DELETE]. + repeated ModValue new_values = 3; + } + + // Mod type describes the type of change Spanner applied to the data. For + // example, if the client submits an INSERT_OR_UPDATE request, Spanner will + // perform an insert if there is no existing row and return ModType INSERT. + // Alternatively, if there is an existing row, Spanner will perform an + // update and return ModType UPDATE. + enum ModType { + // Not specified. + MOD_TYPE_UNSPECIFIED = 0; + + // Indicates data was inserted. + INSERT = 10; + + // Indicates existing data was updated. + UPDATE = 20; + + // Indicates existing data was deleted. + DELETE = 30; + } + + // Value capture type describes which values are recorded in the data + // change record. + enum ValueCaptureType { + // Not specified. + VALUE_CAPTURE_TYPE_UNSPECIFIED = 0; + + // Records both old and new values of the modified watched columns. + OLD_AND_NEW_VALUES = 10; + + // Records only new values of the modified watched columns. + NEW_VALUES = 20; + + // Records new values of all watched columns, including modified and + // unmodified columns. + NEW_ROW = 30; + + // Records the new values of all watched columns, including modified and + // unmodified columns. Also records the old values of the modified + // columns. + NEW_ROW_AND_OLD_VALUES = 40; + } + + // Indicates the timestamp in which the change was committed. + // DataChangeRecord.commit_timestamps, + // PartitionStartRecord.start_timestamps, + // PartitionEventRecord.commit_timestamps, and + // PartitionEndRecord.end_timestamps can have the same value in the same + // partition. + google.protobuf.Timestamp commit_timestamp = 1; + + // Record sequence numbers are unique and monotonically increasing (but not + // necessarily contiguous) for a specific timestamp across record + // types in the same partition. To guarantee ordered processing, the reader + // should process records (of potentially different types) in + // record_sequence order for a specific timestamp in the same partition. 
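+   // For example, two records in the same partition that share commit timestamp T + // and carry record_sequence values "00000001" and "00000004" must be consumed + // in that order, even though the sequence values are not contiguous.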
+ // + // The record sequence number ordering across partitions is only meaningful + // in the context of a specific transaction. Record sequence numbers are + // unique across partitions for a specific transaction. Sort the + // DataChangeRecords for the same + // [server_transaction_id][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.server_transaction_id] + // by + // [record_sequence][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.record_sequence] + // to reconstruct the ordering of the changes within the transaction. + string record_sequence = 2; + + // Provides a globally unique string that represents the transaction in + // which the change was committed. Multiple transactions can have the same + // commit timestamp, but each transaction has a unique + // server_transaction_id. + string server_transaction_id = 3; + + // Indicates whether this is the last record for a transaction in the + // current partition. Clients can use this field to determine when all + // records for a transaction in the current partition have been received. + bool is_last_record_in_transaction_in_partition = 4; + + // Name of the table affected by the change. + string table = 5; + + // Provides metadata describing the columns associated with the + // [mods][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.mods] listed + // below. + repeated ColumnMetadata column_metadata = 6; + + // Describes the changes that were made. + repeated Mod mods = 7; + + // Describes the type of change. + ModType mod_type = 8; + + // Describes the value capture type that was specified in the change stream + // configuration when this change was captured. + ValueCaptureType value_capture_type = 9; + + // Indicates the number of data change records that are part of this + // transaction across all change stream partitions. This value can be used + // to assemble all the records associated with a particular transaction. + int32 number_of_records_in_transaction = 10; + + // Indicates the number of partitions that return data change records for + // this transaction. This value can be helpful in assembling all records + // associated with a particular transaction. + int32 number_of_partitions_in_transaction = 11; + + // Indicates the transaction tag associated with this transaction. + string transaction_tag = 12; + + // Indicates whether the transaction is a system transaction. System + // transactions include those issued by time-to-live (TTL), column backfill, + // etc. + bool is_system_transaction = 13; + } + + // A heartbeat record is returned as a progress indicator, when there are no + // data changes or any other partition record types in the change stream + // partition. + message HeartbeatRecord { + // Indicates the timestamp at which the query has returned all the records + // in the change stream partition with timestamp <= heartbeat timestamp. + // The heartbeat timestamp will not be the same as the timestamps of other + // record types in the same partition. + google.protobuf.Timestamp timestamp = 1; + } + + // A partition start record serves as a notification that the client should + // schedule the partitions to be queried. PartitionStartRecord returns + // information about one or more partitions. + message PartitionStartRecord { + // Start timestamp at which the partitions should be queried to return + // change stream records with timestamps >= start_timestamp. 
+ // DataChangeRecord.commit_timestamps, + // PartitionStartRecord.start_timestamps, + // PartitionEventRecord.commit_timestamps, and + // PartitionEndRecord.end_timestamps can have the same value in the same + // partition. + google.protobuf.Timestamp start_timestamp = 1; + + // Record sequence numbers are unique and monotonically increasing (but not + // necessarily contiguous) for a specific timestamp across record + // types in the same partition. To guarantee ordered processing, the reader + // should process records (of potentially different types) in + // record_sequence order for a specific timestamp in the same partition. + string record_sequence = 2; + + // Unique partition identifiers to be used in queries. + repeated string partition_tokens = 3; + } + + // A partition end record serves as a notification that the client should stop + // reading the partition. No further records are expected to be retrieved on + // it. + message PartitionEndRecord { + // End timestamp at which the change stream partition is terminated. All + // changes generated by this partition will have timestamps <= + // end_timestamp. DataChangeRecord.commit_timestamps, + // PartitionStartRecord.start_timestamps, + // PartitionEventRecord.commit_timestamps, and + // PartitionEndRecord.end_timestamps can have the same value in the same + // partition. PartitionEndRecord is the last record returned for a + // partition. + google.protobuf.Timestamp end_timestamp = 1; + + // Record sequence numbers are unique and monotonically increasing (but not + // necessarily contiguous) for a specific timestamp across record + // types in the same partition. To guarantee ordered processing, the reader + // should process records (of potentially different types) in + // record_sequence order for a specific timestamp in the same partition. + string record_sequence = 2; + + // Unique partition identifier describing the terminated change stream + // partition. + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.partition_token] + // is equal to the partition token of the change stream partition currently + // queried to return this PartitionEndRecord. + string partition_token = 3; + } + + // A partition event record describes key range changes for a change stream + // partition. The changes to a row defined by its primary key can be captured + // in one change stream partition for a specific time range, and then be + // captured in a different change stream partition for a different time range. + // This movement of key ranges across change stream partitions is a reflection + // of activities, such as Spanner's dynamic splitting and load balancing, etc. + // Processing this event is needed if users want to guarantee processing of + // the changes for any key in timestamp order. If time ordered processing of + // changes for a primary key is not needed, this event can be ignored. + // To guarantee time ordered processing for each primary key, if the event + // describes move-ins, the reader of this partition needs to wait until the + // readers of the source partitions have processed all records with timestamps + // <= this PartitionEventRecord.commit_timestamp, before advancing beyond this + // PartitionEventRecord. If the event describes move-outs, the reader can + // notify the readers of the destination partitions that they can continue + // processing. 
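To make the move-in/move-out ordering contract described above concrete, the following is a minimal reader-side sketch in TypeScript. It is illustrative only and not part of this patch: the per-partition watermark map, the polling loop, and every name in it are hypothetical stand-ins for whatever coordination mechanism a real consumer uses.

// Illustrative only: hypothetical coordination state, not part of this change.
// highestProcessed.get(token) = highest commit timestamp (epoch millis) that
// the reader of that partition has fully processed.
const highestProcessed = new Map<string, number>();

async function handlePartitionEvent(
  myPartitionToken: string,
  event: {
    commitTimestampMillis: number;
    moveInEvents: Array<{sourcePartitionToken: string}>;
    moveOutEvents: Array<{destinationPartitionToken: string}>;
  },
): Promise<void> {
  // Move-in: do not advance past this event until every source partition's
  // reader has processed all records with timestamps <= the event's commit
  // timestamp.
  for (const {sourcePartitionToken} of event.moveInEvents) {
    while (
      (highestProcessed.get(sourcePartitionToken) ?? -1) <
      event.commitTimestampMillis
    ) {
      // A real reader would await a signal instead of polling.
      await new Promise<void>(resolve => setTimeout(resolve, 100));
    }
  }
  // Move-out: no extra waiting is needed here. Advancing this partition's own
  // watermark is what lets readers of the destination partitions (blocked in
  // the loop above) continue past this commit timestamp.
  highestProcessed.set(myPartitionToken, event.commitTimestampMillis);
}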
+ message PartitionEventRecord { + // Describes move-in of the key ranges into the change stream partition + // identified by + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + // + // To maintain processing the changes for a particular key in timestamp + // order, the query processing the change stream partition identified by + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] + // should not advance beyond the partition event record commit timestamp + // until the queries processing the source change stream partitions have + // processed all change stream records with timestamps <= the partition + // event record commit timestamp. + message MoveInEvent { + // An unique partition identifier describing the source change stream + // partition that recorded changes for the key range that is moving + // into this partition. + string source_partition_token = 1; + } + + // Describes move-out of the key ranges out of the change stream partition + // identified by + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + // + // To maintain processing the changes for a particular key in timestamp + // order, the query processing the + // [MoveOutEvent][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent] + // in the partition identified by + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] + // should inform the queries processing the destination partitions that + // they can unblock and proceed processing records past the + // [commit_timestamp][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.commit_timestamp]. + message MoveOutEvent { + // An unique partition identifier describing the destination change + // stream partition that will record changes for the key range that is + // moving out of this partition. + string destination_partition_token = 1; + } + + // Indicates the commit timestamp at which the key range change occurred. + // DataChangeRecord.commit_timestamps, + // PartitionStartRecord.start_timestamps, + // PartitionEventRecord.commit_timestamps, and + // PartitionEndRecord.end_timestamps can have the same value in the same + // partition. + google.protobuf.Timestamp commit_timestamp = 1; + + // Record sequence numbers are unique and monotonically increasing (but not + // necessarily contiguous) for a specific timestamp across record + // types in the same partition. To guarantee ordered processing, the reader + // should process records (of potentially different types) in + // record_sequence order for a specific timestamp in the same partition. + string record_sequence = 2; + + // Unique partition identifier describing the partition this event + // occurred on. + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] + // is equal to the partition token of the change stream partition currently + // queried to return this PartitionEventRecord. + string partition_token = 3; + + // Set when one or more key ranges are moved into the change stream + // partition identified by + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + // + // Example: Two key ranges are moved into partition (P1) from partition (P2) + // and partition (P3) in a single transaction at timestamp T. 
+ // + // The PartitionEventRecord returned in P1 will reflect the move as: + // + // PartitionEventRecord { + // commit_timestamp: T + // partition_token: "P1" + // move_in_events { + // source_partition_token: "P2" + // } + // move_in_events { + // source_partition_token: "P3" + // } + // } + // + // The PartitionEventRecord returned in P2 will reflect the move as: + // + // PartitionEventRecord { + // commit_timestamp: T + // partition_token: "P2" + // move_out_events { + // destination_partition_token: "P1" + // } + // } + // + // The PartitionEventRecord returned in P3 will reflect the move as: + // + // PartitionEventRecord { + // commit_timestamp: T + // partition_token: "P3" + // move_out_events { + // destination_partition_token: "P1" + // } + // } + repeated MoveInEvent move_in_events = 4; + + // Set when one or more key ranges are moved out of the change stream + // partition identified by + // [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + // + // Example: Two key ranges are moved out of partition (P1) to partition (P2) + // and partition (P3) in a single transaction at timestamp T. + // + // The PartitionEventRecord returned in P1 will reflect the move as: + // + // PartitionEventRecord { + // commit_timestamp: T + // partition_token: "P1" + // move_out_events { + // destination_partition_token: "P2" + // } + // move_out_events { + // destination_partition_token: "P3" + // } + // } + // + // The PartitionEventRecord returned in P2 will reflect the move as: + // + // PartitionEventRecord { + // commit_timestamp: T + // partition_token: "P2" + // move_in_events { + // source_partition_token: "P1" + // } + // } + // + // The PartitionEventRecord returned in P3 will reflect the move as: + // + // PartitionEventRecord { + // commit_timestamp: T + // partition_token: "P3" + // move_in_events { + // source_partition_token: "P1" + // } + // } + repeated MoveOutEvent move_out_events = 5; + } + + // One of the change stream subrecords. + oneof record { + // Data change record describing a data change for a change stream + // partition. + DataChangeRecord data_change_record = 1; + + // Heartbeat record describing a heartbeat for a change stream partition. + HeartbeatRecord heartbeat_record = 2; + + // Partition start record describing a new change stream partition. + PartitionStartRecord partition_start_record = 3; + + // Partition end record describing a terminated change stream partition. + PartitionEndRecord partition_end_record = 4; + + // Partition event record describing key range changes for a change stream + // partition. + PartitionEventRecord partition_event_record = 5; + } +} diff --git a/protos/protos.d.ts b/protos/protos.d.ts index 18501fd1f..8eaff7767 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -35121,6 +35121,1282 @@ export namespace google { PG_JSONB = 3, PG_OID = 4 } + + /** Properties of a ChangeStreamRecord. 
*/ + interface IChangeStreamRecord { + + /** ChangeStreamRecord dataChangeRecord */ + dataChangeRecord?: (google.spanner.v1.ChangeStreamRecord.IDataChangeRecord|null); + + /** ChangeStreamRecord heartbeatRecord */ + heartbeatRecord?: (google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord|null); + + /** ChangeStreamRecord partitionStartRecord */ + partitionStartRecord?: (google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord|null); + + /** ChangeStreamRecord partitionEndRecord */ + partitionEndRecord?: (google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord|null); + + /** ChangeStreamRecord partitionEventRecord */ + partitionEventRecord?: (google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord|null); + } + + /** Represents a ChangeStreamRecord. */ + class ChangeStreamRecord implements IChangeStreamRecord { + + /** + * Constructs a new ChangeStreamRecord. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.IChangeStreamRecord); + + /** ChangeStreamRecord dataChangeRecord. */ + public dataChangeRecord?: (google.spanner.v1.ChangeStreamRecord.IDataChangeRecord|null); + + /** ChangeStreamRecord heartbeatRecord. */ + public heartbeatRecord?: (google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord|null); + + /** ChangeStreamRecord partitionStartRecord. */ + public partitionStartRecord?: (google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord|null); + + /** ChangeStreamRecord partitionEndRecord. */ + public partitionEndRecord?: (google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord|null); + + /** ChangeStreamRecord partitionEventRecord. */ + public partitionEventRecord?: (google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord|null); + + /** ChangeStreamRecord record. */ + public record?: ("dataChangeRecord"|"heartbeatRecord"|"partitionStartRecord"|"partitionEndRecord"|"partitionEventRecord"); + + /** + * Creates a new ChangeStreamRecord instance using the specified properties. + * @param [properties] Properties to set + * @returns ChangeStreamRecord instance + */ + public static create(properties?: google.spanner.v1.IChangeStreamRecord): google.spanner.v1.ChangeStreamRecord; + + /** + * Encodes the specified ChangeStreamRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.verify|verify} messages. + * @param message ChangeStreamRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.IChangeStreamRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ChangeStreamRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.verify|verify} messages. + * @param message ChangeStreamRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.IChangeStreamRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ChangeStreamRecord message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ChangeStreamRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord; + + /** + * Decodes a ChangeStreamRecord message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ChangeStreamRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord; + + /** + * Verifies a ChangeStreamRecord message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ChangeStreamRecord message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ChangeStreamRecord + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord; + + /** + * Creates a plain object from a ChangeStreamRecord message. Also converts values to other types if specified. + * @param message ChangeStreamRecord + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ChangeStreamRecord to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ChangeStreamRecord + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace ChangeStreamRecord { + + /** Properties of a DataChangeRecord. 
*/ + interface IDataChangeRecord { + + /** DataChangeRecord commitTimestamp */ + commitTimestamp?: (google.protobuf.ITimestamp|null); + + /** DataChangeRecord recordSequence */ + recordSequence?: (string|null); + + /** DataChangeRecord serverTransactionId */ + serverTransactionId?: (string|null); + + /** DataChangeRecord isLastRecordInTransactionInPartition */ + isLastRecordInTransactionInPartition?: (boolean|null); + + /** DataChangeRecord table */ + table?: (string|null); + + /** DataChangeRecord columnMetadata */ + columnMetadata?: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata[]|null); + + /** DataChangeRecord mods */ + mods?: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod[]|null); + + /** DataChangeRecord modType */ + modType?: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType|keyof typeof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType|null); + + /** DataChangeRecord valueCaptureType */ + valueCaptureType?: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType|keyof typeof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType|null); + + /** DataChangeRecord numberOfRecordsInTransaction */ + numberOfRecordsInTransaction?: (number|null); + + /** DataChangeRecord numberOfPartitionsInTransaction */ + numberOfPartitionsInTransaction?: (number|null); + + /** DataChangeRecord transactionTag */ + transactionTag?: (string|null); + + /** DataChangeRecord isSystemTransaction */ + isSystemTransaction?: (boolean|null); + } + + /** Represents a DataChangeRecord. */ + class DataChangeRecord implements IDataChangeRecord { + + /** + * Constructs a new DataChangeRecord. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.IDataChangeRecord); + + /** DataChangeRecord commitTimestamp. */ + public commitTimestamp?: (google.protobuf.ITimestamp|null); + + /** DataChangeRecord recordSequence. */ + public recordSequence: string; + + /** DataChangeRecord serverTransactionId. */ + public serverTransactionId: string; + + /** DataChangeRecord isLastRecordInTransactionInPartition. */ + public isLastRecordInTransactionInPartition: boolean; + + /** DataChangeRecord table. */ + public table: string; + + /** DataChangeRecord columnMetadata. */ + public columnMetadata: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata[]; + + /** DataChangeRecord mods. */ + public mods: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod[]; + + /** DataChangeRecord modType. */ + public modType: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType|keyof typeof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType); + + /** DataChangeRecord valueCaptureType. */ + public valueCaptureType: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType|keyof typeof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType); + + /** DataChangeRecord numberOfRecordsInTransaction. */ + public numberOfRecordsInTransaction: number; + + /** DataChangeRecord numberOfPartitionsInTransaction. */ + public numberOfPartitionsInTransaction: number; + + /** DataChangeRecord transactionTag. */ + public transactionTag: string; + + /** DataChangeRecord isSystemTransaction. */ + public isSystemTransaction: boolean; + + /** + * Creates a new DataChangeRecord instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns DataChangeRecord instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.IDataChangeRecord): google.spanner.v1.ChangeStreamRecord.DataChangeRecord; + + /** + * Encodes the specified DataChangeRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.verify|verify} messages. + * @param message DataChangeRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.IDataChangeRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified DataChangeRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.verify|verify} messages. + * @param message DataChangeRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.IDataChangeRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DataChangeRecord message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DataChangeRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.DataChangeRecord; + + /** + * Decodes a DataChangeRecord message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns DataChangeRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.DataChangeRecord; + + /** + * Verifies a DataChangeRecord message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a DataChangeRecord message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns DataChangeRecord + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.DataChangeRecord; + + /** + * Creates a plain object from a DataChangeRecord message. Also converts values to other types if specified. + * @param message DataChangeRecord + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this DataChangeRecord to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for DataChangeRecord + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace DataChangeRecord { + + /** Properties of a ColumnMetadata. 
*/ + interface IColumnMetadata { + + /** ColumnMetadata name */ + name?: (string|null); + + /** ColumnMetadata type */ + type?: (google.spanner.v1.IType|null); + + /** ColumnMetadata isPrimaryKey */ + isPrimaryKey?: (boolean|null); + + /** ColumnMetadata ordinalPosition */ + ordinalPosition?: (number|Long|string|null); + } + + /** Represents a ColumnMetadata. */ + class ColumnMetadata implements IColumnMetadata { + + /** + * Constructs a new ColumnMetadata. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata); + + /** ColumnMetadata name. */ + public name: string; + + /** ColumnMetadata type. */ + public type?: (google.spanner.v1.IType|null); + + /** ColumnMetadata isPrimaryKey. */ + public isPrimaryKey: boolean; + + /** ColumnMetadata ordinalPosition. */ + public ordinalPosition: (number|Long|string); + + /** + * Creates a new ColumnMetadata instance using the specified properties. + * @param [properties] Properties to set + * @returns ColumnMetadata instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata; + + /** + * Encodes the specified ColumnMetadata message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.verify|verify} messages. + * @param message ColumnMetadata message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ColumnMetadata message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.verify|verify} messages. + * @param message ColumnMetadata message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ColumnMetadata message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ColumnMetadata + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata; + + /** + * Decodes a ColumnMetadata message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ColumnMetadata + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata; + + /** + * Verifies a ColumnMetadata message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ColumnMetadata message from a plain object. Also converts values to their respective internal types. 
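+ * A hedged sketch of the plain-object conversion (names and values are illustrative; assumes `protos`
+ * is the generated root loaded as in the earlier example). Int64 fields such as ordinalPosition may be
+ * supplied as strings or numbers and are converted to their internal representation:
+ * @example
+ * const { ColumnMetadata } = protos.google.spanner.v1.ChangeStreamRecord.DataChangeRecord;
+ * const column = ColumnMetadata.fromObject({
+ *   name: 'SingerId',
+ *   isPrimaryKey: true,
+ *   ordinalPosition: '1' // string input, converted to the 64-bit integer representation
+ * });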
+ * @param object Plain object + * @returns ColumnMetadata + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata; + + /** + * Creates a plain object from a ColumnMetadata message. Also converts values to other types if specified. + * @param message ColumnMetadata + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ColumnMetadata to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ColumnMetadata + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a ModValue. */ + interface IModValue { + + /** ModValue columnMetadataIndex */ + columnMetadataIndex?: (number|null); + + /** ModValue value */ + value?: (google.protobuf.IValue|null); + } + + /** Represents a ModValue. */ + class ModValue implements IModValue { + + /** + * Constructs a new ModValue. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue); + + /** ModValue columnMetadataIndex. */ + public columnMetadataIndex: number; + + /** ModValue value. */ + public value?: (google.protobuf.IValue|null); + + /** + * Creates a new ModValue instance using the specified properties. + * @param [properties] Properties to set + * @returns ModValue instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue; + + /** + * Encodes the specified ModValue message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.verify|verify} messages. + * @param message ModValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ModValue message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.verify|verify} messages. + * @param message ModValue message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a ModValue message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ModValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue; + + /** + * Decodes a ModValue message from the specified reader or buffer, length delimited. 
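+ * A small round-trip sketch with hypothetical values (assumes `protos` as in the earlier examples):
+ * encodeDelimited prefixes the message with its length, and decodeDelimited reads it back.
+ * @example
+ * const { ModValue } = protos.google.spanner.v1.ChangeStreamRecord.DataChangeRecord;
+ * const bytes = ModValue.encodeDelimited({
+ *   columnMetadataIndex: 0,
+ *   value: { stringValue: 'Marc' }
+ * }).finish();
+ * const modValue = ModValue.decodeDelimited(bytes);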
+ * @param reader Reader or buffer to decode from + * @returns ModValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue; + + /** + * Verifies a ModValue message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a ModValue message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ModValue + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue; + + /** + * Creates a plain object from a ModValue message. Also converts values to other types if specified. + * @param message ModValue + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ModValue to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ModValue + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a Mod. */ + interface IMod { + + /** Mod keys */ + keys?: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue[]|null); + + /** Mod oldValues */ + oldValues?: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue[]|null); + + /** Mod newValues */ + newValues?: (google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue[]|null); + } + + /** Represents a Mod. */ + class Mod implements IMod { + + /** + * Constructs a new Mod. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod); + + /** Mod keys. */ + public keys: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue[]; + + /** Mod oldValues. */ + public oldValues: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue[]; + + /** Mod newValues. */ + public newValues: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue[]; + + /** + * Creates a new Mod instance using the specified properties. + * @param [properties] Properties to set + * @returns Mod instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod; + + /** + * Encodes the specified Mod message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.verify|verify} messages. + * @param message Mod message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified Mod message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.verify|verify} messages. 
+ * @param message Mod message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Mod message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns Mod + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod; + + /** + * Decodes a Mod message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns Mod + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod; + + /** + * Verifies a Mod message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a Mod message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns Mod + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod; + + /** + * Creates a plain object from a Mod message. Also converts values to other types if specified. + * @param message Mod + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Mod to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for Mod + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** ModType enum. */ + enum ModType { + MOD_TYPE_UNSPECIFIED = 0, + INSERT = 10, + UPDATE = 20, + DELETE = 30 + } + + /** ValueCaptureType enum. */ + enum ValueCaptureType { + VALUE_CAPTURE_TYPE_UNSPECIFIED = 0, + OLD_AND_NEW_VALUES = 10, + NEW_VALUES = 20, + NEW_ROW = 30, + NEW_ROW_AND_OLD_VALUES = 40 + } + } + + /** Properties of a HeartbeatRecord. */ + interface IHeartbeatRecord { + + /** HeartbeatRecord timestamp */ + timestamp?: (google.protobuf.ITimestamp|null); + } + + /** Represents a HeartbeatRecord. */ + class HeartbeatRecord implements IHeartbeatRecord { + + /** + * Constructs a new HeartbeatRecord. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord); + + /** HeartbeatRecord timestamp. */ + public timestamp?: (google.protobuf.ITimestamp|null); + + /** + * Creates a new HeartbeatRecord instance using the specified properties. 
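+ * A minimal sketch (the timestamp value is hypothetical; assumes `protos` is the generated root
+ * loaded as in the earlier examples):
+ * @example
+ * const { HeartbeatRecord } = protos.google.spanner.v1.ChangeStreamRecord;
+ * const heartbeat = HeartbeatRecord.create({
+ *   timestamp: { seconds: 1700000000, nanos: 0 }
+ * });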
+ * @param [properties] Properties to set + * @returns HeartbeatRecord instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord): google.spanner.v1.ChangeStreamRecord.HeartbeatRecord; + + /** + * Encodes the specified HeartbeatRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.verify|verify} messages. + * @param message HeartbeatRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified HeartbeatRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.verify|verify} messages. + * @param message HeartbeatRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a HeartbeatRecord message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns HeartbeatRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.HeartbeatRecord; + + /** + * Decodes a HeartbeatRecord message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns HeartbeatRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.HeartbeatRecord; + + /** + * Verifies a HeartbeatRecord message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a HeartbeatRecord message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns HeartbeatRecord + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.HeartbeatRecord; + + /** + * Creates a plain object from a HeartbeatRecord message. Also converts values to other types if specified. + * @param message HeartbeatRecord + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.HeartbeatRecord, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this HeartbeatRecord to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for HeartbeatRecord + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a PartitionStartRecord. 
*/ + interface IPartitionStartRecord { + + /** PartitionStartRecord startTimestamp */ + startTimestamp?: (google.protobuf.ITimestamp|null); + + /** PartitionStartRecord recordSequence */ + recordSequence?: (string|null); + + /** PartitionStartRecord partitionTokens */ + partitionTokens?: (string[]|null); + } + + /** Represents a PartitionStartRecord. */ + class PartitionStartRecord implements IPartitionStartRecord { + + /** + * Constructs a new PartitionStartRecord. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord); + + /** PartitionStartRecord startTimestamp. */ + public startTimestamp?: (google.protobuf.ITimestamp|null); + + /** PartitionStartRecord recordSequence. */ + public recordSequence: string; + + /** PartitionStartRecord partitionTokens. */ + public partitionTokens: string[]; + + /** + * Creates a new PartitionStartRecord instance using the specified properties. + * @param [properties] Properties to set + * @returns PartitionStartRecord instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord): google.spanner.v1.ChangeStreamRecord.PartitionStartRecord; + + /** + * Encodes the specified PartitionStartRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.verify|verify} messages. + * @param message PartitionStartRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PartitionStartRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.verify|verify} messages. + * @param message PartitionStartRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PartitionStartRecord message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PartitionStartRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.PartitionStartRecord; + + /** + * Decodes a PartitionStartRecord message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PartitionStartRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.PartitionStartRecord; + + /** + * Verifies a PartitionStartRecord message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PartitionStartRecord message from a plain object. Also converts values to their respective internal types. 
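+ * A hedged sketch with hypothetical token values (assumes `protos` as in the earlier examples):
+ * @example
+ * const { PartitionStartRecord } = protos.google.spanner.v1.ChangeStreamRecord;
+ * const start = PartitionStartRecord.fromObject({
+ *   startTimestamp: { seconds: 1700000000 },
+ *   recordSequence: '00000001',
+ *   partitionTokens: ['child-token-1', 'child-token-2']
+ * });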
+ * @param object Plain object + * @returns PartitionStartRecord + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.PartitionStartRecord; + + /** + * Creates a plain object from a PartitionStartRecord message. Also converts values to other types if specified. + * @param message PartitionStartRecord + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.PartitionStartRecord, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PartitionStartRecord to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PartitionStartRecord + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a PartitionEndRecord. */ + interface IPartitionEndRecord { + + /** PartitionEndRecord endTimestamp */ + endTimestamp?: (google.protobuf.ITimestamp|null); + + /** PartitionEndRecord recordSequence */ + recordSequence?: (string|null); + + /** PartitionEndRecord partitionToken */ + partitionToken?: (string|null); + } + + /** Represents a PartitionEndRecord. */ + class PartitionEndRecord implements IPartitionEndRecord { + + /** + * Constructs a new PartitionEndRecord. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord); + + /** PartitionEndRecord endTimestamp. */ + public endTimestamp?: (google.protobuf.ITimestamp|null); + + /** PartitionEndRecord recordSequence. */ + public recordSequence: string; + + /** PartitionEndRecord partitionToken. */ + public partitionToken: string; + + /** + * Creates a new PartitionEndRecord instance using the specified properties. + * @param [properties] Properties to set + * @returns PartitionEndRecord instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord): google.spanner.v1.ChangeStreamRecord.PartitionEndRecord; + + /** + * Encodes the specified PartitionEndRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.verify|verify} messages. + * @param message PartitionEndRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PartitionEndRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.verify|verify} messages. + * @param message PartitionEndRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PartitionEndRecord message from the specified reader or buffer. 
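+ * An encode/decode round trip as a sketch (field values are hypothetical; assumes `protos` as in the
+ * earlier examples):
+ * @example
+ * const { PartitionEndRecord } = protos.google.spanner.v1.ChangeStreamRecord;
+ * const bytes = PartitionEndRecord.encode({
+ *   recordSequence: '00000002',
+ *   partitionToken: 'parent-token'
+ * }).finish();
+ * const end = PartitionEndRecord.decode(bytes);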
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PartitionEndRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.PartitionEndRecord; + + /** + * Decodes a PartitionEndRecord message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PartitionEndRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.PartitionEndRecord; + + /** + * Verifies a PartitionEndRecord message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PartitionEndRecord message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PartitionEndRecord + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.PartitionEndRecord; + + /** + * Creates a plain object from a PartitionEndRecord message. Also converts values to other types if specified. + * @param message PartitionEndRecord + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.PartitionEndRecord, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PartitionEndRecord to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PartitionEndRecord + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a PartitionEventRecord. */ + interface IPartitionEventRecord { + + /** PartitionEventRecord commitTimestamp */ + commitTimestamp?: (google.protobuf.ITimestamp|null); + + /** PartitionEventRecord recordSequence */ + recordSequence?: (string|null); + + /** PartitionEventRecord partitionToken */ + partitionToken?: (string|null); + + /** PartitionEventRecord moveInEvents */ + moveInEvents?: (google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent[]|null); + + /** PartitionEventRecord moveOutEvents */ + moveOutEvents?: (google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent[]|null); + } + + /** Represents a PartitionEventRecord. */ + class PartitionEventRecord implements IPartitionEventRecord { + + /** + * Constructs a new PartitionEventRecord. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord); + + /** PartitionEventRecord commitTimestamp. */ + public commitTimestamp?: (google.protobuf.ITimestamp|null); + + /** PartitionEventRecord recordSequence. */ + public recordSequence: string; + + /** PartitionEventRecord partitionToken. */ + public partitionToken: string; + + /** PartitionEventRecord moveInEvents. 
*/ + public moveInEvents: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent[]; + + /** PartitionEventRecord moveOutEvents. */ + public moveOutEvents: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent[]; + + /** + * Creates a new PartitionEventRecord instance using the specified properties. + * @param [properties] Properties to set + * @returns PartitionEventRecord instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord; + + /** + * Encodes the specified PartitionEventRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.verify|verify} messages. + * @param message PartitionEventRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PartitionEventRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.verify|verify} messages. + * @param message PartitionEventRecord message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PartitionEventRecord message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PartitionEventRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord; + + /** + * Decodes a PartitionEventRecord message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PartitionEventRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord; + + /** + * Verifies a PartitionEventRecord message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PartitionEventRecord message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PartitionEventRecord + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord; + + /** + * Creates a plain object from a PartitionEventRecord message. Also converts values to other types if specified. + * @param message PartitionEventRecord + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PartitionEventRecord to JSON. 
+ * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PartitionEventRecord + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace PartitionEventRecord { + + /** Properties of a MoveInEvent. */ + interface IMoveInEvent { + + /** MoveInEvent sourcePartitionToken */ + sourcePartitionToken?: (string|null); + } + + /** Represents a MoveInEvent. */ + class MoveInEvent implements IMoveInEvent { + + /** + * Constructs a new MoveInEvent. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent); + + /** MoveInEvent sourcePartitionToken. */ + public sourcePartitionToken: string; + + /** + * Creates a new MoveInEvent instance using the specified properties. + * @param [properties] Properties to set + * @returns MoveInEvent instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent; + + /** + * Encodes the specified MoveInEvent message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.verify|verify} messages. + * @param message MoveInEvent message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MoveInEvent message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.verify|verify} messages. + * @param message MoveInEvent message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MoveInEvent message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MoveInEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent; + + /** + * Decodes a MoveInEvent message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MoveInEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent; + + /** + * Verifies a MoveInEvent message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MoveInEvent message from a plain object. Also converts values to their respective internal types. 
+ * @param object Plain object + * @returns MoveInEvent + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent; + + /** + * Creates a plain object from a MoveInEvent message. Also converts values to other types if specified. + * @param message MoveInEvent + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MoveInEvent to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MoveInEvent + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of a MoveOutEvent. */ + interface IMoveOutEvent { + + /** MoveOutEvent destinationPartitionToken */ + destinationPartitionToken?: (string|null); + } + + /** Represents a MoveOutEvent. */ + class MoveOutEvent implements IMoveOutEvent { + + /** + * Constructs a new MoveOutEvent. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent); + + /** MoveOutEvent destinationPartitionToken. */ + public destinationPartitionToken: string; + + /** + * Creates a new MoveOutEvent instance using the specified properties. + * @param [properties] Properties to set + * @returns MoveOutEvent instance + */ + public static create(properties?: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent; + + /** + * Encodes the specified MoveOutEvent message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.verify|verify} messages. + * @param message MoveOutEvent message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MoveOutEvent message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.verify|verify} messages. + * @param message MoveOutEvent message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MoveOutEvent message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MoveOutEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent; + + /** + * Decodes a MoveOutEvent message from the specified reader or buffer, length delimited. 
+ * @param reader Reader or buffer to decode from + * @returns MoveOutEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent; + + /** + * Verifies a MoveOutEvent message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MoveOutEvent message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MoveOutEvent + */ + public static fromObject(object: { [k: string]: any }): google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent; + + /** + * Creates a plain object from a MoveOutEvent message. Also converts values to other types if specified. + * @param message MoveOutEvent + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MoveOutEvent to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MoveOutEvent + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + } } } diff --git a/protos/protos.js b/protos/protos.js index 0e2ea8d79..89afc55e6 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -90622,6 +90622,3348 @@ return values; })(); + v1.ChangeStreamRecord = (function() { + + /** + * Properties of a ChangeStreamRecord. + * @memberof google.spanner.v1 + * @interface IChangeStreamRecord + * @property {google.spanner.v1.ChangeStreamRecord.IDataChangeRecord|null} [dataChangeRecord] ChangeStreamRecord dataChangeRecord + * @property {google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord|null} [heartbeatRecord] ChangeStreamRecord heartbeatRecord + * @property {google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord|null} [partitionStartRecord] ChangeStreamRecord partitionStartRecord + * @property {google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord|null} [partitionEndRecord] ChangeStreamRecord partitionEndRecord + * @property {google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord|null} [partitionEventRecord] ChangeStreamRecord partitionEventRecord + */ + + /** + * Constructs a new ChangeStreamRecord. + * @memberof google.spanner.v1 + * @classdesc Represents a ChangeStreamRecord. + * @implements IChangeStreamRecord + * @constructor + * @param {google.spanner.v1.IChangeStreamRecord=} [properties] Properties to set + */ + function ChangeStreamRecord(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ChangeStreamRecord dataChangeRecord. + * @member {google.spanner.v1.ChangeStreamRecord.IDataChangeRecord|null|undefined} dataChangeRecord + * @memberof google.spanner.v1.ChangeStreamRecord + * @instance + */ + ChangeStreamRecord.prototype.dataChangeRecord = null; + + /** + * ChangeStreamRecord heartbeatRecord. 
+ * @member {google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord|null|undefined} heartbeatRecord + * @memberof google.spanner.v1.ChangeStreamRecord + * @instance + */ + ChangeStreamRecord.prototype.heartbeatRecord = null; + + /** + * ChangeStreamRecord partitionStartRecord. + * @member {google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord|null|undefined} partitionStartRecord + * @memberof google.spanner.v1.ChangeStreamRecord + * @instance + */ + ChangeStreamRecord.prototype.partitionStartRecord = null; + + /** + * ChangeStreamRecord partitionEndRecord. + * @member {google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord|null|undefined} partitionEndRecord + * @memberof google.spanner.v1.ChangeStreamRecord + * @instance + */ + ChangeStreamRecord.prototype.partitionEndRecord = null; + + /** + * ChangeStreamRecord partitionEventRecord. + * @member {google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord|null|undefined} partitionEventRecord + * @memberof google.spanner.v1.ChangeStreamRecord + * @instance + */ + ChangeStreamRecord.prototype.partitionEventRecord = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ChangeStreamRecord record. + * @member {"dataChangeRecord"|"heartbeatRecord"|"partitionStartRecord"|"partitionEndRecord"|"partitionEventRecord"|undefined} record + * @memberof google.spanner.v1.ChangeStreamRecord + * @instance + */ + Object.defineProperty(ChangeStreamRecord.prototype, "record", { + get: $util.oneOfGetter($oneOfFields = ["dataChangeRecord", "heartbeatRecord", "partitionStartRecord", "partitionEndRecord", "partitionEventRecord"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new ChangeStreamRecord instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {google.spanner.v1.IChangeStreamRecord=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord} ChangeStreamRecord instance + */ + ChangeStreamRecord.create = function create(properties) { + return new ChangeStreamRecord(properties); + }; + + /** + * Encodes the specified ChangeStreamRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.verify|verify} messages. 
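+ * A usage sketch (the require path is illustrative): only one arm of the `record` oneof is expected
+ * to be set; here a heartbeat-only record is encoded to bytes.
+ * @example
+ * const protos = require('./protos/protos.js');
+ * const bytes = protos.google.spanner.v1.ChangeStreamRecord.encode({
+ *   heartbeatRecord: { timestamp: { seconds: 1700000000, nanos: 0 } }
+ * }).finish();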
+ * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {google.spanner.v1.IChangeStreamRecord} message ChangeStreamRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ChangeStreamRecord.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.dataChangeRecord != null && Object.hasOwnProperty.call(message, "dataChangeRecord")) + $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.encode(message.dataChangeRecord, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.heartbeatRecord != null && Object.hasOwnProperty.call(message, "heartbeatRecord")) + $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.encode(message.heartbeatRecord, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.partitionStartRecord != null && Object.hasOwnProperty.call(message, "partitionStartRecord")) + $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.encode(message.partitionStartRecord, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.partitionEndRecord != null && Object.hasOwnProperty.call(message, "partitionEndRecord")) + $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.encode(message.partitionEndRecord, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.partitionEventRecord != null && Object.hasOwnProperty.call(message, "partitionEventRecord")) + $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.encode(message.partitionEventRecord, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ChangeStreamRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {google.spanner.v1.IChangeStreamRecord} message ChangeStreamRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ChangeStreamRecord.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ChangeStreamRecord message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord} ChangeStreamRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ChangeStreamRecord.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.dataChangeRecord = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.decode(reader, reader.uint32()); + break; + } + case 2: { + message.heartbeatRecord = $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.decode(reader, reader.uint32()); + break; + } + case 3: { + message.partitionStartRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.decode(reader, reader.uint32()); + break; + } + case 4: { + message.partitionEndRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.decode(reader, reader.uint32()); + break; + } + case 5: { + message.partitionEventRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ChangeStreamRecord message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord} ChangeStreamRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ChangeStreamRecord.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ChangeStreamRecord message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ChangeStreamRecord.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.dataChangeRecord != null && message.hasOwnProperty("dataChangeRecord")) { + properties.record = 1; + { + var error = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.verify(message.dataChangeRecord); + if (error) + return "dataChangeRecord." + error; + } + } + if (message.heartbeatRecord != null && message.hasOwnProperty("heartbeatRecord")) { + if (properties.record === 1) + return "record: multiple values"; + properties.record = 1; + { + var error = $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.verify(message.heartbeatRecord); + if (error) + return "heartbeatRecord." + error; + } + } + if (message.partitionStartRecord != null && message.hasOwnProperty("partitionStartRecord")) { + if (properties.record === 1) + return "record: multiple values"; + properties.record = 1; + { + var error = $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.verify(message.partitionStartRecord); + if (error) + return "partitionStartRecord." + error; + } + } + if (message.partitionEndRecord != null && message.hasOwnProperty("partitionEndRecord")) { + if (properties.record === 1) + return "record: multiple values"; + properties.record = 1; + { + var error = $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.verify(message.partitionEndRecord); + if (error) + return "partitionEndRecord." 
+ error; + } + } + if (message.partitionEventRecord != null && message.hasOwnProperty("partitionEventRecord")) { + if (properties.record === 1) + return "record: multiple values"; + properties.record = 1; + { + var error = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.verify(message.partitionEventRecord); + if (error) + return "partitionEventRecord." + error; + } + } + return null; + }; + + /** + * Creates a ChangeStreamRecord message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord} ChangeStreamRecord + */ + ChangeStreamRecord.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord(); + if (object.dataChangeRecord != null) { + if (typeof object.dataChangeRecord !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.dataChangeRecord: object expected"); + message.dataChangeRecord = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.fromObject(object.dataChangeRecord); + } + if (object.heartbeatRecord != null) { + if (typeof object.heartbeatRecord !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.heartbeatRecord: object expected"); + message.heartbeatRecord = $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.fromObject(object.heartbeatRecord); + } + if (object.partitionStartRecord != null) { + if (typeof object.partitionStartRecord !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.partitionStartRecord: object expected"); + message.partitionStartRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.fromObject(object.partitionStartRecord); + } + if (object.partitionEndRecord != null) { + if (typeof object.partitionEndRecord !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.partitionEndRecord: object expected"); + message.partitionEndRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.fromObject(object.partitionEndRecord); + } + if (object.partitionEventRecord != null) { + if (typeof object.partitionEventRecord !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.partitionEventRecord: object expected"); + message.partitionEventRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.fromObject(object.partitionEventRecord); + } + return message; + }; + + /** + * Creates a plain object from a ChangeStreamRecord message. Also converts values to other types if specified. 
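+ * A sketch of the conversion options (assumes `protos` is the generated root loaded as above): with
+ * `oneofs: true`, the name of the set oneof arm is reported on `object.record`.
+ * @example
+ * const message = protos.google.spanner.v1.ChangeStreamRecord.fromObject({ heartbeatRecord: {} });
+ * const object = protos.google.spanner.v1.ChangeStreamRecord.toObject(message, { oneofs: true });
+ * // object.record === 'heartbeatRecord'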
+ * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord} message ChangeStreamRecord + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ChangeStreamRecord.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (message.dataChangeRecord != null && message.hasOwnProperty("dataChangeRecord")) { + object.dataChangeRecord = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.toObject(message.dataChangeRecord, options); + if (options.oneofs) + object.record = "dataChangeRecord"; + } + if (message.heartbeatRecord != null && message.hasOwnProperty("heartbeatRecord")) { + object.heartbeatRecord = $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.toObject(message.heartbeatRecord, options); + if (options.oneofs) + object.record = "heartbeatRecord"; + } + if (message.partitionStartRecord != null && message.hasOwnProperty("partitionStartRecord")) { + object.partitionStartRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.toObject(message.partitionStartRecord, options); + if (options.oneofs) + object.record = "partitionStartRecord"; + } + if (message.partitionEndRecord != null && message.hasOwnProperty("partitionEndRecord")) { + object.partitionEndRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.toObject(message.partitionEndRecord, options); + if (options.oneofs) + object.record = "partitionEndRecord"; + } + if (message.partitionEventRecord != null && message.hasOwnProperty("partitionEventRecord")) { + object.partitionEventRecord = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.toObject(message.partitionEventRecord, options); + if (options.oneofs) + object.record = "partitionEventRecord"; + } + return object; + }; + + /** + * Converts this ChangeStreamRecord to JSON. + * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord + * @instance + * @returns {Object.} JSON object + */ + ChangeStreamRecord.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ChangeStreamRecord + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ChangeStreamRecord.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord"; + }; + + ChangeStreamRecord.DataChangeRecord = (function() { + + /** + * Properties of a DataChangeRecord. 
+ * @memberof google.spanner.v1.ChangeStreamRecord + * @interface IDataChangeRecord + * @property {google.protobuf.ITimestamp|null} [commitTimestamp] DataChangeRecord commitTimestamp + * @property {string|null} [recordSequence] DataChangeRecord recordSequence + * @property {string|null} [serverTransactionId] DataChangeRecord serverTransactionId + * @property {boolean|null} [isLastRecordInTransactionInPartition] DataChangeRecord isLastRecordInTransactionInPartition + * @property {string|null} [table] DataChangeRecord table + * @property {Array.|null} [columnMetadata] DataChangeRecord columnMetadata + * @property {Array.|null} [mods] DataChangeRecord mods + * @property {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType|null} [modType] DataChangeRecord modType + * @property {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType|null} [valueCaptureType] DataChangeRecord valueCaptureType + * @property {number|null} [numberOfRecordsInTransaction] DataChangeRecord numberOfRecordsInTransaction + * @property {number|null} [numberOfPartitionsInTransaction] DataChangeRecord numberOfPartitionsInTransaction + * @property {string|null} [transactionTag] DataChangeRecord transactionTag + * @property {boolean|null} [isSystemTransaction] DataChangeRecord isSystemTransaction + */ + + /** + * Constructs a new DataChangeRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @classdesc Represents a DataChangeRecord. + * @implements IDataChangeRecord + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.IDataChangeRecord=} [properties] Properties to set + */ + function DataChangeRecord(properties) { + this.columnMetadata = []; + this.mods = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DataChangeRecord commitTimestamp. + * @member {google.protobuf.ITimestamp|null|undefined} commitTimestamp + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.commitTimestamp = null; + + /** + * DataChangeRecord recordSequence. + * @member {string} recordSequence + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.recordSequence = ""; + + /** + * DataChangeRecord serverTransactionId. + * @member {string} serverTransactionId + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.serverTransactionId = ""; + + /** + * DataChangeRecord isLastRecordInTransactionInPartition. + * @member {boolean} isLastRecordInTransactionInPartition + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.isLastRecordInTransactionInPartition = false; + + /** + * DataChangeRecord table. + * @member {string} table + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.table = ""; + + /** + * DataChangeRecord columnMetadata. + * @member {Array.} columnMetadata + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.columnMetadata = $util.emptyArray; + + /** + * DataChangeRecord mods. + * @member {Array.} mods + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.mods = $util.emptyArray; + + /** + * DataChangeRecord modType. 
+ * @member {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType} modType + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.modType = 0; + + /** + * DataChangeRecord valueCaptureType. + * @member {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType} valueCaptureType + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.valueCaptureType = 0; + + /** + * DataChangeRecord numberOfRecordsInTransaction. + * @member {number} numberOfRecordsInTransaction + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.numberOfRecordsInTransaction = 0; + + /** + * DataChangeRecord numberOfPartitionsInTransaction. + * @member {number} numberOfPartitionsInTransaction + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.numberOfPartitionsInTransaction = 0; + + /** + * DataChangeRecord transactionTag. + * @member {string} transactionTag + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.transactionTag = ""; + + /** + * DataChangeRecord isSystemTransaction. + * @member {boolean} isSystemTransaction + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + */ + DataChangeRecord.prototype.isSystemTransaction = false; + + /** + * Creates a new DataChangeRecord instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IDataChangeRecord=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord} DataChangeRecord instance + */ + DataChangeRecord.create = function create(properties) { + return new DataChangeRecord(properties); + }; + + /** + * Encodes the specified DataChangeRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.verify|verify} messages. 
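+ * A minimal usage sketch, assuming protos.js is loaded from "./protos"; the require path
+ * and field values below are placeholder assumptions.
+ * @example
+ * var protos = require("./protos"); // path assumed for this sketch
+ * var DataChangeRecord = protos.google.spanner.v1.ChangeStreamRecord.DataChangeRecord;
+ * // Build a record with placeholder values and serialize it to bytes.
+ * var record = DataChangeRecord.create({
+ *   table: "Singers",
+ *   modType: DataChangeRecord.ModType.INSERT,
+ *   transactionTag: "app=demo"
+ * });
+ * var bytes = DataChangeRecord.encode(record).finish();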
+ * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IDataChangeRecord} message DataChangeRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DataChangeRecord.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.commitTimestamp != null && Object.hasOwnProperty.call(message, "commitTimestamp")) + $root.google.protobuf.Timestamp.encode(message.commitTimestamp, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.recordSequence != null && Object.hasOwnProperty.call(message, "recordSequence")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.recordSequence); + if (message.serverTransactionId != null && Object.hasOwnProperty.call(message, "serverTransactionId")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.serverTransactionId); + if (message.isLastRecordInTransactionInPartition != null && Object.hasOwnProperty.call(message, "isLastRecordInTransactionInPartition")) + writer.uint32(/* id 4, wireType 0 =*/32).bool(message.isLastRecordInTransactionInPartition); + if (message.table != null && Object.hasOwnProperty.call(message, "table")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.table); + if (message.columnMetadata != null && message.columnMetadata.length) + for (var i = 0; i < message.columnMetadata.length; ++i) + $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.encode(message.columnMetadata[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.mods != null && message.mods.length) + for (var i = 0; i < message.mods.length; ++i) + $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.encode(message.mods[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.modType != null && Object.hasOwnProperty.call(message, "modType")) + writer.uint32(/* id 8, wireType 0 =*/64).int32(message.modType); + if (message.valueCaptureType != null && Object.hasOwnProperty.call(message, "valueCaptureType")) + writer.uint32(/* id 9, wireType 0 =*/72).int32(message.valueCaptureType); + if (message.numberOfRecordsInTransaction != null && Object.hasOwnProperty.call(message, "numberOfRecordsInTransaction")) + writer.uint32(/* id 10, wireType 0 =*/80).int32(message.numberOfRecordsInTransaction); + if (message.numberOfPartitionsInTransaction != null && Object.hasOwnProperty.call(message, "numberOfPartitionsInTransaction")) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.numberOfPartitionsInTransaction); + if (message.transactionTag != null && Object.hasOwnProperty.call(message, "transactionTag")) + writer.uint32(/* id 12, wireType 2 =*/98).string(message.transactionTag); + if (message.isSystemTransaction != null && Object.hasOwnProperty.call(message, "isSystemTransaction")) + writer.uint32(/* id 13, wireType 0 =*/104).bool(message.isSystemTransaction); + return writer; + }; + + /** + * Encodes the specified DataChangeRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IDataChangeRecord} message DataChangeRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DataChangeRecord.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a DataChangeRecord message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord} DataChangeRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DataChangeRecord.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.commitTimestamp = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 2: { + message.recordSequence = reader.string(); + break; + } + case 3: { + message.serverTransactionId = reader.string(); + break; + } + case 4: { + message.isLastRecordInTransactionInPartition = reader.bool(); + break; + } + case 5: { + message.table = reader.string(); + break; + } + case 6: { + if (!(message.columnMetadata && message.columnMetadata.length)) + message.columnMetadata = []; + message.columnMetadata.push($root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.decode(reader, reader.uint32())); + break; + } + case 7: { + if (!(message.mods && message.mods.length)) + message.mods = []; + message.mods.push($root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.decode(reader, reader.uint32())); + break; + } + case 8: { + message.modType = reader.int32(); + break; + } + case 9: { + message.valueCaptureType = reader.int32(); + break; + } + case 10: { + message.numberOfRecordsInTransaction = reader.int32(); + break; + } + case 11: { + message.numberOfPartitionsInTransaction = reader.int32(); + break; + } + case 12: { + message.transactionTag = reader.string(); + break; + } + case 13: { + message.isSystemTransaction = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a DataChangeRecord message from the specified reader or buffer, length delimited. 
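+ * A minimal round-trip sketch through the length-delimited form; the require path and the
+ * table name are placeholder assumptions.
+ * @example
+ * var protos = require("./protos"); // path assumed for this sketch
+ * var DataChangeRecord = protos.google.spanner.v1.ChangeStreamRecord.DataChangeRecord;
+ * var bytes = DataChangeRecord.encodeDelimited(DataChangeRecord.create({ table: "Albums" })).finish();
+ * var decoded = DataChangeRecord.decodeDelimited(bytes);
+ * console.log(decoded.table); // "Albums"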
+ * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord} DataChangeRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DataChangeRecord.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a DataChangeRecord message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DataChangeRecord.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.commitTimestamp != null && message.hasOwnProperty("commitTimestamp")) { + var error = $root.google.protobuf.Timestamp.verify(message.commitTimestamp); + if (error) + return "commitTimestamp." + error; + } + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + if (!$util.isString(message.recordSequence)) + return "recordSequence: string expected"; + if (message.serverTransactionId != null && message.hasOwnProperty("serverTransactionId")) + if (!$util.isString(message.serverTransactionId)) + return "serverTransactionId: string expected"; + if (message.isLastRecordInTransactionInPartition != null && message.hasOwnProperty("isLastRecordInTransactionInPartition")) + if (typeof message.isLastRecordInTransactionInPartition !== "boolean") + return "isLastRecordInTransactionInPartition: boolean expected"; + if (message.table != null && message.hasOwnProperty("table")) + if (!$util.isString(message.table)) + return "table: string expected"; + if (message.columnMetadata != null && message.hasOwnProperty("columnMetadata")) { + if (!Array.isArray(message.columnMetadata)) + return "columnMetadata: array expected"; + for (var i = 0; i < message.columnMetadata.length; ++i) { + var error = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.verify(message.columnMetadata[i]); + if (error) + return "columnMetadata." + error; + } + } + if (message.mods != null && message.hasOwnProperty("mods")) { + if (!Array.isArray(message.mods)) + return "mods: array expected"; + for (var i = 0; i < message.mods.length; ++i) { + var error = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.verify(message.mods[i]); + if (error) + return "mods." 
+ error; + } + } + if (message.modType != null && message.hasOwnProperty("modType")) + switch (message.modType) { + default: + return "modType: enum value expected"; + case 0: + case 10: + case 20: + case 30: + break; + } + if (message.valueCaptureType != null && message.hasOwnProperty("valueCaptureType")) + switch (message.valueCaptureType) { + default: + return "valueCaptureType: enum value expected"; + case 0: + case 10: + case 20: + case 30: + case 40: + break; + } + if (message.numberOfRecordsInTransaction != null && message.hasOwnProperty("numberOfRecordsInTransaction")) + if (!$util.isInteger(message.numberOfRecordsInTransaction)) + return "numberOfRecordsInTransaction: integer expected"; + if (message.numberOfPartitionsInTransaction != null && message.hasOwnProperty("numberOfPartitionsInTransaction")) + if (!$util.isInteger(message.numberOfPartitionsInTransaction)) + return "numberOfPartitionsInTransaction: integer expected"; + if (message.transactionTag != null && message.hasOwnProperty("transactionTag")) + if (!$util.isString(message.transactionTag)) + return "transactionTag: string expected"; + if (message.isSystemTransaction != null && message.hasOwnProperty("isSystemTransaction")) + if (typeof message.isSystemTransaction !== "boolean") + return "isSystemTransaction: boolean expected"; + return null; + }; + + /** + * Creates a DataChangeRecord message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord} DataChangeRecord + */ + DataChangeRecord.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord(); + if (object.commitTimestamp != null) { + if (typeof object.commitTimestamp !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.commitTimestamp: object expected"); + message.commitTimestamp = $root.google.protobuf.Timestamp.fromObject(object.commitTimestamp); + } + if (object.recordSequence != null) + message.recordSequence = String(object.recordSequence); + if (object.serverTransactionId != null) + message.serverTransactionId = String(object.serverTransactionId); + if (object.isLastRecordInTransactionInPartition != null) + message.isLastRecordInTransactionInPartition = Boolean(object.isLastRecordInTransactionInPartition); + if (object.table != null) + message.table = String(object.table); + if (object.columnMetadata) { + if (!Array.isArray(object.columnMetadata)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.columnMetadata: array expected"); + message.columnMetadata = []; + for (var i = 0; i < object.columnMetadata.length; ++i) { + if (typeof object.columnMetadata[i] !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.columnMetadata: object expected"); + message.columnMetadata[i] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.fromObject(object.columnMetadata[i]); + } + } + if (object.mods) { + if (!Array.isArray(object.mods)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.mods: array expected"); + message.mods = []; + for (var i = 0; i < object.mods.length; ++i) { + if (typeof object.mods[i] !== "object") + throw 
TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.mods: object expected"); + message.mods[i] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.fromObject(object.mods[i]); + } + } + switch (object.modType) { + default: + if (typeof object.modType === "number") { + message.modType = object.modType; + break; + } + break; + case "MOD_TYPE_UNSPECIFIED": + case 0: + message.modType = 0; + break; + case "INSERT": + case 10: + message.modType = 10; + break; + case "UPDATE": + case 20: + message.modType = 20; + break; + case "DELETE": + case 30: + message.modType = 30; + break; + } + switch (object.valueCaptureType) { + default: + if (typeof object.valueCaptureType === "number") { + message.valueCaptureType = object.valueCaptureType; + break; + } + break; + case "VALUE_CAPTURE_TYPE_UNSPECIFIED": + case 0: + message.valueCaptureType = 0; + break; + case "OLD_AND_NEW_VALUES": + case 10: + message.valueCaptureType = 10; + break; + case "NEW_VALUES": + case 20: + message.valueCaptureType = 20; + break; + case "NEW_ROW": + case 30: + message.valueCaptureType = 30; + break; + case "NEW_ROW_AND_OLD_VALUES": + case 40: + message.valueCaptureType = 40; + break; + } + if (object.numberOfRecordsInTransaction != null) + message.numberOfRecordsInTransaction = object.numberOfRecordsInTransaction | 0; + if (object.numberOfPartitionsInTransaction != null) + message.numberOfPartitionsInTransaction = object.numberOfPartitionsInTransaction | 0; + if (object.transactionTag != null) + message.transactionTag = String(object.transactionTag); + if (object.isSystemTransaction != null) + message.isSystemTransaction = Boolean(object.isSystemTransaction); + return message; + }; + + /** + * Creates a plain object from a DataChangeRecord message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord} message DataChangeRecord + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + DataChangeRecord.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.columnMetadata = []; + object.mods = []; + } + if (options.defaults) { + object.commitTimestamp = null; + object.recordSequence = ""; + object.serverTransactionId = ""; + object.isLastRecordInTransactionInPartition = false; + object.table = ""; + object.modType = options.enums === String ? "MOD_TYPE_UNSPECIFIED" : 0; + object.valueCaptureType = options.enums === String ? 
"VALUE_CAPTURE_TYPE_UNSPECIFIED" : 0; + object.numberOfRecordsInTransaction = 0; + object.numberOfPartitionsInTransaction = 0; + object.transactionTag = ""; + object.isSystemTransaction = false; + } + if (message.commitTimestamp != null && message.hasOwnProperty("commitTimestamp")) + object.commitTimestamp = $root.google.protobuf.Timestamp.toObject(message.commitTimestamp, options); + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + object.recordSequence = message.recordSequence; + if (message.serverTransactionId != null && message.hasOwnProperty("serverTransactionId")) + object.serverTransactionId = message.serverTransactionId; + if (message.isLastRecordInTransactionInPartition != null && message.hasOwnProperty("isLastRecordInTransactionInPartition")) + object.isLastRecordInTransactionInPartition = message.isLastRecordInTransactionInPartition; + if (message.table != null && message.hasOwnProperty("table")) + object.table = message.table; + if (message.columnMetadata && message.columnMetadata.length) { + object.columnMetadata = []; + for (var j = 0; j < message.columnMetadata.length; ++j) + object.columnMetadata[j] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.toObject(message.columnMetadata[j], options); + } + if (message.mods && message.mods.length) { + object.mods = []; + for (var j = 0; j < message.mods.length; ++j) + object.mods[j] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.toObject(message.mods[j], options); + } + if (message.modType != null && message.hasOwnProperty("modType")) + object.modType = options.enums === String ? $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType[message.modType] === undefined ? message.modType : $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType[message.modType] : message.modType; + if (message.valueCaptureType != null && message.hasOwnProperty("valueCaptureType")) + object.valueCaptureType = options.enums === String ? $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType[message.valueCaptureType] === undefined ? message.valueCaptureType : $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType[message.valueCaptureType] : message.valueCaptureType; + if (message.numberOfRecordsInTransaction != null && message.hasOwnProperty("numberOfRecordsInTransaction")) + object.numberOfRecordsInTransaction = message.numberOfRecordsInTransaction; + if (message.numberOfPartitionsInTransaction != null && message.hasOwnProperty("numberOfPartitionsInTransaction")) + object.numberOfPartitionsInTransaction = message.numberOfPartitionsInTransaction; + if (message.transactionTag != null && message.hasOwnProperty("transactionTag")) + object.transactionTag = message.transactionTag; + if (message.isSystemTransaction != null && message.hasOwnProperty("isSystemTransaction")) + object.isSystemTransaction = message.isSystemTransaction; + return object; + }; + + /** + * Converts this DataChangeRecord to JSON. 
+ * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @instance + * @returns {Object.} JSON object + */ + DataChangeRecord.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for DataChangeRecord + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + DataChangeRecord.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.DataChangeRecord"; + }; + + DataChangeRecord.ColumnMetadata = (function() { + + /** + * Properties of a ColumnMetadata. + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @interface IColumnMetadata + * @property {string|null} [name] ColumnMetadata name + * @property {google.spanner.v1.IType|null} [type] ColumnMetadata type + * @property {boolean|null} [isPrimaryKey] ColumnMetadata isPrimaryKey + * @property {number|Long|null} [ordinalPosition] ColumnMetadata ordinalPosition + */ + + /** + * Constructs a new ColumnMetadata. + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @classdesc Represents a ColumnMetadata. + * @implements IColumnMetadata + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata=} [properties] Properties to set + */ + function ColumnMetadata(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ColumnMetadata name. + * @member {string} name + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @instance + */ + ColumnMetadata.prototype.name = ""; + + /** + * ColumnMetadata type. + * @member {google.spanner.v1.IType|null|undefined} type + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @instance + */ + ColumnMetadata.prototype.type = null; + + /** + * ColumnMetadata isPrimaryKey. + * @member {boolean} isPrimaryKey + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @instance + */ + ColumnMetadata.prototype.isPrimaryKey = false; + + /** + * ColumnMetadata ordinalPosition. + * @member {number|Long} ordinalPosition + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @instance + */ + ColumnMetadata.prototype.ordinalPosition = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Creates a new ColumnMetadata instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata} ColumnMetadata instance + */ + ColumnMetadata.create = function create(properties) { + return new ColumnMetadata(properties); + }; + + /** + * Encodes the specified ColumnMetadata message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.verify|verify} messages. 
+ * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata} message ColumnMetadata message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ColumnMetadata.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.name != null && Object.hasOwnProperty.call(message, "name")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); + if (message.type != null && Object.hasOwnProperty.call(message, "type")) + $root.google.spanner.v1.Type.encode(message.type, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.isPrimaryKey != null && Object.hasOwnProperty.call(message, "isPrimaryKey")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.isPrimaryKey); + if (message.ordinalPosition != null && Object.hasOwnProperty.call(message, "ordinalPosition")) + writer.uint32(/* id 4, wireType 0 =*/32).int64(message.ordinalPosition); + return writer; + }; + + /** + * Encodes the specified ColumnMetadata message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IColumnMetadata} message ColumnMetadata message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ColumnMetadata.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ColumnMetadata message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata} ColumnMetadata + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ColumnMetadata.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.name = reader.string(); + break; + } + case 2: { + message.type = $root.google.spanner.v1.Type.decode(reader, reader.uint32()); + break; + } + case 3: { + message.isPrimaryKey = reader.bool(); + break; + } + case 4: { + message.ordinalPosition = reader.int64(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ColumnMetadata message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata} ColumnMetadata + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ColumnMetadata.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ColumnMetadata message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ColumnMetadata.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.name != null && message.hasOwnProperty("name")) + if (!$util.isString(message.name)) + return "name: string expected"; + if (message.type != null && message.hasOwnProperty("type")) { + var error = $root.google.spanner.v1.Type.verify(message.type); + if (error) + return "type." + error; + } + if (message.isPrimaryKey != null && message.hasOwnProperty("isPrimaryKey")) + if (typeof message.isPrimaryKey !== "boolean") + return "isPrimaryKey: boolean expected"; + if (message.ordinalPosition != null && message.hasOwnProperty("ordinalPosition")) + if (!$util.isInteger(message.ordinalPosition) && !(message.ordinalPosition && $util.isInteger(message.ordinalPosition.low) && $util.isInteger(message.ordinalPosition.high))) + return "ordinalPosition: integer|Long expected"; + return null; + }; + + /** + * Creates a ColumnMetadata message from a plain object. Also converts values to their respective internal types. 
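+ * A minimal sketch; the require path and column values are placeholder assumptions.
+ * @example
+ * var protos = require("./protos"); // path assumed for this sketch
+ * var ColumnMetadata = protos.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata;
+ * // int64 fields such as ordinalPosition may be supplied as a string and are converted to Long.
+ * var column = ColumnMetadata.fromObject({
+ *   name: "SingerId",
+ *   isPrimaryKey: true,
+ *   ordinalPosition: "1"
+ * });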
+ * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata} ColumnMetadata + */ + ColumnMetadata.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata(); + if (object.name != null) + message.name = String(object.name); + if (object.type != null) { + if (typeof object.type !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata.type: object expected"); + message.type = $root.google.spanner.v1.Type.fromObject(object.type); + } + if (object.isPrimaryKey != null) + message.isPrimaryKey = Boolean(object.isPrimaryKey); + if (object.ordinalPosition != null) + if ($util.Long) + (message.ordinalPosition = $util.Long.fromValue(object.ordinalPosition)).unsigned = false; + else if (typeof object.ordinalPosition === "string") + message.ordinalPosition = parseInt(object.ordinalPosition, 10); + else if (typeof object.ordinalPosition === "number") + message.ordinalPosition = object.ordinalPosition; + else if (typeof object.ordinalPosition === "object") + message.ordinalPosition = new $util.LongBits(object.ordinalPosition.low >>> 0, object.ordinalPosition.high >>> 0).toNumber(); + return message; + }; + + /** + * Creates a plain object from a ColumnMetadata message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata} message ColumnMetadata + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ColumnMetadata.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.name = ""; + object.type = null; + object.isPrimaryKey = false; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.ordinalPosition = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.ordinalPosition = options.longs === String ? "0" : 0; + } + if (message.name != null && message.hasOwnProperty("name")) + object.name = message.name; + if (message.type != null && message.hasOwnProperty("type")) + object.type = $root.google.spanner.v1.Type.toObject(message.type, options); + if (message.isPrimaryKey != null && message.hasOwnProperty("isPrimaryKey")) + object.isPrimaryKey = message.isPrimaryKey; + if (message.ordinalPosition != null && message.hasOwnProperty("ordinalPosition")) + if (typeof message.ordinalPosition === "number") + object.ordinalPosition = options.longs === String ? String(message.ordinalPosition) : message.ordinalPosition; + else + object.ordinalPosition = options.longs === String ? $util.Long.prototype.toString.call(message.ordinalPosition) : options.longs === Number ? new $util.LongBits(message.ordinalPosition.low >>> 0, message.ordinalPosition.high >>> 0).toNumber() : message.ordinalPosition; + return object; + }; + + /** + * Converts this ColumnMetadata to JSON. 
+ * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @instance + * @returns {Object.} JSON object + */ + ColumnMetadata.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ColumnMetadata + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ColumnMetadata.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ColumnMetadata"; + }; + + return ColumnMetadata; + })(); + + DataChangeRecord.ModValue = (function() { + + /** + * Properties of a ModValue. + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @interface IModValue + * @property {number|null} [columnMetadataIndex] ModValue columnMetadataIndex + * @property {google.protobuf.IValue|null} [value] ModValue value + */ + + /** + * Constructs a new ModValue. + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @classdesc Represents a ModValue. + * @implements IModValue + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue=} [properties] Properties to set + */ + function ModValue(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ModValue columnMetadataIndex. + * @member {number} columnMetadataIndex + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @instance + */ + ModValue.prototype.columnMetadataIndex = 0; + + /** + * ModValue value. + * @member {google.protobuf.IValue|null|undefined} value + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @instance + */ + ModValue.prototype.value = null; + + /** + * Creates a new ModValue instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue} ModValue instance + */ + ModValue.create = function create(properties) { + return new ModValue(properties); + }; + + /** + * Encodes the specified ModValue message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.verify|verify} messages. 
+ * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue} message ModValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ModValue.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.columnMetadataIndex != null && Object.hasOwnProperty.call(message, "columnMetadataIndex")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.columnMetadataIndex); + if (message.value != null && Object.hasOwnProperty.call(message, "value")) + $root.google.protobuf.Value.encode(message.value, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified ModValue message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IModValue} message ModValue message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ModValue.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a ModValue message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue} ModValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ModValue.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.columnMetadataIndex = reader.int32(); + break; + } + case 2: { + message.value = $root.google.protobuf.Value.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a ModValue message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue} ModValue + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ModValue.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a ModValue message. 
+ * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ModValue.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.columnMetadataIndex != null && message.hasOwnProperty("columnMetadataIndex")) + if (!$util.isInteger(message.columnMetadataIndex)) + return "columnMetadataIndex: integer expected"; + if (message.value != null && message.hasOwnProperty("value")) { + var error = $root.google.protobuf.Value.verify(message.value); + if (error) + return "value." + error; + } + return null; + }; + + /** + * Creates a ModValue message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue} ModValue + */ + ModValue.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue(); + if (object.columnMetadataIndex != null) + message.columnMetadataIndex = object.columnMetadataIndex | 0; + if (object.value != null) { + if (typeof object.value !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.value: object expected"); + message.value = $root.google.protobuf.Value.fromObject(object.value); + } + return message; + }; + + /** + * Creates a plain object from a ModValue message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue} message ModValue + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ModValue.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.columnMetadataIndex = 0; + object.value = null; + } + if (message.columnMetadataIndex != null && message.hasOwnProperty("columnMetadataIndex")) + object.columnMetadataIndex = message.columnMetadataIndex; + if (message.value != null && message.hasOwnProperty("value")) + object.value = $root.google.protobuf.Value.toObject(message.value, options); + return object; + }; + + /** + * Converts this ModValue to JSON. 
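+ * A minimal sketch; the require path and values are placeholder assumptions. As the field
+ * name suggests, columnMetadataIndex points at an entry of the parent DataChangeRecord's
+ * columnMetadata list instead of repeating the column name in every mod.
+ * @example
+ * var protos = require("./protos"); // path assumed for this sketch
+ * var ModValue = protos.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue;
+ * var value = ModValue.fromObject({
+ *   columnMetadataIndex: 0,
+ *   value: { stringValue: "Marc" } // google.protobuf.Value in object form
+ * });
+ * console.log(value.toJSON());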
+ * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @instance + * @returns {Object.} JSON object + */ + ModValue.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ModValue + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ModValue.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue"; + }; + + return ModValue; + })(); + + DataChangeRecord.Mod = (function() { + + /** + * Properties of a Mod. + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @interface IMod + * @property {Array.|null} [keys] Mod keys + * @property {Array.|null} [oldValues] Mod oldValues + * @property {Array.|null} [newValues] Mod newValues + */ + + /** + * Constructs a new Mod. + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord + * @classdesc Represents a Mod. + * @implements IMod + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod=} [properties] Properties to set + */ + function Mod(properties) { + this.keys = []; + this.oldValues = []; + this.newValues = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Mod keys. + * @member {Array.} keys + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @instance + */ + Mod.prototype.keys = $util.emptyArray; + + /** + * Mod oldValues. + * @member {Array.} oldValues + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @instance + */ + Mod.prototype.oldValues = $util.emptyArray; + + /** + * Mod newValues. + * @member {Array.} newValues + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @instance + */ + Mod.prototype.newValues = $util.emptyArray; + + /** + * Creates a new Mod instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod} Mod instance + */ + Mod.create = function create(properties) { + return new Mod(properties); + }; + + /** + * Encodes the specified Mod message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.verify|verify} messages. 
+ * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod} message Mod message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Mod.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.keys != null && message.keys.length) + for (var i = 0; i < message.keys.length; ++i) + $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.encode(message.keys[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.oldValues != null && message.oldValues.length) + for (var i = 0; i < message.oldValues.length; ++i) + $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.encode(message.oldValues[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.newValues != null && message.newValues.length) + for (var i = 0; i < message.newValues.length; ++i) + $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.encode(message.newValues[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified Mod message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.IMod} message Mod message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Mod.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a Mod message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod} Mod + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Mod.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.keys && message.keys.length)) + message.keys = []; + message.keys.push($root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.decode(reader, reader.uint32())); + break; + } + case 2: { + if (!(message.oldValues && message.oldValues.length)) + message.oldValues = []; + message.oldValues.push($root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.decode(reader, reader.uint32())); + break; + } + case 3: { + if (!(message.newValues && message.newValues.length)) + message.newValues = []; + message.newValues.push($root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a Mod message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod} Mod + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Mod.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a Mod message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + Mod.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.keys != null && message.hasOwnProperty("keys")) { + if (!Array.isArray(message.keys)) + return "keys: array expected"; + for (var i = 0; i < message.keys.length; ++i) { + var error = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.verify(message.keys[i]); + if (error) + return "keys." + error; + } + } + if (message.oldValues != null && message.hasOwnProperty("oldValues")) { + if (!Array.isArray(message.oldValues)) + return "oldValues: array expected"; + for (var i = 0; i < message.oldValues.length; ++i) { + var error = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.verify(message.oldValues[i]); + if (error) + return "oldValues." + error; + } + } + if (message.newValues != null && message.hasOwnProperty("newValues")) { + if (!Array.isArray(message.newValues)) + return "newValues: array expected"; + for (var i = 0; i < message.newValues.length; ++i) { + var error = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.verify(message.newValues[i]); + if (error) + return "newValues." + error; + } + } + return null; + }; + + /** + * Creates a Mod message from a plain object. Also converts values to their respective internal types. 
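+ * A minimal sketch; the require path and values are placeholder assumptions. keys, oldValues
+ * and newValues are repeated ModValue fields and are converted element by element.
+ * @example
+ * var protos = require("./protos"); // path assumed for this sketch
+ * var Mod = protos.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod;
+ * var mod = Mod.fromObject({
+ *   keys: [{ columnMetadataIndex: 0, value: { stringValue: "1" } }],
+ *   newValues: [{ columnMetadataIndex: 1, value: { stringValue: "Marc" } }]
+ * });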
+ * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod} Mod + */ + Mod.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod(); + if (object.keys) { + if (!Array.isArray(object.keys)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.keys: array expected"); + message.keys = []; + for (var i = 0; i < object.keys.length; ++i) { + if (typeof object.keys[i] !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.keys: object expected"); + message.keys[i] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.fromObject(object.keys[i]); + } + } + if (object.oldValues) { + if (!Array.isArray(object.oldValues)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.oldValues: array expected"); + message.oldValues = []; + for (var i = 0; i < object.oldValues.length; ++i) { + if (typeof object.oldValues[i] !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.oldValues: object expected"); + message.oldValues[i] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.fromObject(object.oldValues[i]); + } + } + if (object.newValues) { + if (!Array.isArray(object.newValues)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.newValues: array expected"); + message.newValues = []; + for (var i = 0; i < object.newValues.length; ++i) { + if (typeof object.newValues[i] !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod.newValues: object expected"); + message.newValues[i] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.fromObject(object.newValues[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a Mod message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod} message Mod + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + Mod.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.keys = []; + object.oldValues = []; + object.newValues = []; + } + if (message.keys && message.keys.length) { + object.keys = []; + for (var j = 0; j < message.keys.length; ++j) + object.keys[j] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.toObject(message.keys[j], options); + } + if (message.oldValues && message.oldValues.length) { + object.oldValues = []; + for (var j = 0; j < message.oldValues.length; ++j) + object.oldValues[j] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.toObject(message.oldValues[j], options); + } + if (message.newValues && message.newValues.length) { + object.newValues = []; + for (var j = 0; j < message.newValues.length; ++j) + object.newValues[j] = $root.google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModValue.toObject(message.newValues[j], options); + } + return object; + }; + + /** + * Converts this Mod to JSON. + * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @instance + * @returns {Object.} JSON object + */ + Mod.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for Mod + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + Mod.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod"; + }; + + return Mod; + })(); + + /** + * ModType enum. + * @name google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType + * @enum {number} + * @property {number} MOD_TYPE_UNSPECIFIED=0 MOD_TYPE_UNSPECIFIED value + * @property {number} INSERT=10 INSERT value + * @property {number} UPDATE=20 UPDATE value + * @property {number} DELETE=30 DELETE value + */ + DataChangeRecord.ModType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "MOD_TYPE_UNSPECIFIED"] = 0; + values[valuesById[10] = "INSERT"] = 10; + values[valuesById[20] = "UPDATE"] = 20; + values[valuesById[30] = "DELETE"] = 30; + return values; + })(); + + /** + * ValueCaptureType enum. 
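+ * A minimal sketch of the two-way lookup the generated enum object provides; the require
+ * path is a placeholder assumption.
+ * @example
+ * var DataChangeRecord = require("./protos").google.spanner.v1.ChangeStreamRecord.DataChangeRecord;
+ * DataChangeRecord.ValueCaptureType.NEW_ROW; // 30
+ * DataChangeRecord.ValueCaptureType[30];     // "NEW_ROW"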
+ * @name google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType + * @enum {number} + * @property {number} VALUE_CAPTURE_TYPE_UNSPECIFIED=0 VALUE_CAPTURE_TYPE_UNSPECIFIED value + * @property {number} OLD_AND_NEW_VALUES=10 OLD_AND_NEW_VALUES value + * @property {number} NEW_VALUES=20 NEW_VALUES value + * @property {number} NEW_ROW=30 NEW_ROW value + * @property {number} NEW_ROW_AND_OLD_VALUES=40 NEW_ROW_AND_OLD_VALUES value + */ + DataChangeRecord.ValueCaptureType = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "VALUE_CAPTURE_TYPE_UNSPECIFIED"] = 0; + values[valuesById[10] = "OLD_AND_NEW_VALUES"] = 10; + values[valuesById[20] = "NEW_VALUES"] = 20; + values[valuesById[30] = "NEW_ROW"] = 30; + values[valuesById[40] = "NEW_ROW_AND_OLD_VALUES"] = 40; + return values; + })(); + + return DataChangeRecord; + })(); + + ChangeStreamRecord.HeartbeatRecord = (function() { + + /** + * Properties of a HeartbeatRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @interface IHeartbeatRecord + * @property {google.protobuf.ITimestamp|null} [timestamp] HeartbeatRecord timestamp + */ + + /** + * Constructs a new HeartbeatRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @classdesc Represents a HeartbeatRecord. + * @implements IHeartbeatRecord + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord=} [properties] Properties to set + */ + function HeartbeatRecord(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * HeartbeatRecord timestamp. + * @member {google.protobuf.ITimestamp|null|undefined} timestamp + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @instance + */ + HeartbeatRecord.prototype.timestamp = null; + + /** + * Creates a new HeartbeatRecord instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.HeartbeatRecord} HeartbeatRecord instance + */ + HeartbeatRecord.create = function create(properties) { + return new HeartbeatRecord(properties); + }; + + /** + * Encodes the specified HeartbeatRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.verify|verify} messages. + * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord} message HeartbeatRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + HeartbeatRecord.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.timestamp != null && Object.hasOwnProperty.call(message, "timestamp")) + $root.google.protobuf.Timestamp.encode(message.timestamp, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified HeartbeatRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IHeartbeatRecord} message HeartbeatRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + HeartbeatRecord.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a HeartbeatRecord message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.HeartbeatRecord} HeartbeatRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + HeartbeatRecord.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.timestamp = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a HeartbeatRecord message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.HeartbeatRecord} HeartbeatRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + HeartbeatRecord.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a HeartbeatRecord message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + HeartbeatRecord.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.timestamp != null && message.hasOwnProperty("timestamp")) { + var error = $root.google.protobuf.Timestamp.verify(message.timestamp); + if (error) + return "timestamp." + error; + } + return null; + }; + + /** + * Creates a HeartbeatRecord message from a plain object. Also converts values to their respective internal types. 
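The HeartbeatRecord class defined above follows the usual protobuf.js static API, so a record can be built, serialized and parsed without touching the Spanner service at all. A minimal sketch, assuming the compiled bundle is loaded from protos/protos.js and using an arbitrary example timestamp:

    const protos = require('./protos/protos'); // path assumed
    const {HeartbeatRecord} = protos.google.spanner.v1.ChangeStreamRecord;

    const heartbeat = HeartbeatRecord.create({
      timestamp: {seconds: 1700000000, nanos: 0}, // example value
    });
    const bytes = HeartbeatRecord.encode(heartbeat).finish(); // Uint8Array
    const decoded = HeartbeatRecord.decode(bytes);
    // Render int64 seconds as a string for readability.
    console.log(HeartbeatRecord.toObject(decoded, {longs: String}));
    // e.g. { timestamp: { seconds: '1700000000', nanos: 0 } }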
+ * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.HeartbeatRecord} HeartbeatRecord + */ + HeartbeatRecord.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.HeartbeatRecord(); + if (object.timestamp != null) { + if (typeof object.timestamp !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.HeartbeatRecord.timestamp: object expected"); + message.timestamp = $root.google.protobuf.Timestamp.fromObject(object.timestamp); + } + return message; + }; + + /** + * Creates a plain object from a HeartbeatRecord message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.HeartbeatRecord} message HeartbeatRecord + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + HeartbeatRecord.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.timestamp = null; + if (message.timestamp != null && message.hasOwnProperty("timestamp")) + object.timestamp = $root.google.protobuf.Timestamp.toObject(message.timestamp, options); + return object; + }; + + /** + * Converts this HeartbeatRecord to JSON. + * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @instance + * @returns {Object.} JSON object + */ + HeartbeatRecord.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for HeartbeatRecord + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.HeartbeatRecord + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + HeartbeatRecord.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.HeartbeatRecord"; + }; + + return HeartbeatRecord; + })(); + + ChangeStreamRecord.PartitionStartRecord = (function() { + + /** + * Properties of a PartitionStartRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @interface IPartitionStartRecord + * @property {google.protobuf.ITimestamp|null} [startTimestamp] PartitionStartRecord startTimestamp + * @property {string|null} [recordSequence] PartitionStartRecord recordSequence + * @property {Array.|null} [partitionTokens] PartitionStartRecord partitionTokens + */ + + /** + * Constructs a new PartitionStartRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @classdesc Represents a PartitionStartRecord. + * @implements IPartitionStartRecord + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord=} [properties] Properties to set + */ + function PartitionStartRecord(properties) { + this.partitionTokens = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * PartitionStartRecord startTimestamp. 
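verify returns null for a valid plain object and a reason string otherwise, which makes it a useful guard before handing data to fromObject; getTypeUrl, also defined above, yields the Any-style type URL. A hedged sketch — the require path and the sample payload are assumptions:

    const protos = require('./protos/protos'); // path assumed
    const {HeartbeatRecord} = protos.google.spanner.v1.ChangeStreamRecord;

    const payload = {timestamp: {seconds: 42}}; // hypothetical plain object
    const problem = HeartbeatRecord.verify(payload);
    if (problem) throw new Error(problem);      // null means the payload is valid
    const message = HeartbeatRecord.fromObject(payload); // nested Timestamp is converted
    console.log(HeartbeatRecord.getTypeUrl());
    // "type.googleapis.com/google.spanner.v1.ChangeStreamRecord.HeartbeatRecord"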
+ * @member {google.protobuf.ITimestamp|null|undefined} startTimestamp + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @instance + */ + PartitionStartRecord.prototype.startTimestamp = null; + + /** + * PartitionStartRecord recordSequence. + * @member {string} recordSequence + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @instance + */ + PartitionStartRecord.prototype.recordSequence = ""; + + /** + * PartitionStartRecord partitionTokens. + * @member {Array.} partitionTokens + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @instance + */ + PartitionStartRecord.prototype.partitionTokens = $util.emptyArray; + + /** + * Creates a new PartitionStartRecord instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionStartRecord} PartitionStartRecord instance + */ + PartitionStartRecord.create = function create(properties) { + return new PartitionStartRecord(properties); + }; + + /** + * Encodes the specified PartitionStartRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.verify|verify} messages. + * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord} message PartitionStartRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PartitionStartRecord.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.startTimestamp != null && Object.hasOwnProperty.call(message, "startTimestamp")) + $root.google.protobuf.Timestamp.encode(message.startTimestamp, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.recordSequence != null && Object.hasOwnProperty.call(message, "recordSequence")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.recordSequence); + if (message.partitionTokens != null && message.partitionTokens.length) + for (var i = 0; i < message.partitionTokens.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.partitionTokens[i]); + return writer; + }; + + /** + * Encodes the specified PartitionStartRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionStartRecord} message PartitionStartRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PartitionStartRecord.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PartitionStartRecord message from the specified reader or buffer. 
+ * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionStartRecord} PartitionStartRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PartitionStartRecord.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.startTimestamp = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 2: { + message.recordSequence = reader.string(); + break; + } + case 3: { + if (!(message.partitionTokens && message.partitionTokens.length)) + message.partitionTokens = []; + message.partitionTokens.push(reader.string()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PartitionStartRecord message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionStartRecord} PartitionStartRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PartitionStartRecord.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PartitionStartRecord message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PartitionStartRecord.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.startTimestamp != null && message.hasOwnProperty("startTimestamp")) { + var error = $root.google.protobuf.Timestamp.verify(message.startTimestamp); + if (error) + return "startTimestamp." + error; + } + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + if (!$util.isString(message.recordSequence)) + return "recordSequence: string expected"; + if (message.partitionTokens != null && message.hasOwnProperty("partitionTokens")) { + if (!Array.isArray(message.partitionTokens)) + return "partitionTokens: array expected"; + for (var i = 0; i < message.partitionTokens.length; ++i) + if (!$util.isString(message.partitionTokens[i])) + return "partitionTokens: string[] expected"; + } + return null; + }; + + /** + * Creates a PartitionStartRecord message from a plain object. Also converts values to their respective internal types. 
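partitionTokens is a repeated string field (id 3), so the encoder writes each token as its own length-delimited entry and the decoder accumulates them back into an array. A small round-trip sketch; the require path and token values are assumptions:

    const protos = require('./protos/protos'); // path assumed
    const {PartitionStartRecord} = protos.google.spanner.v1.ChangeStreamRecord;

    const start = PartitionStartRecord.create({
      recordSequence: '00000001',              // example values
      partitionTokens: ['token-a', 'token-b'], // repeated string, field 3
    });
    const bytes = PartitionStartRecord.encode(start).finish();
    console.log(PartitionStartRecord.decode(bytes).partitionTokens);
    // [ 'token-a', 'token-b' ]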
+ * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionStartRecord} PartitionStartRecord + */ + PartitionStartRecord.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionStartRecord(); + if (object.startTimestamp != null) { + if (typeof object.startTimestamp !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.startTimestamp: object expected"); + message.startTimestamp = $root.google.protobuf.Timestamp.fromObject(object.startTimestamp); + } + if (object.recordSequence != null) + message.recordSequence = String(object.recordSequence); + if (object.partitionTokens) { + if (!Array.isArray(object.partitionTokens)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionStartRecord.partitionTokens: array expected"); + message.partitionTokens = []; + for (var i = 0; i < object.partitionTokens.length; ++i) + message.partitionTokens[i] = String(object.partitionTokens[i]); + } + return message; + }; + + /** + * Creates a plain object from a PartitionStartRecord message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionStartRecord} message PartitionStartRecord + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PartitionStartRecord.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.partitionTokens = []; + if (options.defaults) { + object.startTimestamp = null; + object.recordSequence = ""; + } + if (message.startTimestamp != null && message.hasOwnProperty("startTimestamp")) + object.startTimestamp = $root.google.protobuf.Timestamp.toObject(message.startTimestamp, options); + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + object.recordSequence = message.recordSequence; + if (message.partitionTokens && message.partitionTokens.length) { + object.partitionTokens = []; + for (var j = 0; j < message.partitionTokens.length; ++j) + object.partitionTokens[j] = message.partitionTokens[j]; + } + return object; + }; + + /** + * Converts this PartitionStartRecord to JSON. 
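The toObject implementation above also shows how the conversion options interact with defaults: unset singular fields are only emitted when `defaults` is requested, and `defaults`/`arrays` force partitionTokens to be present even when empty. A brief sketch under the same assumptions as the previous examples:

    const protos = require('./protos/protos'); // path assumed
    const {PartitionStartRecord} = protos.google.spanner.v1.ChangeStreamRecord;

    const record = PartitionStartRecord.create({recordSequence: '7'}); // example value
    console.log(PartitionStartRecord.toObject(record));
    // { recordSequence: '7' }  -- unset fields omitted
    console.log(PartitionStartRecord.toObject(record, {defaults: true}));
    // { partitionTokens: [], startTimestamp: null, recordSequence: '7' }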
+ * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @instance + * @returns {Object.} JSON object + */ + PartitionStartRecord.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for PartitionStartRecord + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionStartRecord + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PartitionStartRecord.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.PartitionStartRecord"; + }; + + return PartitionStartRecord; + })(); + + ChangeStreamRecord.PartitionEndRecord = (function() { + + /** + * Properties of a PartitionEndRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @interface IPartitionEndRecord + * @property {google.protobuf.ITimestamp|null} [endTimestamp] PartitionEndRecord endTimestamp + * @property {string|null} [recordSequence] PartitionEndRecord recordSequence + * @property {string|null} [partitionToken] PartitionEndRecord partitionToken + */ + + /** + * Constructs a new PartitionEndRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @classdesc Represents a PartitionEndRecord. + * @implements IPartitionEndRecord + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord=} [properties] Properties to set + */ + function PartitionEndRecord(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * PartitionEndRecord endTimestamp. + * @member {google.protobuf.ITimestamp|null|undefined} endTimestamp + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @instance + */ + PartitionEndRecord.prototype.endTimestamp = null; + + /** + * PartitionEndRecord recordSequence. + * @member {string} recordSequence + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @instance + */ + PartitionEndRecord.prototype.recordSequence = ""; + + /** + * PartitionEndRecord partitionToken. + * @member {string} partitionToken + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @instance + */ + PartitionEndRecord.prototype.partitionToken = ""; + + /** + * Creates a new PartitionEndRecord instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEndRecord} PartitionEndRecord instance + */ + PartitionEndRecord.create = function create(properties) { + return new PartitionEndRecord(properties); + }; + + /** + * Encodes the specified PartitionEndRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.verify|verify} messages. 
+ * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord} message PartitionEndRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PartitionEndRecord.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.endTimestamp != null && Object.hasOwnProperty.call(message, "endTimestamp")) + $root.google.protobuf.Timestamp.encode(message.endTimestamp, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.recordSequence != null && Object.hasOwnProperty.call(message, "recordSequence")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.recordSequence); + if (message.partitionToken != null && Object.hasOwnProperty.call(message, "partitionToken")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.partitionToken); + return writer; + }; + + /** + * Encodes the specified PartitionEndRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEndRecord} message PartitionEndRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PartitionEndRecord.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PartitionEndRecord message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEndRecord} PartitionEndRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PartitionEndRecord.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.endTimestamp = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 2: { + message.recordSequence = reader.string(); + break; + } + case 3: { + message.partitionToken = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PartitionEndRecord message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEndRecord} PartitionEndRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PartitionEndRecord.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PartitionEndRecord message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PartitionEndRecord.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.endTimestamp != null && message.hasOwnProperty("endTimestamp")) { + var error = $root.google.protobuf.Timestamp.verify(message.endTimestamp); + if (error) + return "endTimestamp." + error; + } + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + if (!$util.isString(message.recordSequence)) + return "recordSequence: string expected"; + if (message.partitionToken != null && message.hasOwnProperty("partitionToken")) + if (!$util.isString(message.partitionToken)) + return "partitionToken: string expected"; + return null; + }; + + /** + * Creates a PartitionEndRecord message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEndRecord} PartitionEndRecord + */ + PartitionEndRecord.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEndRecord(); + if (object.endTimestamp != null) { + if (typeof object.endTimestamp !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.endTimestamp: object expected"); + message.endTimestamp = $root.google.protobuf.Timestamp.fromObject(object.endTimestamp); + } + if (object.recordSequence != null) + message.recordSequence = String(object.recordSequence); + if (object.partitionToken != null) + message.partitionToken = String(object.partitionToken); + return message; + }; + + /** + * Creates a plain object from a PartitionEndRecord message. Also converts values to other types if specified. 
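encodeDelimited/decodeDelimited prefix every PartitionEndRecord with its length, which makes it straightforward to write several records into one buffer and read them back sequentially through a shared reader. A sketch, assuming protobufjs/minimal is available alongside the compiled bundle and using made-up token values:

    const $protobuf = require('protobufjs/minimal');
    const protos = require('./protos/protos'); // path assumed
    const {PartitionEndRecord} = protos.google.spanner.v1.ChangeStreamRecord;

    // Two length-prefixed records concatenated into one buffer.
    const first = PartitionEndRecord.encodeDelimited({partitionToken: 'p1'}).finish();
    const second = PartitionEndRecord.encodeDelimited({partitionToken: 'p2'}).finish();
    const reader = $protobuf.Reader.create(Buffer.concat([first, second]));

    console.log(PartitionEndRecord.decodeDelimited(reader).partitionToken); // 'p1'
    console.log(PartitionEndRecord.decodeDelimited(reader).partitionToken); // 'p2'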
+ * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEndRecord} message PartitionEndRecord + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PartitionEndRecord.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.endTimestamp = null; + object.recordSequence = ""; + object.partitionToken = ""; + } + if (message.endTimestamp != null && message.hasOwnProperty("endTimestamp")) + object.endTimestamp = $root.google.protobuf.Timestamp.toObject(message.endTimestamp, options); + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + object.recordSequence = message.recordSequence; + if (message.partitionToken != null && message.hasOwnProperty("partitionToken")) + object.partitionToken = message.partitionToken; + return object; + }; + + /** + * Converts this PartitionEndRecord to JSON. + * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @instance + * @returns {Object.} JSON object + */ + PartitionEndRecord.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for PartitionEndRecord + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEndRecord + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PartitionEndRecord.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.PartitionEndRecord"; + }; + + return PartitionEndRecord; + })(); + + ChangeStreamRecord.PartitionEventRecord = (function() { + + /** + * Properties of a PartitionEventRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @interface IPartitionEventRecord + * @property {google.protobuf.ITimestamp|null} [commitTimestamp] PartitionEventRecord commitTimestamp + * @property {string|null} [recordSequence] PartitionEventRecord recordSequence + * @property {string|null} [partitionToken] PartitionEventRecord partitionToken + * @property {Array.|null} [moveInEvents] PartitionEventRecord moveInEvents + * @property {Array.|null} [moveOutEvents] PartitionEventRecord moveOutEvents + */ + + /** + * Constructs a new PartitionEventRecord. + * @memberof google.spanner.v1.ChangeStreamRecord + * @classdesc Represents a PartitionEventRecord. + * @implements IPartitionEventRecord + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord=} [properties] Properties to set + */ + function PartitionEventRecord(properties) { + this.moveInEvents = []; + this.moveOutEvents = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * PartitionEventRecord commitTimestamp. + * @member {google.protobuf.ITimestamp|null|undefined} commitTimestamp + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @instance + */ + PartitionEventRecord.prototype.commitTimestamp = null; + + /** + * PartitionEventRecord recordSequence. 
+ * @member {string} recordSequence + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @instance + */ + PartitionEventRecord.prototype.recordSequence = ""; + + /** + * PartitionEventRecord partitionToken. + * @member {string} partitionToken + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @instance + */ + PartitionEventRecord.prototype.partitionToken = ""; + + /** + * PartitionEventRecord moveInEvents. + * @member {Array.} moveInEvents + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @instance + */ + PartitionEventRecord.prototype.moveInEvents = $util.emptyArray; + + /** + * PartitionEventRecord moveOutEvents. + * @member {Array.} moveOutEvents + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @instance + */ + PartitionEventRecord.prototype.moveOutEvents = $util.emptyArray; + + /** + * Creates a new PartitionEventRecord instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord} PartitionEventRecord instance + */ + PartitionEventRecord.create = function create(properties) { + return new PartitionEventRecord(properties); + }; + + /** + * Encodes the specified PartitionEventRecord message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.verify|verify} messages. + * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord} message PartitionEventRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PartitionEventRecord.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.commitTimestamp != null && Object.hasOwnProperty.call(message, "commitTimestamp")) + $root.google.protobuf.Timestamp.encode(message.commitTimestamp, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.recordSequence != null && Object.hasOwnProperty.call(message, "recordSequence")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.recordSequence); + if (message.partitionToken != null && Object.hasOwnProperty.call(message, "partitionToken")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.partitionToken); + if (message.moveInEvents != null && message.moveInEvents.length) + for (var i = 0; i < message.moveInEvents.length; ++i) + $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.encode(message.moveInEvents[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.moveOutEvents != null && message.moveOutEvents.length) + for (var i = 0; i < message.moveOutEvents.length; ++i) + $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.encode(message.moveOutEvents[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified PartitionEventRecord message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.IPartitionEventRecord} message PartitionEventRecord message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PartitionEventRecord.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PartitionEventRecord message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord} PartitionEventRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PartitionEventRecord.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.commitTimestamp = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } + case 2: { + message.recordSequence = reader.string(); + break; + } + case 3: { + message.partitionToken = reader.string(); + break; + } + case 4: { + if (!(message.moveInEvents && message.moveInEvents.length)) + message.moveInEvents = []; + message.moveInEvents.push($root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.decode(reader, reader.uint32())); + break; + } + case 5: { + if (!(message.moveOutEvents && message.moveOutEvents.length)) + message.moveOutEvents = []; + message.moveOutEvents.push($root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.decode(reader, reader.uint32())); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PartitionEventRecord message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord} PartitionEventRecord + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PartitionEventRecord.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PartitionEventRecord message. 
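PartitionEventRecord carries two repeated message fields, moveInEvents (field 4) and moveOutEvents (field 5), and the generated encoder and decoder handle the nesting transparently. A small round-trip sketch; the require path and all token values are assumptions:

    const protos = require('./protos/protos'); // path assumed
    const {PartitionEventRecord} = protos.google.spanner.v1.ChangeStreamRecord;

    const event = PartitionEventRecord.create({
      partitionToken: 'p1',                               // example values
      moveInEvents: [{sourcePartitionToken: 'p0'}],       // nested MoveInEvent
      moveOutEvents: [{destinationPartitionToken: 'p2'}], // nested MoveOutEvent
    });
    const bytes = PartitionEventRecord.encode(event).finish();
    const decoded = PartitionEventRecord.decode(bytes);
    console.log(decoded.moveInEvents[0].sourcePartitionToken);       // 'p0'
    console.log(decoded.moveOutEvents[0].destinationPartitionToken); // 'p2'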
+ * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PartitionEventRecord.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.commitTimestamp != null && message.hasOwnProperty("commitTimestamp")) { + var error = $root.google.protobuf.Timestamp.verify(message.commitTimestamp); + if (error) + return "commitTimestamp." + error; + } + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + if (!$util.isString(message.recordSequence)) + return "recordSequence: string expected"; + if (message.partitionToken != null && message.hasOwnProperty("partitionToken")) + if (!$util.isString(message.partitionToken)) + return "partitionToken: string expected"; + if (message.moveInEvents != null && message.hasOwnProperty("moveInEvents")) { + if (!Array.isArray(message.moveInEvents)) + return "moveInEvents: array expected"; + for (var i = 0; i < message.moveInEvents.length; ++i) { + var error = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.verify(message.moveInEvents[i]); + if (error) + return "moveInEvents." + error; + } + } + if (message.moveOutEvents != null && message.hasOwnProperty("moveOutEvents")) { + if (!Array.isArray(message.moveOutEvents)) + return "moveOutEvents: array expected"; + for (var i = 0; i < message.moveOutEvents.length; ++i) { + var error = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.verify(message.moveOutEvents[i]); + if (error) + return "moveOutEvents." + error; + } + } + return null; + }; + + /** + * Creates a PartitionEventRecord message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord} PartitionEventRecord + */ + PartitionEventRecord.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord(); + if (object.commitTimestamp != null) { + if (typeof object.commitTimestamp !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.commitTimestamp: object expected"); + message.commitTimestamp = $root.google.protobuf.Timestamp.fromObject(object.commitTimestamp); + } + if (object.recordSequence != null) + message.recordSequence = String(object.recordSequence); + if (object.partitionToken != null) + message.partitionToken = String(object.partitionToken); + if (object.moveInEvents) { + if (!Array.isArray(object.moveInEvents)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.moveInEvents: array expected"); + message.moveInEvents = []; + for (var i = 0; i < object.moveInEvents.length; ++i) { + if (typeof object.moveInEvents[i] !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.moveInEvents: object expected"); + message.moveInEvents[i] = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.fromObject(object.moveInEvents[i]); + } + } + if (object.moveOutEvents) { + if (!Array.isArray(object.moveOutEvents)) + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.moveOutEvents: array expected"); + message.moveOutEvents = []; + for (var i = 0; i < object.moveOutEvents.length; ++i) { + if (typeof object.moveOutEvents[i] !== "object") + throw TypeError(".google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.moveOutEvents: object expected"); + message.moveOutEvents[i] = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.fromObject(object.moveOutEvents[i]); + } + } + return message; + }; + + /** + * Creates a plain object from a PartitionEventRecord message. Also converts values to other types if specified. 
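fromObject recursively converts the nested plain objects into typed MoveInEvent/MoveOutEvent messages, and verify reports per-field problems before that conversion is attempted. A short sketch under the same assumptions as above:

    const protos = require('./protos/protos'); // path assumed
    const {PartitionEventRecord} = protos.google.spanner.v1.ChangeStreamRecord;

    const plain = {
      recordSequence: '3',                          // example values
      moveInEvents: [{sourcePartitionToken: 'p0'}],
    };
    console.log(PartitionEventRecord.verify(plain)); // null, i.e. valid
    const message = PartitionEventRecord.fromObject(plain);
    console.log(message.moveInEvents[0] instanceof
        PartitionEventRecord.MoveInEvent);           // true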
+ * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord} message PartitionEventRecord + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PartitionEventRecord.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.moveInEvents = []; + object.moveOutEvents = []; + } + if (options.defaults) { + object.commitTimestamp = null; + object.recordSequence = ""; + object.partitionToken = ""; + } + if (message.commitTimestamp != null && message.hasOwnProperty("commitTimestamp")) + object.commitTimestamp = $root.google.protobuf.Timestamp.toObject(message.commitTimestamp, options); + if (message.recordSequence != null && message.hasOwnProperty("recordSequence")) + object.recordSequence = message.recordSequence; + if (message.partitionToken != null && message.hasOwnProperty("partitionToken")) + object.partitionToken = message.partitionToken; + if (message.moveInEvents && message.moveInEvents.length) { + object.moveInEvents = []; + for (var j = 0; j < message.moveInEvents.length; ++j) + object.moveInEvents[j] = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.toObject(message.moveInEvents[j], options); + } + if (message.moveOutEvents && message.moveOutEvents.length) { + object.moveOutEvents = []; + for (var j = 0; j < message.moveOutEvents.length; ++j) + object.moveOutEvents[j] = $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.toObject(message.moveOutEvents[j], options); + } + return object; + }; + + /** + * Converts this PartitionEventRecord to JSON. + * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @instance + * @returns {Object.} JSON object + */ + PartitionEventRecord.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for PartitionEventRecord + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PartitionEventRecord.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.PartitionEventRecord"; + }; + + PartitionEventRecord.MoveInEvent = (function() { + + /** + * Properties of a MoveInEvent. + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @interface IMoveInEvent + * @property {string|null} [sourcePartitionToken] MoveInEvent sourcePartitionToken + */ + + /** + * Constructs a new MoveInEvent. + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @classdesc Represents a MoveInEvent. + * @implements IMoveInEvent + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent=} [properties] Properties to set + */ + function MoveInEvent(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MoveInEvent sourcePartitionToken. 
+ * @member {string} sourcePartitionToken + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @instance + */ + MoveInEvent.prototype.sourcePartitionToken = ""; + + /** + * Creates a new MoveInEvent instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent} MoveInEvent instance + */ + MoveInEvent.create = function create(properties) { + return new MoveInEvent(properties); + }; + + /** + * Encodes the specified MoveInEvent message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.verify|verify} messages. + * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent} message MoveInEvent message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MoveInEvent.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.sourcePartitionToken != null && Object.hasOwnProperty.call(message, "sourcePartitionToken")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.sourcePartitionToken); + return writer; + }; + + /** + * Encodes the specified MoveInEvent message, length delimited. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveInEvent} message MoveInEvent message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MoveInEvent.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MoveInEvent message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent} MoveInEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MoveInEvent.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.sourcePartitionToken = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MoveInEvent message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent} MoveInEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MoveInEvent.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MoveInEvent message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MoveInEvent.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.sourcePartitionToken != null && message.hasOwnProperty("sourcePartitionToken")) + if (!$util.isString(message.sourcePartitionToken)) + return "sourcePartitionToken: string expected"; + return null; + }; + + /** + * Creates a MoveInEvent message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent} MoveInEvent + */ + MoveInEvent.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent(); + if (object.sourcePartitionToken != null) + message.sourcePartitionToken = String(object.sourcePartitionToken); + return message; + }; + + /** + * Creates a plain object from a MoveInEvent message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent} message MoveInEvent + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MoveInEvent.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.sourcePartitionToken = ""; + if (message.sourcePartitionToken != null && message.hasOwnProperty("sourcePartitionToken")) + object.sourcePartitionToken = message.sourcePartitionToken; + return object; + }; + + /** + * Converts this MoveInEvent to JSON. 
+ * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @instance + * @returns {Object.} JSON object + */ + MoveInEvent.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MoveInEvent + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MoveInEvent.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveInEvent"; + }; + + return MoveInEvent; + })(); + + PartitionEventRecord.MoveOutEvent = (function() { + + /** + * Properties of a MoveOutEvent. + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @interface IMoveOutEvent + * @property {string|null} [destinationPartitionToken] MoveOutEvent destinationPartitionToken + */ + + /** + * Constructs a new MoveOutEvent. + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord + * @classdesc Represents a MoveOutEvent. + * @implements IMoveOutEvent + * @constructor + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent=} [properties] Properties to set + */ + function MoveOutEvent(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * MoveOutEvent destinationPartitionToken. + * @member {string} destinationPartitionToken + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @instance + */ + MoveOutEvent.prototype.destinationPartitionToken = ""; + + /** + * Creates a new MoveOutEvent instance using the specified properties. + * @function create + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent=} [properties] Properties to set + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent} MoveOutEvent instance + */ + MoveOutEvent.create = function create(properties) { + return new MoveOutEvent(properties); + }; + + /** + * Encodes the specified MoveOutEvent message. Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.verify|verify} messages. + * @function encode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent} message MoveOutEvent message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MoveOutEvent.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.destinationPartitionToken != null && Object.hasOwnProperty.call(message, "destinationPartitionToken")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.destinationPartitionToken); + return writer; + }; + + /** + * Encodes the specified MoveOutEvent message, length delimited. 
Does not implicitly {@link google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.IMoveOutEvent} message MoveOutEvent message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MoveOutEvent.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MoveOutEvent message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent} MoveOutEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MoveOutEvent.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.destinationPartitionToken = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MoveOutEvent message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent} MoveOutEvent + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MoveOutEvent.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MoveOutEvent message. + * @function verify + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MoveOutEvent.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.destinationPartitionToken != null && message.hasOwnProperty("destinationPartitionToken")) + if (!$util.isString(message.destinationPartitionToken)) + return "destinationPartitionToken: string expected"; + return null; + }; + + /** + * Creates a MoveOutEvent message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent} MoveOutEvent + */ + MoveOutEvent.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent) + return object; + var message = new $root.google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent(); + if (object.destinationPartitionToken != null) + message.destinationPartitionToken = String(object.destinationPartitionToken); + return message; + }; + + /** + * Creates a plain object from a MoveOutEvent message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent} message MoveOutEvent + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MoveOutEvent.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.destinationPartitionToken = ""; + if (message.destinationPartitionToken != null && message.hasOwnProperty("destinationPartitionToken")) + object.destinationPartitionToken = message.destinationPartitionToken; + return object; + }; + + /** + * Converts this MoveOutEvent to JSON. + * @function toJSON + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @instance + * @returns {Object.} JSON object + */ + MoveOutEvent.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for MoveOutEvent + * @function getTypeUrl + * @memberof google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MoveOutEvent.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent"; + }; + + return MoveOutEvent; + })(); + + return PartitionEventRecord; + })(); + + return ChangeStreamRecord; + })(); + return v1; })(); diff --git a/protos/protos.json b/protos/protos.json index c178e9765..303527c69 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -7675,7 +7675,7 @@ "csharp_namespace": "Google.Cloud.Spanner.V1", "go_package": "cloud.google.com/go/spanner/apiv1/spannerpb;spannerpb", "java_multiple_files": true, - "java_outer_classname": "TypeProto", + "java_outer_classname": "ChangeStreamProto", "java_package": "com.google.spanner.v1", "php_namespace": "Google\\Cloud\\Spanner\\V1", "ruby_package": "Google::Cloud::Spanner::V1", @@ -9373,6 +9373,256 @@ "PG_JSONB": 3, "PG_OID": 4 } + }, + "ChangeStreamRecord": { + "oneofs": { + "record": { + "oneof": [ + "dataChangeRecord", + "heartbeatRecord", + "partitionStartRecord", + "partitionEndRecord", + "partitionEventRecord" + ] + } + }, + "fields": { + "dataChangeRecord": { + "type": "DataChangeRecord", + "id": 1 + }, + "heartbeatRecord": { + "type": "HeartbeatRecord", + "id": 2 + }, + "partitionStartRecord": { + "type": 
"PartitionStartRecord", + "id": 3 + }, + "partitionEndRecord": { + "type": "PartitionEndRecord", + "id": 4 + }, + "partitionEventRecord": { + "type": "PartitionEventRecord", + "id": 5 + } + }, + "nested": { + "DataChangeRecord": { + "fields": { + "commitTimestamp": { + "type": "google.protobuf.Timestamp", + "id": 1 + }, + "recordSequence": { + "type": "string", + "id": 2 + }, + "serverTransactionId": { + "type": "string", + "id": 3 + }, + "isLastRecordInTransactionInPartition": { + "type": "bool", + "id": 4 + }, + "table": { + "type": "string", + "id": 5 + }, + "columnMetadata": { + "rule": "repeated", + "type": "ColumnMetadata", + "id": 6 + }, + "mods": { + "rule": "repeated", + "type": "Mod", + "id": 7 + }, + "modType": { + "type": "ModType", + "id": 8 + }, + "valueCaptureType": { + "type": "ValueCaptureType", + "id": 9 + }, + "numberOfRecordsInTransaction": { + "type": "int32", + "id": 10 + }, + "numberOfPartitionsInTransaction": { + "type": "int32", + "id": 11 + }, + "transactionTag": { + "type": "string", + "id": 12 + }, + "isSystemTransaction": { + "type": "bool", + "id": 13 + } + }, + "nested": { + "ColumnMetadata": { + "fields": { + "name": { + "type": "string", + "id": 1 + }, + "type": { + "type": "Type", + "id": 2 + }, + "isPrimaryKey": { + "type": "bool", + "id": 3 + }, + "ordinalPosition": { + "type": "int64", + "id": 4 + } + } + }, + "ModValue": { + "fields": { + "columnMetadataIndex": { + "type": "int32", + "id": 1 + }, + "value": { + "type": "google.protobuf.Value", + "id": 2 + } + } + }, + "Mod": { + "fields": { + "keys": { + "rule": "repeated", + "type": "ModValue", + "id": 1 + }, + "oldValues": { + "rule": "repeated", + "type": "ModValue", + "id": 2 + }, + "newValues": { + "rule": "repeated", + "type": "ModValue", + "id": 3 + } + } + }, + "ModType": { + "values": { + "MOD_TYPE_UNSPECIFIED": 0, + "INSERT": 10, + "UPDATE": 20, + "DELETE": 30 + } + }, + "ValueCaptureType": { + "values": { + "VALUE_CAPTURE_TYPE_UNSPECIFIED": 0, + "OLD_AND_NEW_VALUES": 10, + "NEW_VALUES": 20, + "NEW_ROW": 30, + "NEW_ROW_AND_OLD_VALUES": 40 + } + } + } + }, + "HeartbeatRecord": { + "fields": { + "timestamp": { + "type": "google.protobuf.Timestamp", + "id": 1 + } + } + }, + "PartitionStartRecord": { + "fields": { + "startTimestamp": { + "type": "google.protobuf.Timestamp", + "id": 1 + }, + "recordSequence": { + "type": "string", + "id": 2 + }, + "partitionTokens": { + "rule": "repeated", + "type": "string", + "id": 3 + } + } + }, + "PartitionEndRecord": { + "fields": { + "endTimestamp": { + "type": "google.protobuf.Timestamp", + "id": 1 + }, + "recordSequence": { + "type": "string", + "id": 2 + }, + "partitionToken": { + "type": "string", + "id": 3 + } + } + }, + "PartitionEventRecord": { + "fields": { + "commitTimestamp": { + "type": "google.protobuf.Timestamp", + "id": 1 + }, + "recordSequence": { + "type": "string", + "id": 2 + }, + "partitionToken": { + "type": "string", + "id": 3 + }, + "moveInEvents": { + "rule": "repeated", + "type": "MoveInEvent", + "id": 4 + }, + "moveOutEvents": { + "rule": "repeated", + "type": "MoveOutEvent", + "id": 5 + } + }, + "nested": { + "MoveInEvent": { + "fields": { + "sourcePartitionToken": { + "type": "string", + "id": 1 + } + } + }, + "MoveOutEvent": { + "fields": { + "destinationPartitionToken": { + "type": "string", + "id": 1 + } + } + } + } + } + } } } } diff --git a/src/v1/spanner_proto_list.json b/src/v1/spanner_proto_list.json index de809666e..560228663 100644 --- a/src/v1/spanner_proto_list.json +++ b/src/v1/spanner_proto_list.json @@ -1,4 +1,5 @@ [ 
+ "../../protos/google/spanner/v1/change_stream.proto", "../../protos/google/spanner/v1/commit_response.proto", "../../protos/google/spanner/v1/keys.proto", "../../protos/google/spanner/v1/mutation.proto", From ba542012f27c992e5bef27c4cf714f71514d11d2 Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Wed, 4 Jun 2025 07:11:59 +0000 Subject: [PATCH 08/31] chore: add sample for statement timeout within a transaction (#2314) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: add sample for statement timeout within a transaction * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix-presubmit --------- Co-authored-by: Owl Bot --- README.md | 1 + samples/README.md | 18 +++++++ samples/statement-timeout.js | 73 +++++++++++++++++++++++++++++ samples/system-test/spanner.test.js | 9 ++++ 4 files changed, 101 insertions(+) create mode 100644 samples/statement-timeout.js diff --git a/README.md b/README.md index cff2046b8..e9e138334 100644 --- a/README.md +++ b/README.md @@ -202,6 +202,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-spanner/tre | Alters a sequence in a GoogleSQL database. | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/sequence-alter.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/sequence-alter.js,samples/README.md) | | Creates sequence in GoogleSQL database. | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/sequence-create.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/sequence-create.js,samples/README.md) | | Drops a sequence in GoogleSQL database. 
| [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/sequence-drop.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/sequence-drop.js,samples/README.md) | +| Executes a read/write transaction with statement timeout | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/statement-timeout.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/statement-timeout.js,samples/README.md) | | Struct | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/struct.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/struct.js,samples/README.md) | | Alters a table with foreign key delete cascade action | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/table-alter-with-foreign-key-delete-cascade.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/table-alter-with-foreign-key-delete-cascade.js,samples/README.md) | | Creates a table with foreign key delete cascade action | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/table-create-with-foreign-key-delete-cascade.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/table-create-with-foreign-key-delete-cascade.js,samples/README.md) | diff --git a/samples/README.md b/samples/README.md index b91aad776..b7670c8c7 100644 --- a/samples/README.md +++ b/samples/README.md @@ -127,6 +127,7 @@ and automatic, synchronous replication for high availability. * [Alters a sequence in a GoogleSQL database.](#alters-a-sequence-in-a-googlesql-database.) * [Creates sequence in GoogleSQL database.](#creates-sequence-in-googlesql-database.) * [Drops a sequence in GoogleSQL database.](#drops-a-sequence-in-googlesql-database.) + * [Executes a read/write transaction with statement timeout](#executes-a-read/write-transaction-with-statement-timeout) * [Struct](#struct) * [Alters a table with foreign key delete cascade action](#alters-a-table-with-foreign-key-delete-cascade-action) * [Creates a table with foreign key delete cascade action](#creates-a-table-with-foreign-key-delete-cascade-action) @@ -2074,6 +2075,23 @@ __Usage:__ +### Executes a read/write transaction with statement timeout + +View the [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/statement-timeout.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/statement-timeout.js,samples/README.md) + +__Usage:__ + + +`node statement-timeout.js ` + + +----- + + + + ### Struct View the [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/struct.js). 
diff --git a/samples/statement-timeout.js b/samples/statement-timeout.js new file mode 100644 index 000000000..da3127475 --- /dev/null +++ b/samples/statement-timeout.js @@ -0,0 +1,73 @@ +/** + * Copyright 2025 Google LLC + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// sample-metadata: +// title: Executes a read/write transaction with statement timeout +// usage: node statement-timeout.js + +'use strict'; + +function main(instanceId, databaseId, projectId) { + // [START spanner_set_statement_timeout] + /** + * TODO(developer): Uncomment the following lines before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance'; + // const databaseId = 'my-database'; + + // Imports the Google Cloud client library + const {Spanner} = require('@google-cloud/spanner'); + + // Creates a client + const spanner = new Spanner({ + projectId: projectId, + }); + + async function executeSqlWithTimeout() { + // Gets a reference to a Cloud Spanner instance and database. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + try { + await database.runTransactionAsync(async tx => { + // NOTE: You can use gaxOptions to set a custom timeout for a single RPC + // invocation. This timeout can however ONLY BE SHORTER than the default timeout + // for the RPC. If you set a timeout that is longer than the default timeout, then + // the default timeout will be used. 
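+      // The timeout below is expressed in milliseconds. If the statement has
+      // not completed within that time, the RPC fails with a DEADLINE_EXCEEDED
+      // error, which is surfaced through the catch block further down.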
+ const query = { + sql: "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES (110, 'George', 'Washington')", + gaxOptions: { + timeout: 60000, // 60 seconds timeout + }, + }; + const results = await tx.run(query); + console.log(`${results[1].rowCountExact} record inserted.`); + await tx.commit(); + }); + } catch (err) { + console.error('ERROR:', err); + } finally { + await database.close(); + } + } + executeSqlWithTimeout(); + // [END spanner_set_statement_timeout] +} +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/system-test/spanner.test.js b/samples/system-test/spanner.test.js index ef54d3bba..a7f93857f 100644 --- a/samples/system-test/spanner.test.js +++ b/samples/system-test/spanner.test.js @@ -37,6 +37,7 @@ const rpcPriorityQueryPartitionsCommand = const transactionCmd = 'node transaction.js'; const transactionTagCommand = 'node transaction-tag.js'; const transactionTimeoutCommand = 'node transaction-timeout.js'; +const statementTimeoutCommand = 'node statement-timeout.js'; const requestTagCommand = 'node request-tag.js'; const timestampCmd = 'node timestamp.js'; const structCmd = 'node struct.js'; @@ -1246,6 +1247,14 @@ describe('Autogenerated Admin Clients', () => { assert.include(output, '1 record inserted.'); }); + // read_write_transaction with statement timeout + it('should execute a read/write transaction with a statement timeout of 60 seconds', async () => { + const output = execSync( + `${statementTimeoutCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, + ); + assert.include(output, '1 record inserted.'); + }); + // add_json_column it('should add a VenueDetails column to Venues example table', async () => { const output = execSync( From 61c571c729c2a065df6ff166db784a6e6eaef74d Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Fri, 6 Jun 2025 05:54:48 +0000 Subject: [PATCH 09/31] fix: docs-test (#2297) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: docs-test * fix: docs-test * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * refactor package.json * add config file * update linkinator config * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert change in concurrency * use github token to increase number of requests * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * downgrade linkinator version * refactor concurrency * skip samples in linkinator * refactor --------- Co-authored-by: Owl Bot --- linkinator.config.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/linkinator.config.json b/linkinator.config.json index befd23c86..cbd5b15dc 100644 --- a/linkinator.config.json +++ b/linkinator.config.json @@ -5,7 +5,8 @@ "www.googleapis.com", "img.shields.io", "https://console.cloud.google.com/cloudshell", - "https://support.google.com" + "https://support.google.com", + "^https://github\\.com/googleapis/nodejs-spanner/blob/[^/]+/samples/.*" ], 
"silent": true, "concurrency": 5, From 8bd0781e8b434a421f0e0f3395439a5a86c7847c Mon Sep 17 00:00:00 2001 From: Sakthivel Subramanian <179120858+sakthivelmanii@users.noreply.github.com> Date: Fri, 6 Jun 2025 14:45:55 +0530 Subject: [PATCH 10/31] perf: Skip gRPC trailers for StreamingRead & ExecuteStreamingSql (#2313) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * perf: Skip gRPC trailers for StreamingRead & ExecuteStreamingSql * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add tests * fix lint issue --------- Co-authored-by: Owl Bot Co-authored-by: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Co-authored-by: surbhigarg92 --- src/partial-result-stream.ts | 5 +++ test/spanner.ts | 85 +++++++++++++++++++++++++++++++++++- 2 files changed, 89 insertions(+), 1 deletion(-) diff --git a/src/partial-result-stream.ts b/src/partial-result-stream.ts index 225b48f37..c79c366ad 100644 --- a/src/partial-result-stream.ts +++ b/src/partial-result-stream.ts @@ -245,6 +245,11 @@ export class PartialResultStream extends Transform implements ResultEvents { res = this._addChunk(chunk); } + if (chunk.last) { + this.emit('end'); + return; + } + if (res) { next(); } else { diff --git a/test/spanner.ts b/test/spanner.ts index 751ae6751..2b3d7578f 100644 --- a/test/spanner.ts +++ b/test/spanner.ts @@ -40,7 +40,7 @@ import {TEST_INSTANCE_NAME} from './mockserver/mockinstanceadmin'; import * as mockDatabaseAdmin from './mockserver/mockdatabaseadmin'; import * as sinon from 'sinon'; import {google} from '../protos/protos'; -import {ExecuteSqlRequest, RunResponse} from '../src/transaction'; +import {ExecuteSqlRequest, ReadRequest, RunResponse} from '../src/transaction'; import {Row} from '../src/partial-result-stream'; import {GetDatabaseOperationsOptions} from '../src/instance'; import { @@ -1390,6 +1390,89 @@ describe('Spanner with mock server', () => { } }); + it('should return the results correctly when last field is present in PartialResultSet for query', async () => { + // Setup a query result with more than maxQueued (10) PartialResultSets. + // None of the PartialResultSets include a resume token. + const sql = 'SELECT C1 FROM TestTable'; + const fields = [ + protobuf.StructType.Field.create({ + name: 'C1', + type: protobuf.Type.create({code: protobuf.TypeCode.STRING}), + }), + ]; + const metadata = new protobuf.ResultSetMetadata({ + rowType: new protobuf.StructType({ + fields, + }), + }); + const results: PartialResultSet[] = []; + for (let i = 0; i < 2; i++) { + results.push( + PartialResultSet.create({ + metadata, + values: [{stringValue: `V${i}`}], + last: i === 1, + }), + ); + } + spannerMock.putStatementResult( + sql, + mock.StatementResult.resultSet(results), + ); + + const database = newTestDatabase(); + const [rows] = await database.run(sql); + assert.equal(rows.length, 2); + await database.close(); + }); + + it('should return the results correctly when last field is present in PartialResultSet for read', async () => { + // Setup a query result with more than maxQueued (10) PartialResultSets. + // None of the PartialResultSets include a resume token. 
+ const fields = [ + protobuf.StructType.Field.create({ + name: 'C1', + type: protobuf.Type.create({code: protobuf.TypeCode.STRING}), + }), + ]; + const metadata = new protobuf.ResultSetMetadata({ + rowType: new protobuf.StructType({ + fields, + }), + }); + const results: PartialResultSet[] = []; + for (let i = 0; i < 2; i++) { + results.push( + PartialResultSet.create({ + metadata, + values: [{stringValue: `V${i}`}], + last: i === 0, + }), + ); + } + const request = { + table: 'TestTable', + keySet: { + keys: [], + all: true, + ranges: [], + }, + }; + spannerMock.putReadRequestResult( + request, + mock.ReadRequestResult.resultSet(results), + ); + + const database = newTestDatabase(); + const table = database.table('TestTable'); + const query = { + columns: ['C1'], + }; + const [rows] = await table.read(query); + assert.equal(rows.length, 1); + await database.close(); + }); + it('should handle missing parameters in query', async () => { const sql = 'SELECT * FROM tableId WHERE namedParameter = @namedParameter'; From 3676bfa60725c43f85a04ead87943be92e4a99f0 Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Wed, 11 Jun 2025 10:14:37 +0000 Subject: [PATCH 11/31] feat: track precommit token in r/w apis(multiplexed session) (#2312) * chore: track precommit token in read write apis * test: session factory * test: transaction * refactor tests --- src/session-factory.ts | 30 +++++++ src/transaction.ts | 33 +++++++- test/session-factory.ts | 94 ++++++++++++++++++++++ test/transaction.ts | 170 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 326 insertions(+), 1 deletion(-) diff --git a/src/session-factory.ts b/src/session-factory.ts index 068cac813..794e83896 100644 --- a/src/session-factory.ts +++ b/src/session-factory.ts @@ -64,6 +64,14 @@ export interface SessionFactoryInterface { */ getSessionForPartitionedOps(callback: GetSessionCallback): void; + /** + * When called returns a session for read write. + * + * @name SessionFactoryInterface#getSessionForReadWrite + * @param {GetSessionCallback} callback The callback function. + */ + getSessionForReadWrite(callback: GetSessionCallback): void; + /** * When called returns the pool object. * @@ -106,6 +114,7 @@ export class SessionFactory pool_: SessionPoolInterface; isMultiplexed: boolean; isMultiplexedPartitionOps: boolean; + isMultiplexedRW: boolean; constructor( database: Database, name: String, @@ -131,6 +140,10 @@ export class SessionFactory process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS === 'true' && process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_PARTITIONED_OPS === 'true'; + + this.isMultiplexedRW = + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS === 'true' && + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW === 'true'; // Multiplexed sessions should only be created if its enabled. if (this.isMultiplexed) { this.multiplexedSession_.on('error', this.emit.bind(database, 'error')); @@ -174,6 +187,23 @@ export class SessionFactory : this.pool_.getSession(callback); } + /** + * Retrieves a session for read write operations, selecting the appropriate session type + * based on whether multiplexed sessions are enabled. + * + * If multiplexed sessions are enabled for read write this methods delegates the request to `getSession()`, which returns + * either a multiplexed session or a regular session based on the configuration. + * + * If the multiplexed sessions are disabled, a session is retrieved from the regular session pool. 
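+   *
+   * Illustrative usage only (assumes a constructed `SessionFactory` named
+   * `sessionFactory`):
+   *
+   *   sessionFactory.getSessionForReadWrite((err, session) => {
+   *     // `session` is a multiplexed session when read/write multiplexing
+   *     // is enabled, and a pooled session otherwise.
+   *   });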
+ * + * @param {GetSessionCallback} callback The callback function. + */ + getSessionForReadWrite(callback: GetSessionCallback): void { + this.isMultiplexedRW + ? this.getSession(callback) + : this.pool_.getSession(callback); + } + /** * Returns the regular session pool object. * diff --git a/src/transaction.ts b/src/transaction.ts index eddac9c47..1023df392 100644 --- a/src/transaction.ts +++ b/src/transaction.ts @@ -226,6 +226,11 @@ export interface RunUpdateCallback { export type CommitCallback = NormalCallback; +type PrecommitTokenProvider = + | spannerClient.spanner.v1.ITransaction + | spannerClient.spanner.v1.IPartialResultSet + | spannerClient.spanner.v1.IExecuteBatchDmlResponse; + /** * @typedef {object} TimestampBounds * @property {boolean} [strong=true] Read at a timestamp where all previously @@ -286,6 +291,10 @@ export class Snapshot extends EventEmitter { protected _waitingRequests: Array<() => void>; protected _inlineBeginStarted; protected _useInRunner = false; + protected _latestPreCommitToken: + | spannerClient.spanner.v1.IMultiplexedSessionPrecommitToken + | undefined + | null; id?: Uint8Array | string; ended: boolean; metadata?: spannerClient.spanner.v1.ITransaction; @@ -371,6 +380,17 @@ export class Snapshot extends EventEmitter { opts: this._observabilityOptions, dbName: this._dbName, }; + this._latestPreCommitToken = null; + } + + protected _updatePrecommitToken(resp: PrecommitTokenProvider): void { + if ( + this._latestPreCommitToken === null || + this._latestPreCommitToken === undefined || + this._latestPreCommitToken!.seqNum! < resp.precommitToken!.seqNum! + ) { + this._latestPreCommitToken = resp.precommitToken; + } } /** @@ -477,6 +497,7 @@ export class Snapshot extends EventEmitter { if (err) { setSpanError(span, err); } else { + this._updatePrecommitToken(resp); this._update(resp); } span.end(); @@ -778,6 +799,7 @@ export class Snapshot extends EventEmitter { }, ) ?.on('response', response => { + this._updatePrecommitToken(response); if (response.metadata && response.metadata!.transaction && !this.id) { this._update(response.metadata!.transaction); } @@ -1381,6 +1403,7 @@ export class Snapshot extends EventEmitter { }, ) .on('response', response => { + this._updatePrecommitToken(response); if (response.metadata && response.metadata!.transaction && !this.id) { this._update(response.metadata!.transaction); } @@ -2040,6 +2063,8 @@ export class Transaction extends Dml { return; } + this._updatePrecommitToken(resp); + const {resultSets, status} = resp; for (const resultSet of resultSets) { if (!this.id && resultSet.metadata?.transaction) { @@ -2182,8 +2207,14 @@ export class Transaction extends Dml { const mutations = this._queuedMutations; const session = this.session.formattedName_!; + const precommitToken = this._latestPreCommitToken; const requestOptions = (options as CommitOptions).requestOptions; - const reqOpts: CommitRequest = {mutations, session, requestOptions}; + const reqOpts: CommitRequest = { + mutations, + session, + requestOptions, + precommitToken, + }; return startTrace( 'Transaction.commit', diff --git a/test/session-factory.ts b/test/session-factory.ts index 786bda168..130b78f59 100644 --- a/test/session-factory.ts +++ b/test/session-factory.ts @@ -152,6 +152,30 @@ describe('SessionFactory', () => { assert.strictEqual(sessionFactory.isMultiplexed, true); }); }); + + describe('when GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS and GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW both are disabled', () => { + before(() => { + 
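+        // Both flags must be 'true' for multiplexed read/write sessions;
+        // with them disabled the factory should report isMultiplexedRW=false.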
process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'false'; + }); + + it('should correctly initialize the isMultiplexedRW field', () => { + const sessionFactory = new SessionFactory(DATABASE, NAME, POOL_OPTIONS); + assert.strictEqual(sessionFactory.isMultiplexedRW, false); + }); + }); + + describe('when GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS and GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW both are enabled', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; + }); + + it('should correctly initialize the isMultiplexedRW field', () => { + const sessionFactory = new SessionFactory(DATABASE, NAME, POOL_OPTIONS); + assert.strictEqual(sessionFactory.isMultiplexedRW, true); + }); + }); }); describe('getSession', () => { @@ -222,6 +246,76 @@ describe('SessionFactory', () => { }); }); + describe('getSessionForReadWrite', () => { + describe('when multiplexed session for r/w disabled', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'false'; + }); + + it('should retrieve a regular session from the pool', done => { + ( + sandbox.stub(sessionFactory.pool_, 'getSession') as sinon.SinonStub + ).callsFake(callback => callback(null, fakeSession)); + sessionFactory.getSessionForReadWrite((err, resp) => { + assert.strictEqual(err, null); + assert.strictEqual(resp, fakeSession); + done(); + }); + }); + + it('should propagate errors when regular session retrieval fails', done => { + const fakeError = new Error(); + ( + sandbox.stub(sessionFactory.pool_, 'getSession') as sinon.SinonStub + ).callsFake(callback => callback(fakeError, null)); + sessionFactory.getSessionForReadWrite((err, resp) => { + assert.strictEqual(err, fakeError); + assert.strictEqual(resp, null); + done(); + }); + }); + }); + + describe('when multiplexed session for r/w enabled', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; + }); + + it('should return the multiplexed session', done => { + ( + sandbox.stub( + sessionFactory.multiplexedSession_, + 'getSession', + ) as sinon.SinonStub + ).callsFake(callback => callback(null, fakeMuxSession)); + sessionFactory.getSessionForReadWrite((err, resp) => { + assert.strictEqual(err, null); + assert.strictEqual(resp, fakeMuxSession); + assert.strictEqual(resp?.metadata.multiplexed, true); + assert.strictEqual(fakeMuxSession.metadata.multiplexed, true); + done(); + }); + }); + + it('should propagate error when multiplexed session return fails', done => { + const fakeError = new Error(); + ( + sandbox.stub( + sessionFactory.multiplexedSession_, + 'getSession', + ) as sinon.SinonStub + ).callsFake(callback => callback(fakeError, null)); + sessionFactory.getSessionForReadWrite((err, resp) => { + assert.strictEqual(err, fakeError); + assert.strictEqual(resp, null); + done(); + }); + }); + }); + }); + describe('getPool', () => { it('should return the session pool object', () => { const pool = sessionFactory.getPool(); diff --git a/test/transaction.ts b/test/transaction.ts index 50cf4717f..8fbff822c 100644 --- a/test/transaction.ts +++ b/test/transaction.ts @@ -257,6 +257,22 @@ describe('Transaction', () => { }); }); + it('should localize precommitToken if present', done => { + const 
precommitToken = { + precommitToken: Buffer.from('precommit-token-begin'), + seqNum: 1, + }; + const response = Object.assign({precommitToken}, BEGIN_RESPONSE); + + REQUEST.callsFake((_, callback) => callback(null, response)); + + snapshot.begin(err => { + assert.ifError(err); + assert.strictEqual(snapshot._latestPreCommitToken, precommitToken); + done(); + }); + }); + it('should return the response', done => { REQUEST.callsFake((_, callback) => callback(null, BEGIN_RESPONSE)); @@ -1565,6 +1581,34 @@ describe('Transaction', () => { const requestCallback = stub.lastCall.args[1]; setImmediate(requestCallback, null, fakeResponse); }); + + it('should return precommitToken in the api response', done => { + const stub = sandbox.stub(transaction, 'request'); + const expectedRowCounts = [5, 7]; + const fakeResponse = { + resultSets: [ + {stats: {rowCount: 'a', a: '5'}}, + {stats: {rowCount: 'b', b: '7'}}, + ], + precommitToken: { + precommitToken: Buffer.from('precommit-token-batch-update'), + seqNum: 1, + }, + }; + + transaction.batchUpdate( + OBJ_STATEMENTS, + (err, rowCounts, apiResponse) => { + assert.ifError(err); + assert.deepStrictEqual(rowCounts, expectedRowCounts); + assert.strictEqual(apiResponse, fakeResponse); + done(); + }, + ); + + const requestCallback = stub.lastCall.args[1]; + setImmediate(requestCallback, null, fakeResponse); + }); }); describe('begin', () => { @@ -1756,6 +1800,19 @@ describe('Transaction', () => { transaction.commit(options, assert.ifError); }); + it('should accept precommitToken', done => { + const precommitToken = { + precommitToken: Buffer.from('precommit-token-commit'), + seqNum: 1, + }; + transaction._latestPreCommitToken = precommitToken; + transaction.request = config => { + assert.strictEqual(config.reqOpts.precommitToken, precommitToken); + done(); + }; + transaction.commit(assert.ifError); + }); + it('should use the transaction `id` when set', () => { const id = 'transaction-id-123'; const stub = sandbox.stub(transaction, 'request'); @@ -2264,6 +2321,31 @@ describe('Transaction', () => { transaction.runStream(QUERY); }); + + it('should return a precommitToken in response', done => { + const QUERY: ExecuteSqlRequest = { + sql: 'SELET * FROM `MyTable`', + }; + const fakeStream = new EventEmitter(); + const fakePrecommitToken = { + precommitToken: Buffer.from('precommit-token-runStream'), + seqNum: 1, + }; + + PARTIAL_RESULT_STREAM.returns(fakeStream); + + const stream = transaction.runStream(QUERY); + assert.strictEqual(stream, fakeStream); + + stream.on('response', resp => { + assert.deepStrictEqual(resp.precommitToken, fakePrecommitToken); + done(); + }); + + fakeStream.emit('response', { + precommitToken: fakePrecommitToken, + }); + }); }); describe('createReadStream', () => { @@ -2319,6 +2401,94 @@ describe('Transaction', () => { transactionTag, }); }); + + it('should return a precommitToken in response', done => { + const TABLE = 'my-table-123'; + const fakeStream = new EventEmitter(); + const fakePrecommitToken = { + precommitToken: Buffer.from('precommit-token-createReadStream'), + seqNum: 1, + }; + + PARTIAL_RESULT_STREAM.returns(fakeStream); + + const stream = transaction.createReadStream(TABLE); + assert.strictEqual(stream, fakeStream); + + stream.on('response', resp => { + assert.deepStrictEqual(resp.precommitToken, fakePrecommitToken); + done(); + }); + + fakeStream.emit('response', { + precommitToken: fakePrecommitToken, + }); + }); + + it('should override the precommitToken with the value that has higher seqNum received in response', done 
=> { + const TABLE = 'my-table-123'; + const fakeStream = new EventEmitter(); + const fakePrecommitToken1 = { + precommitToken: Buffer.from('precommit-token1-createReadStream'), + seqNum: 1, + }; + + const fakePrecommitToken2 = { + precommitToken: Buffer.from('precommit-token2-createReadStream'), + seqNum: 2, + }; + + const fakePrecommitToken3 = { + precommitToken: Buffer.from('precommit-token3-createReadStream'), + seqNum: 0, + }; + + PARTIAL_RESULT_STREAM.returns(fakeStream); + + const stream = transaction.createReadStream(TABLE); + assert.strictEqual(stream, fakeStream); + + assert.strictEqual(transaction._latestPreCommitToken, null); + + let responseCount = 0; + + stream.on('response', resp => { + responseCount++; + if (responseCount === 1) { + assert.deepStrictEqual(resp.precommitToken, fakePrecommitToken1); + assert.deepStrictEqual( + transaction._latestPreCommitToken, + fakePrecommitToken1, + ); + } else if (responseCount === 2) { + assert.deepStrictEqual(resp.precommitToken, fakePrecommitToken2); + assert.deepStrictEqual( + transaction._latestPreCommitToken, + fakePrecommitToken2, + ); + } else if (responseCount === 3) { + // fakePrecommitToken3 should get ignored + assert.deepStrictEqual(resp.precommitToken, fakePrecommitToken3); + assert.deepStrictEqual( + transaction._latestPreCommitToken, + fakePrecommitToken2, + ); + done(); + } + }); + + fakeStream.emit('response', { + precommitToken: fakePrecommitToken1, + }); + + fakeStream.emit('response', { + precommitToken: fakePrecommitToken2, + }); + + fakeStream.emit('response', { + precommitToken: fakePrecommitToken3, + }); + }); }); }); From 192bf2bb603bca4ac481fcfd1f04974173adc6a1 Mon Sep 17 00:00:00 2001 From: Lester Szeto Date: Fri, 13 Jun 2025 04:49:44 -0700 Subject: [PATCH 12/31] feat: add metrics tracers (#2319) * feat: add metrics tracers * Set instanced meter provider * linting * Addressed review comments - Set Metric location detector to use the one defined in @google-cloud/opentelemetry-resource-util - Fixed createResourceAttributes to use previously generated client UID --------- Co-authored-by: surbhigarg92 --- package.json | 1 + src/metrics/constants.ts | 4 + src/metrics/metrics-tracer-factory.ts | 289 +++++++++++++++++++++++++ src/metrics/metrics-tracer.ts | 247 +++++++++++++++++++++ test/metrics/metrics-tracer-factory.ts | 244 +++++++++++++++++++++ test/metrics/metrics-tracer.ts | 202 +++++++++++++++++ 6 files changed, 987 insertions(+) create mode 100644 src/metrics/metrics-tracer-factory.ts create mode 100644 src/metrics/metrics-tracer.ts create mode 100644 test/metrics/metrics-tracer-factory.ts create mode 100644 test/metrics/metrics-tracer.ts diff --git a/package.json b/package.json index 1a50cfc37..4496ae972 100644 --- a/package.json +++ b/package.json @@ -65,6 +65,7 @@ "@opentelemetry/api": "^1.9.0", "@opentelemetry/context-async-hooks": "^2.0.0", "@opentelemetry/core": "^2.0.0", + "@opentelemetry/resources": "^1.8.0", "@opentelemetry/sdk-metrics": "^1.30.1", "@opentelemetry/semantic-conventions": "^1.30.0", "@types/big.js": "^6.2.2", diff --git a/src/metrics/constants.ts b/src/metrics/constants.ts index 64d740409..068ae80b7 100644 --- a/src/metrics/constants.ts +++ b/src/metrics/constants.ts @@ -16,6 +16,10 @@ export const SPANNER_METER_NAME = 'spanner-nodejs'; export const CLIENT_METRICS_PREFIX = 'spanner.googleapis.com/internal/client'; export const SPANNER_RESOURCE_TYPE = 'spanner_instance_client'; +// OTel semantic conventions +// See 
https://github.com/open-telemetry/opentelemetry-js/blob/main/semantic-conventions/README.md#unstable-semconv +export const ATTR_CLOUD_REGION = 'cloud.region'; + // Monitored resource labels export const MONITORED_RES_LABEL_KEY_PROJECT = 'project_id'; export const MONITORED_RES_LABEL_KEY_INSTANCE = 'instance_id'; diff --git a/src/metrics/metrics-tracer-factory.ts b/src/metrics/metrics-tracer-factory.ts new file mode 100644 index 000000000..f8f82c9bb --- /dev/null +++ b/src/metrics/metrics-tracer-factory.ts @@ -0,0 +1,289 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as crypto from 'crypto'; +import * as os from 'os'; +import * as process from 'process'; +import {v4 as uuidv4} from 'uuid'; +import {MeterProvider} from '@opentelemetry/sdk-metrics'; +import {Counter, Histogram} from '@opentelemetry/api'; +import {detectResources, Resource} from '@opentelemetry/resources'; +import {GcpDetectorSync} from '@google-cloud/opentelemetry-resource-util'; +import * as Constants from './constants'; +import {MetricsTracer} from './metrics-tracer'; +const version = require('../../../package.json').version; + +export class MetricsTracerFactory { + private static _instance: MetricsTracerFactory | null = null; + private static _meterProvider: MeterProvider | null = null; + private _clientAttributes: {[key: string]: string}; + private _instrumentAttemptCounter!: Counter; + private _instrumentAttemptLatency!: Histogram; + private _instrumentOperationCounter!: Counter; + private _instrumentOperationLatency!: Histogram; + private _instrumentGfeConnectivityErrorCount!: Counter; + private _instrumentGfeLatency!: Histogram; + private _clientUid: string; + public enabled: boolean; + + private constructor(enabled = false) { + this.enabled = enabled; + this._createMetricInstruments(); + + this._clientUid = MetricsTracerFactory._generateClientUId(); + this._clientAttributes = this.createClientAttributes(); + } + + private createClientAttributes(): {[key: string]: string} { + const clientName = `${Constants.SPANNER_METER_NAME}/${version}`; + return { + [Constants.METRIC_LABEL_KEY_CLIENT_NAME]: clientName, + [Constants.METRIC_LABEL_KEY_CLIENT_UID]: this._clientUid, + }; + } + + /** + Create set of attributes for resource metrics + */ + public async createResourceAttributes( + projectId: string, + ): Promise<{[key: string]: string}> { + const clientHash = MetricsTracerFactory._generateClientHash( + this._clientUid, + ); + const location = await MetricsTracerFactory._detectClientLocation(); + return { + [Constants.MONITORED_RES_LABEL_KEY_PROJECT]: projectId, + [Constants.MONITORED_RES_LABEL_KEY_INSTANCE]: 'unknown', + [Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH]: clientHash, + // Skipping instance config until we have a way to get it + [Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG]: 'unknown', + [Constants.MONITORED_RES_LABEL_KEY_LOCATION]: location, + }; + } + + public static getInstance(enabled: boolean) { + // Create a singleton instance, enabling/disabling 
metrics can only be done on the initial call + if (MetricsTracerFactory._instance === null) { + MetricsTracerFactory._instance = new MetricsTracerFactory(enabled); + } + return MetricsTracerFactory._instance; + } + + public static getMeterProvider( + resourceAttributes: {[key: string]: string} = {}, + ): MeterProvider { + if (MetricsTracerFactory._meterProvider === null) { + const resource = new Resource(resourceAttributes); + MetricsTracerFactory._meterProvider = new MeterProvider({ + resource: resource, + }); + } + + return MetricsTracerFactory._meterProvider; + } + + public static resetMeterProvider() { + MetricsTracerFactory._meterProvider = null; + } + + get instrumentAttemptLatency(): Histogram { + return this._instrumentAttemptLatency; + } + + get instrumentAttemptCounter(): Counter { + return this._instrumentAttemptCounter; + } + + get instrumentOperationLatency(): Histogram { + return this._instrumentOperationLatency; + } + + get instrumentOperationCounter(): Counter { + return this._instrumentOperationCounter; + } + + get instrumentGfeConnectivityErrorCount(): Counter { + return this._instrumentGfeConnectivityErrorCount; + } + + get instrumentGfeLatency(): Histogram { + return this._instrumentGfeLatency; + } + + get clientAttributes(): Record { + return this._clientAttributes; + } + + set project(project: string) { + this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT] = project; + } + + set instance(instance: string) { + this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE] = + instance; + } + + set instanceConfig(instanceConfig: string) { + this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG] = + instanceConfig; + } + + set location(location: string) { + this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION] = + location; + } + + set clientHash(hash: string) { + this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH] = + hash; + } + + set clientUid(clientUid: string) { + this._clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_UID] = clientUid; + } + + set clientName(clientName: string) { + this._clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME] = clientName; + } + + set database(database: string) { + this._clientAttributes[Constants.METRIC_LABEL_KEY_DATABASE] = database; + } + + public createMetricsTracer(): MetricsTracer { + return new MetricsTracer( + this._clientAttributes, + this._instrumentAttemptCounter, + this._instrumentAttemptLatency, + this._instrumentOperationCounter, + this._instrumentOperationLatency, + this._instrumentGfeConnectivityErrorCount, + this._instrumentGfeLatency, + this.enabled, + ); + } + + private _createMetricInstruments() { + const meterProvider = MetricsTracerFactory.getMeterProvider(); + const meter = meterProvider.getMeter(Constants.SPANNER_METER_NAME, version); + + this._instrumentAttemptLatency = meter.createHistogram( + Constants.METRIC_NAME_ATTEMPT_LATENCIES, + {unit: 'ms', description: 'Time an individual attempt took.'}, + ); + + this._instrumentAttemptCounter = meter.createCounter( + Constants.METRIC_NAME_ATTEMPT_COUNT, + {unit: '1', description: 'Number of attempts.'}, + ); + + this._instrumentOperationLatency = meter.createHistogram( + Constants.METRIC_NAME_OPERATION_LATENCIES, + { + unit: 'ms', + description: + 'Total time until final operation success or failure, including retries and backoff.', + }, + ); + + this._instrumentOperationCounter = meter.createCounter( + Constants.METRIC_NAME_OPERATION_COUNT, + {unit: '1', description: 'Number of operations.'}, 
+ ); + + this._instrumentGfeLatency = meter.createHistogram( + Constants.METRIC_NAME_GFE_LATENCIES, + { + unit: 'ms', + description: + "Latency between Google's network receiving an RPC and reading back the first byte of the response", + }, + ); + + this._instrumentGfeConnectivityErrorCount = meter.createCounter( + Constants.METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + { + unit: '1', + description: + 'Number of requests that failed to reach the Google network.', + }, + ); + } + + /** + * Generates a unique identifier for the client_uid metric field. The identifier is composed of a + * UUID, the process ID (PID), and the hostname. + */ + private static _generateClientUId(): string { + const identifier = uuidv4(); + const pid = process.pid.toString(); + let hostname = 'localhost'; + + try { + hostname = os.hostname(); + } catch (err) { + console.warn('Unable to get the hostname.', err); + } + + return `${identifier}@${pid}@${hostname}`; + } + + /** + * Generates a 6-digit zero-padded lowercase hexadecimal hash using the 10 most significant bits + * of a 64-bit hash value. + * + * The primary purpose of this function is to generate a hash value for the `client_hash` + * resource label using `client_uid` metric field. The range of values is chosen to be small + * enough to keep the cardinality of the Resource targets under control. Note: If at later time + * the range needs to be increased, it can be done by increasing the value of `kPrefixLength` to + * up to 24 bits without changing the format of the returned value. + */ + private static _generateClientHash(clientUid: string): string { + if (clientUid === null || clientUid === undefined) { + return '000000'; + } + + const hash = crypto.createHash('sha256'); + hash.update(clientUid); + const digest = hash.digest('hex'); + const hashPart = digest.substring(0, 16); + const longHash = BigInt('0x' + hashPart); + const kPrefixLength = 10; + const shiftedValue = longHash >> BigInt(64 - kPrefixLength); + return shiftedValue.toString(16).padStart(6, '0'); + } + + /** + * Gets the location (region) of the client, otherwise returns to the "global" region. + */ + private static async _detectClientLocation(): Promise { + const defaultRegion = 'global'; + try { + const resource = await detectResources({ + detectors: [new GcpDetectorSync()], + }); + + await resource?.waitForAsyncAttributes?.(); + + const region = resource.attributes[Constants.ATTR_CLOUD_REGION]; + if (typeof region === 'string' && region) { + return region; + } + } catch (err) { + console.warn('Unable to detect location.', err); + } + return defaultRegion; + } +} diff --git a/src/metrics/metrics-tracer.ts b/src/metrics/metrics-tracer.ts new file mode 100644 index 000000000..ac1e92a35 --- /dev/null +++ b/src/metrics/metrics-tracer.ts @@ -0,0 +1,247 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
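+
+// This module provides three helpers behind the client-side metrics:
+//   - MetricAttemptTracer records the start time and final gRPC status of a
+//     single RPC attempt.
+//   - MetricOperationTracer groups attempts into one logical operation and
+//     tracks how many attempts it took.
+//   - MetricsTracer records attempt/operation latencies and counts, plus GFE
+//     latency and connectivity errors, on the OpenTelemetry instruments
+//     supplied by MetricsTracerFactory; every record method is a no-op when
+//     metrics are disabled.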
+ +import {status as Status} from '@grpc/grpc-js'; +import {Counter, Histogram} from '@opentelemetry/api'; +import { + METRIC_LABEL_KEY_CLIENT_NAME, + METRIC_LABEL_KEY_CLIENT_UID, + METRIC_LABEL_KEY_DATABASE, + METRIC_LABEL_KEY_METHOD, + METRIC_LABEL_KEY_STATUS, + MONITORED_RES_LABEL_KEY_CLIENT_HASH, + MONITORED_RES_LABEL_KEY_INSTANCE, + MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG, + MONITORED_RES_LABEL_KEY_LOCATION, + MONITORED_RES_LABEL_KEY_PROJECT, +} from './constants'; + +class MetricAttemptTracer { + private _startTime: Date; + public status: number; + + constructor() { + this._startTime = new Date(Date.now()); + this.status = -1; + } + + get startTime() { + return this._startTime; + } +} + +class MetricOperationTracer { + private _attemptCount: number; + private _startTime: Date; + private _currentAttempt; + public status: number; + + constructor() { + this._attemptCount = 0; + this._startTime = new Date(Date.now()); + this._currentAttempt = null; + this.status = -1; + } + + get attemptCount() { + return this._attemptCount; + } + + get currentAttempt() { + return this._currentAttempt; + } + + get startTime() { + return this._startTime; + } + + public start() { + this._startTime = new Date(Date.now()); + } + + public createNewAttempt() { + this._attemptCount += 1; + this._currentAttempt = new MetricAttemptTracer(); + } +} + +export class MetricsTracer { + public currentOperation: MetricOperationTracer = new MetricOperationTracer(); + + constructor( + private _clientAttributes: {[key: string]: string}, + private _instrumentAttemptCounter: Counter, + private _instrumentAttemptLatency: Histogram, + private _instrumentOperationCounter: Counter, + private _instrumentOperationLatency: Histogram, + private _instrumentGfeConnectivityErrorCount: Counter, + private _instrumentGfeLatency: Histogram, + public enabled: boolean, + ) {} + + private _getMillisecondTimeDifference(start: Date, end: Date): number { + return end.valueOf() - start.valueOf(); + } + + get clientAttributes() { + return this._clientAttributes; + } + + get instrumentAttemptCounter() { + return this._instrumentAttemptCounter; + } + + get instrumentAttemptLatency() { + return this._instrumentAttemptLatency; + } + + get instrumentOperationCounter() { + return this._instrumentOperationCounter; + } + + get instrumentOperationLatency() { + return this._instrumentOperationLatency; + } + + public recordAttemptStart() { + if (!this.enabled) return; + this.currentOperation.createNewAttempt(); + } + + public recordAttemptCompletion(status: number = Status.OK) { + if (!this.enabled) return; + this.currentOperation.currentAttempt.status = status; + const attemptAttributes = this._createAttemptOtelAttributes(); + const endTime = new Date(Date.now()); + const attemptLatencyMilliseconds = this._getMillisecondTimeDifference( + this.currentOperation.currentAttempt.startTime, + endTime, + ); + this.instrumentAttemptLatency.record( + attemptLatencyMilliseconds, + attemptAttributes, + ); + } + + public recordOperationStart() { + if (!this.enabled) return; + this.currentOperation = new MetricOperationTracer(); + this.currentOperation.start(); + } + + public recordOperationCompletion() { + if (!this.enabled) return; + const endTime = new Date(Date.now()); + const operationAttributes = this._createOperationOtelAttributes(); + const attemptAttributes = this._createAttemptOtelAttributes(); + const operationLatencyMilliseconds = this._getMillisecondTimeDifference( + this.currentOperation.startTime, + endTime, + ); + + this.instrumentOperationCounter.add(1, 
operationAttributes); + this.instrumentOperationLatency.record( + operationLatencyMilliseconds, + operationAttributes, + ); + this.instrumentAttemptCounter.add( + this.currentOperation.attemptCount, + attemptAttributes, + ); + } + + public recordGfeLatency(latency: number) { + if (!this.enabled) return; + this._instrumentGfeLatency.record(latency, this.clientAttributes); + } + + public recordGfeConnectivityErrorCount() { + if (!this.enabled) return; + this._instrumentGfeConnectivityErrorCount.add(1, this.clientAttributes); + } + + private _createOperationOtelAttributes() { + if (!this.enabled) return {}; + const attributes = {...this._clientAttributes}; + attributes[METRIC_LABEL_KEY_STATUS] = + this.currentOperation.status.toString(); + + return attributes; + } + + private _createAttemptOtelAttributes() { + if (!this.enabled) return {}; + const attributes = {...this._clientAttributes}; + if (this.currentOperation.currentAttempt === null) return attributes; + attributes[METRIC_LABEL_KEY_STATUS] = + this.currentOperation.currentAttempt.status.toString(); + + return attributes; + } + + set project(project: string) { + if (!(MONITORED_RES_LABEL_KEY_PROJECT in this._clientAttributes)) { + this._clientAttributes[MONITORED_RES_LABEL_KEY_PROJECT] = project; + } + } + + set instance(instance: string) { + if (!(MONITORED_RES_LABEL_KEY_INSTANCE in this._clientAttributes)) { + this._clientAttributes[MONITORED_RES_LABEL_KEY_INSTANCE] = instance; + } + } + + set instanceConfig(instanceConfig: string) { + if (!(MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG in this._clientAttributes)) { + this._clientAttributes[MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG] = + instanceConfig; + } + } + + set location(location: string) { + if (!(MONITORED_RES_LABEL_KEY_LOCATION in this._clientAttributes)) { + this._clientAttributes[MONITORED_RES_LABEL_KEY_LOCATION] = location; + } + } + + set clientHash(clientHash: string) { + if (!(MONITORED_RES_LABEL_KEY_CLIENT_HASH in this._clientAttributes)) { + this._clientAttributes[MONITORED_RES_LABEL_KEY_CLIENT_HASH] = clientHash; + } + } + + set clientUid(clientUid: string) { + if (!(METRIC_LABEL_KEY_CLIENT_UID in this._clientAttributes)) { + this._clientAttributes[METRIC_LABEL_KEY_CLIENT_UID] = clientUid; + } + } + + set clientName(clientName: string) { + if (!(METRIC_LABEL_KEY_CLIENT_NAME in this._clientAttributes)) { + this._clientAttributes[METRIC_LABEL_KEY_CLIENT_NAME] = clientName; + } + } + + set database(database: string) { + if (!(METRIC_LABEL_KEY_DATABASE in this._clientAttributes)) { + this._clientAttributes[METRIC_LABEL_KEY_DATABASE] = database; + } + } + + set methodName(methodName: string) { + if (!(METRIC_LABEL_KEY_METHOD in this._clientAttributes)) { + this._clientAttributes[METRIC_LABEL_KEY_METHOD] = methodName; + } + } +} diff --git a/test/metrics/metrics-tracer-factory.ts b/test/metrics/metrics-tracer-factory.ts new file mode 100644 index 000000000..911991e3a --- /dev/null +++ b/test/metrics/metrics-tracer-factory.ts @@ -0,0 +1,244 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +import { + MeterProvider, + PeriodicExportingMetricReader, +} from '@opentelemetry/sdk-metrics'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import * as Constants from '../../src/metrics/constants'; +import {MetricsTracerFactory} from '../../src/metrics/metrics-tracer-factory'; +import {CloudMonitoringMetricsExporter} from '../../src/metrics/spanner-metrics-exporter'; + +describe('MetricsTracerFactory', () => { + let sandbox: sinon.SinonSandbox; + let mockExporter: CloudMonitoringMetricsExporter; + let recordAttemptLatencyStub: sinon.SinonStub; + let addAttemptCounterStub: sinon.SinonStub; + let recordOperationLatencyStub: sinon.SinonStub; + let addOperationCounterStub: sinon.SinonStub; + let recordGfeLatencyStub: sinon.SinonStub; + let addGfeConnectivityErrorCountStub: sinon.SinonStub; + + before(() => { + sandbox = sinon.createSandbox(); + + recordAttemptLatencyStub = sandbox.stub(); + addAttemptCounterStub = sandbox.stub(); + recordOperationLatencyStub = sandbox.stub(); + addOperationCounterStub = sandbox.stub(); + recordGfeLatencyStub = sandbox.stub(); + addGfeConnectivityErrorCountStub = sandbox.stub(); + + const meterStub = { + createHistogram: sandbox.stub(), + createCounter: sandbox.stub(), + }; + + // Stub the methods called by _createMetricInstruments + meterStub.createHistogram + .onFirstCall() + .returns({record: recordAttemptLatencyStub}) + .onSecondCall() + .returns({record: recordOperationLatencyStub}) + .onThirdCall() + .returns({record: recordGfeLatencyStub}); + + meterStub.createCounter + .onFirstCall() + .returns({add: addAttemptCounterStub}) + .onSecondCall() + .returns({add: addOperationCounterStub}) + .onThirdCall() + .returns({add: addGfeConnectivityErrorCountStub}); + + sandbox.stub(MeterProvider.prototype, 'getMeter').returns(meterStub as any); + + // metrics provider and related objects + mockExporter = sandbox.createStubInstance(CloudMonitoringMetricsExporter); + const provider = MetricsTracerFactory.getMeterProvider(); + const reader = new PeriodicExportingMetricReader({ + exporter: mockExporter, + exportIntervalMillis: 60000, + }); + provider.addMetricReader(reader); + }); + + after(() => { + sandbox.restore(); + MetricsTracerFactory.resetMeterProvider(); + }); + + beforeEach(() => { + sandbox.resetHistory(); + }); + + it('should use the globally set meter provider', async () => { + const factory = MetricsTracerFactory.getInstance(true); + const tracer = factory.createMetricsTracer(); + + const operations = 3; + const attempts = 5; + for (let i = 0; i < operations; i++) { + tracer.recordOperationStart(); + for (let j = 0; j < attempts; j++) { + tracer.recordAttemptStart(); + // Simulate processing time during attempt + await new Promise(resolve => { + setTimeout(resolve, 50); + }); + tracer.recordAttemptCompletion(); + } + tracer.recordOperationCompletion(); + } + + assert.ok(recordOperationLatencyStub.calledWith(sinon.match.number)); + assert.strictEqual(recordOperationLatencyStub.callCount, operations); + + assert.ok(recordAttemptLatencyStub.calledWith(sinon.match.number)); + assert.strictEqual( + recordAttemptLatencyStub.callCount, + operations * attempts, + ); + }); + + it('should initialize metric instruments when enabled', () => { + const factory = MetricsTracerFactory.getInstance(true); + + assert.deepStrictEqual(factory.instrumentAttemptLatency, { + record: recordAttemptLatencyStub, + }); + 
assert.deepStrictEqual(factory.instrumentAttemptCounter, { + add: addAttemptCounterStub, + }); + assert.deepStrictEqual(factory.instrumentOperationLatency, { + record: recordOperationLatencyStub, + }); + assert.deepStrictEqual(factory.instrumentOperationCounter, { + add: addOperationCounterStub, + }); + assert.deepStrictEqual(factory.instrumentGfeLatency, { + record: recordGfeLatencyStub, + }); + assert.deepStrictEqual(factory.instrumentGfeConnectivityErrorCount, { + add: addGfeConnectivityErrorCountStub, + }); + }); + + it('should create a MetricsTracer instance', () => { + const factory = MetricsTracerFactory.getInstance(true); + const tracer = factory.createMetricsTracer(); + assert.ok(tracer); + }); + + it('should correctly set default attributes', () => { + const factory = MetricsTracerFactory.getInstance(true); + assert.ok(factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME]); + assert.ok(factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_UID]); + }); + + it('should correctly create resource attributes', async () => { + const factory = MetricsTracerFactory.getInstance(true); + const resourceAttributes = + await factory.createResourceAttributes('test-proj-id'); + + assert.strictEqual( + resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT], + 'test-proj-id', + ); + assert.ok(resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE]); + assert.ok( + resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH], + ); + assert.ok( + resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG], + ); + assert.ok(resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION]); + }); + + it('should correctly set project attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.project = 'test-project'; + assert.strictEqual( + factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT], + 'test-project', + ); + }); + + it('should correctly set instance attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.instance = 'my-instance'; + assert.strictEqual( + factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE], + 'my-instance', + ); + }); + + it('should correctly set instanceConfig attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.instanceConfig = 'my-config'; + assert.strictEqual( + factory.clientAttributes[ + Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG + ], + 'my-config', + ); + }); + + it('should correctly set location attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.location = 'us-central1'; + assert.strictEqual( + factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION], + 'us-central1', + ); + }); + + it('should correctly set clientHash attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.clientHash = 'abc123'; + assert.strictEqual( + factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH], + 'abc123', + ); + }); + + it('should correctly set clientUid attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.clientUid = 'uid123'; + assert.strictEqual( + factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_UID], + 'uid123', + ); + }); + + it('should correctly set clientName attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.clientName = 'client-app'; + assert.strictEqual( + factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME], + 
'client-app', + ); + }); + + it('should correctly set database attribute', () => { + const factory = MetricsTracerFactory.getInstance(true); + factory.database = 'my-database'; + assert.strictEqual( + factory.clientAttributes[Constants.METRIC_LABEL_KEY_DATABASE], + 'my-database', + ); + }); +}); diff --git a/test/metrics/metrics-tracer.ts b/test/metrics/metrics-tracer.ts new file mode 100644 index 000000000..9333d7221 --- /dev/null +++ b/test/metrics/metrics-tracer.ts @@ -0,0 +1,202 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {status as Status} from '@grpc/grpc-js'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import * as Constants from '../../src/metrics/constants'; +import {MetricsTracer} from '../../src/metrics/metrics-tracer'; + +const PROJECT_ID = 'test-project'; + +describe('MetricsTracer', () => { + let tracer: MetricsTracer; + let fakeAttemptCounter: any; + let fakeAttemptLatency: any; + let fakeOperationCounter: any; + let fakeOperationLatency: any; + let fakeGfeCounter: any; + let fakeGfeLatency: any; + let attributes: {[key: string]: string}; + + beforeEach(() => { + attributes = { + [Constants.MONITORED_RES_LABEL_KEY_PROJECT]: PROJECT_ID, + }; + + fakeAttemptCounter = { + add: sinon.spy(), + }; + + fakeAttemptLatency = { + record: sinon.spy(), + }; + + fakeOperationCounter = { + add: sinon.spy(), + }; + + fakeOperationLatency = { + record: sinon.spy(), + }; + + fakeGfeCounter = { + add: sinon.spy(), + }; + + fakeGfeLatency = { + record: sinon.spy(), + }; + + tracer = new MetricsTracer( + attributes, + fakeAttemptCounter, + fakeAttemptLatency, + fakeOperationCounter, + fakeOperationLatency, + fakeGfeCounter, + fakeGfeLatency, + true, // enabled + ); + }); + + describe('recordAttemptCompletion', () => { + it('should record attempt latency when enabled', () => { + tracer.recordAttemptStart(); + assert.ok(tracer.currentOperation.currentAttempt); + assert.ok(tracer.currentOperation.currentAttempt.startTime); + assert.strictEqual(tracer.currentOperation.attemptCount, 1); + + tracer.recordAttemptCompletion(Status.OK); + + assert.strictEqual(fakeAttemptLatency.record.calledOnce, true); + const [[latency, otelAttrs]] = fakeAttemptLatency.record.args; + assert.strictEqual(typeof latency, 'number'); + assert.strictEqual( + otelAttrs[Constants.MONITORED_RES_LABEL_KEY_PROJECT], + PROJECT_ID, + ); + assert.strictEqual( + otelAttrs[Constants.METRIC_LABEL_KEY_STATUS], + Status.OK.toString(), + ); + }); + + it('should do nothing if disabled', () => { + tracer.enabled = false; + tracer.recordAttemptStart(); + tracer.recordAttemptCompletion(Status.OK); + assert.strictEqual(fakeAttemptLatency.record.called, false); + }); + }); + + describe('recordOperationCompletion', () => { + it('should record operation and attempt metrics when enabled', () => { + tracer.recordOperationStart(); + assert.ok(tracer.currentOperation.startTime); + tracer.recordAttemptStart(); + tracer.recordOperationCompletion(); + + 
assert.strictEqual(fakeOperationCounter.add.calledOnce, true); + assert.strictEqual(fakeAttemptCounter.add.calledOnce, true); + assert.strictEqual(fakeOperationLatency.record.calledOnce, true); + + const [[_, opAttrs]] = fakeOperationLatency.record.args; + assert.strictEqual(opAttrs[Constants.METRIC_LABEL_KEY_STATUS], '-1'); + }); + + it('should do nothing if disabled', () => { + tracer.enabled = false; + tracer.recordOperationCompletion(); + assert.strictEqual(fakeOperationCounter.add.called, false); + assert.strictEqual(fakeOperationLatency.record.called, false); + }); + }); + + describe('recordGfeLatency', () => { + it('should record GFE latency if enabled', () => { + tracer.enabled = true; + tracer.recordGfeLatency(123); + assert.strictEqual(fakeGfeLatency.record.calledOnce, true); + }); + + it('should not record if disabled', () => { + tracer.enabled = false; + tracer.recordGfeLatency(123); + assert.strictEqual(fakeGfeLatency.record.called, false); + }); + }); + + describe('recordGfeConnectivityErrorCount', () => { + it('should increment GFE error counter if enabled', () => { + tracer.recordGfeConnectivityErrorCount(); + assert.strictEqual(fakeGfeCounter.add.calledOnce, true); + }); + + it('should not increment if disabled', () => { + tracer.enabled = false; + tracer.recordGfeConnectivityErrorCount(); + assert.strictEqual(fakeGfeCounter.add.called, false); + }); + }); + + it('should not overwrite project if already set', () => { + tracer.project = 'new-project'; + assert.strictEqual( + attributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT], + PROJECT_ID, + ); + }); + + it('should set all other attribute setters', () => { + tracer.instance = 'test-instance'; + tracer.instanceConfig = 'config'; + tracer.location = 'us-central1'; + tracer.clientHash = 'hash123'; + tracer.clientUid = 'uid123'; + tracer.clientName = 'name123'; + tracer.database = 'db123'; + tracer.methodName = 'method'; + + assert.strictEqual( + attributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE], + 'test-instance', + ); + assert.strictEqual( + attributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG], + 'config', + ); + assert.strictEqual( + attributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION], + 'us-central1', + ); + assert.strictEqual( + attributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH], + 'hash123', + ); + assert.strictEqual( + attributes[Constants.METRIC_LABEL_KEY_CLIENT_UID], + 'uid123', + ); + assert.strictEqual( + attributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME], + 'name123', + ); + assert.strictEqual( + attributes[Constants.METRIC_LABEL_KEY_DATABASE], + 'db123', + ); + assert.strictEqual(attributes[Constants.METRIC_LABEL_KEY_METHOD], 'method'); + }); +}); From e8cdbedd55f049b8c7766e97388ed045fedd1b4e Mon Sep 17 00:00:00 2001 From: surbhigarg92 Date: Fri, 13 Jun 2025 17:44:22 +0530 Subject: [PATCH 13/31] fix: Ensure context propagation works in Node.js 22 with async/await (#2326) --- src/instrument.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/instrument.ts b/src/instrument.ts index 9a07a8c0c..26f877a20 100644 --- a/src/instrument.ts +++ b/src/instrument.ts @@ -105,8 +105,9 @@ const { * associated in their respective hierarchies. 
*/ function ensureInitialContextManagerSet() { - if (context.active() === ROOT_CONTEXT) { - // If no active context was set previously, trace context propagation cannot + if (!context['_contextManager'] || context.active() === ROOT_CONTEXT) { + // If no context manager is currently set, or if the active context is the ROOT_CONTEXT, + // trace context propagation cannot // function correctly with async/await for OpenTelemetry // See {@link https://opentelemetry.io/docs/languages/js/context/#active-context} context.disable(); // Disable any prior contextManager. From 88711dee7c730824dd0dfaa113e93d9fc847442a Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Wed, 18 Jun 2025 06:13:22 +0000 Subject: [PATCH 14/31] chore: add commit retry logic (#2322) * chore: add commit retry logic * refactor test --- src/transaction.ts | 15 ++++++++++- test/session-factory.ts | 22 +++++++++++++++ test/transaction.ts | 59 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 95 insertions(+), 1 deletion(-) diff --git a/src/transaction.ts b/src/transaction.ts index 1023df392..230d61395 100644 --- a/src/transaction.ts +++ b/src/transaction.ts @@ -229,7 +229,8 @@ export type CommitCallback = type PrecommitTokenProvider = | spannerClient.spanner.v1.ITransaction | spannerClient.spanner.v1.IPartialResultSet - | spannerClient.spanner.v1.IExecuteBatchDmlResponse; + | spannerClient.spanner.v1.IExecuteBatchDmlResponse + | spannerClient.spanner.v1.ICommitResponse; /** * @typedef {object} TimestampBounds @@ -1831,6 +1832,7 @@ export class Transaction extends Dml { commitTimestamp?: PreciseDate; commitTimestampProto?: spannerClient.protobuf.ITimestamp; private _queuedMutations: spannerClient.spanner.v1.Mutation[]; + private _retryCommit: Boolean; /** * Timestamp at which the transaction was committed. 
Will be populated once @@ -1884,6 +1886,7 @@ export class Transaction extends Dml { this._options = {readWrite: options}; this._options.isolationLevel = IsolationLevel.ISOLATION_LEVEL_UNSPECIFIED; this.requestOptions = requestOptions; + this._retryCommit = false; } /** @@ -2291,6 +2294,16 @@ export class Transaction extends Dml { err: null | Error, resp: spannerClient.spanner.v1.ICommitResponse, ) => { + if ( + resp && + 'MultiplexedSessionRetry' in resp && + !this._retryCommit + ) { + this._retryCommit = true; + this._updatePrecommitToken(resp); + return this.commit(options, callback); + } + this.end(); if (err) { diff --git a/test/session-factory.ts b/test/session-factory.ts index 130b78f59..39b4557e5 100644 --- a/test/session-factory.ts +++ b/test/session-factory.ts @@ -130,6 +130,10 @@ describe('SessionFactory', () => { process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; }); + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + }); + it('should create a MultiplexedSession object', () => { assert( sessionFactory.multiplexedSession_ instanceof MultiplexedSession, @@ -171,6 +175,11 @@ describe('SessionFactory', () => { process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; }); + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'false'; + }); + it('should correctly initialize the isMultiplexedRW field', () => { const sessionFactory = new SessionFactory(DATABASE, NAME, POOL_OPTIONS); assert.strictEqual(sessionFactory.isMultiplexedRW, true); @@ -213,6 +222,10 @@ describe('SessionFactory', () => { process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; }); + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + }); + it('should return the multiplexed session', done => { ( sandbox.stub( @@ -283,6 +296,11 @@ describe('SessionFactory', () => { process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; }); + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'false'; + }); + it('should return the multiplexed session', done => { ( sandbox.stub( @@ -330,6 +348,10 @@ describe('SessionFactory', () => { process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; }); + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + }); + it('should not call the release method', () => { const releaseStub = sandbox.stub(sessionFactory.pool_, 'release'); const fakeMuxSession = createMuxSession(); diff --git a/test/transaction.ts b/test/transaction.ts index 8fbff822c..95c9883d3 100644 --- a/test/transaction.ts +++ b/test/transaction.ts @@ -1879,6 +1879,65 @@ describe('Transaction', () => { assert.strictEqual(transaction.commitTimestampProto, fakeTimestamp); }); + it('should retry commit only once upon sending precommitToken to read-only participants', () => { + const requestStub = sandbox.stub(transaction, 'request'); + + const expectedTimestamp = new PreciseDate(0); + const fakeTimestamp = {seconds: 0, nanos: 0}; + + const fakeResponse = {commitTimestamp: fakeTimestamp}; + const fakePrecommitToken = { + precommitToken: Buffer.from('precommit-token-commit'), + seqNum: 1, + }; + + transaction._latestPreCommitToken = fakePrecommitToken; + + // retry response on commit retry + const fakeCommitRetryResponse = { + commitTimestamp: null, + MultiplexedSessionRetry: 'precommitToken', + 
precommitToken: {
+          precommitToken: Buffer.from('precommit-token-commit-retry'),
+          seqNum: 2,
+        },
+      };
+
+      requestStub.onFirstCall().callsFake((_, callback) => {
+        // assert that the transaction contains the precommit token
+        assert.deepStrictEqual(
+          transaction._latestPreCommitToken,
+          fakePrecommitToken,
+        );
+        // retry commit response
+        callback(null, fakeCommitRetryResponse);
+      });
+
+      requestStub.onSecondCall().callsFake((_, callback) => {
+        // assert that, before the second commit attempt, _latestPreCommitToken
+        // contains the precommit token from the commit retry response
+        assert.deepStrictEqual(
+          transaction._latestPreCommitToken,
+          fakeCommitRetryResponse.precommitToken,
+        );
+        callback(null, fakeResponse);
+      });
+
+      transaction.commit((err, resp) => {
+        // assert there is no error
+        assert.ifError(err);
+        // make sure that the retry happens only once
+        assert.strictEqual(requestStub.callCount, 2);
+        assert.deepStrictEqual(
+          transaction.commitTimestamp,
+          expectedTimestamp,
+        );
+        assert.strictEqual(transaction.commitTimestampProto, fakeTimestamp);
+        // assert on the successful commit response
+        assert.deepStrictEqual(resp, fakeResponse);
+      });
+    });
+
     it('should return any errors and the response', () => {
       const requestStub = sandbox.stub(transaction, 'request');

From edaee7791b2d814f749ed35119dd705924984a78 Mon Sep 17 00:00:00 2001
From: surbhigarg92
Date: Tue, 24 Jun 2025 21:24:26 +0530
Subject: [PATCH 15/31] fix: pass the Span correctly (#2332)

#2320
---
 observability-test/spanner.ts | 3 ++-
 src/transaction.ts | 18 ++++++++++--------
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/observability-test/spanner.ts b/observability-test/spanner.ts
index 113484623..95e93ffb8 100644
--- a/observability-test/spanner.ts
+++ b/observability-test/spanner.ts
@@ -652,6 +652,7 @@ describe('ObservabilityOptions injection and propagation', async () => {
         ...cacheSessionEvents,
         'Using Session',
         'Starting stream',
+        'Transaction Creation Done',
       ];
       assert.deepStrictEqual(
         actualEventNames,
@@ -1729,8 +1730,8 @@ describe('Traces for ExecuteStream broken stream retries', () => {
       const expectedEventNames = [
         ...batchCreateSessionsEvents,
         'Starting stream',
-        ...waitingSessionsEvents,
         'Transaction Creation Done',
+        ...waitingSessionsEvents,
       ];
       assert.deepStrictEqual(
         actualEventNames,
diff --git a/src/transaction.ts b/src/transaction.ts
index 230d61395..90fdeaf80 100644
--- a/src/transaction.ts
+++ b/src/transaction.ts
@@ -33,7 +33,7 @@ import {
 } from './partial-result-stream';
 import {Session} from './session';
 import {Key} from './table';
-import {getActiveOrNoopSpan} from './instrument';
+import {Span} from './instrument';
 import {google as spannerClient} from '../protos/protos';
 import {
   NormalCallback,
@@ -499,7 +499,7 @@ export class Snapshot extends EventEmitter {
         setSpanError(span, err);
       } else {
         this._updatePrecommitToken(resp);
-        this._update(resp);
+        this._update(resp, span);
       }
       span.end();
       callback!(err, resp);
@@ -802,7 +802,7 @@ export class Snapshot extends EventEmitter {
       ?.on('response', response => {
         this._updatePrecommitToken(response);
         if (response.metadata && response.metadata!.transaction && !this.id) {
-          this._update(response.metadata!.transaction);
+          this._update(response.metadata!.transaction, span);
         }
       })
       .on('error', err => {
@@ -1159,7 +1159,7 @@ export class Snapshot extends EventEmitter {
       if (response.metadata) {
         metadata = response.metadata;
         if (metadata.transaction && !this.id) {
-          this._update(metadata.transaction);
+          this._update(metadata.transaction,
span); } } }) @@ -1406,7 +1406,7 @@ export class Snapshot extends EventEmitter { .on('response', response => { this._updatePrecommitToken(response); if (response.metadata && response.metadata!.transaction && !this.id) { - this._update(response.metadata!.transaction); + this._update(response.metadata!.transaction, span); } }) .on('error', err => { @@ -1617,13 +1617,15 @@ export class Snapshot extends EventEmitter { * * @param {spannerClient.spanner.v1.ITransaction} resp Response object. */ - protected _update(resp: spannerClient.spanner.v1.ITransaction): void { + protected _update( + resp: spannerClient.spanner.v1.ITransaction, + span: Span, + ): void { const {id, readTimestamp} = resp; this.id = id!; this.metadata = resp; - const span = getActiveOrNoopSpan(); span.addEvent('Transaction Creation Done', {id: this.id.toString()}); if (readTimestamp) { @@ -2071,7 +2073,7 @@ export class Transaction extends Dml { const {resultSets, status} = resp; for (const resultSet of resultSets) { if (!this.id && resultSet.metadata?.transaction) { - this._update(resultSet.metadata.transaction); + this._update(resultSet.metadata.transaction, span); } } const rowCounts: number[] = resultSets.map(({stats}) => { From 93de2af919df0f00d90bc46392e8427b7648f20a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jun 2025 15:52:16 +0530 Subject: [PATCH 16/31] chore(owlbot-nodejs): install 3.13.5 Python (#2334) * chore: install higher version of Python * chore: update to python 3.15 * update lagging dependency * fix vulnerability * change the version Source-Link: https://github.com/googleapis/synthtool/commit/ca4c7ce65c001886c12b1c9b4ee216a7a1b807d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:6062c519ce78ee08490e7ac7330eca80f00f139ef1a241c5c2b306550b60c728 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/issues-no-repro.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 604433423..4b14618e1 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:c7e4968cfc97a204a4b2381f3ecb55cabc40c4cccf88b1ef8bef0d976be87fee -# created: 2025-04-08T17:33:08.498793944Z + digest: sha256:6062c519ce78ee08490e7ac7330eca80f00f139ef1a241c5c2b306550b60c728 +# created: 2025-06-26T22:34:58.583582089Z diff --git a/.github/workflows/issues-no-repro.yaml b/.github/workflows/issues-no-repro.yaml index 9b2f70148..816d9a709 100644 --- a/.github/workflows/issues-no-repro.yaml +++ b/.github/workflows/issues-no-repro.yaml @@ -11,7 +11,7 @@ jobs: pull-requests: write steps: - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 18 - run: npm install From 6428bcd2980852c1bdbc4c3d0ab210a139e5f193 Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Tue, 1 Jul 2025 07:08:44 +0000 Subject: [PATCH 17/31] fix: unhandled exceptions from gax (#2338) --- src/index.ts | 8 ++++++-- test/index.ts | 16 ++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/src/index.ts b/src/index.ts index 39b4b437f..fd28a5699 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1523,8 +1523,12 @@ class Spanner extends GrpcService { return; } const clientName = config.client; - if (!this.clients_.has(clientName)) { - this.clients_.set(clientName, new v1[clientName](this.options)); + try { + if (!this.clients_.has(clientName)) { + this.clients_.set(clientName, new v1[clientName](this.options)); + } + } catch (err) { + callback(err, null); } const gaxClient = this.clients_.get(clientName)!; let reqOpts = extend(true, {}, config.reqOpts); diff --git a/test/index.ts b/test/index.ts index 4be0abfaf..bd1f9b372 100644 --- a/test/index.ts +++ b/test/index.ts @@ -2000,6 +2000,22 @@ describe('Spanner', () => { }); }); + it('should be able to catch any error from google-gax-library', done => { + const error = new Error('Error.'); + + fakeV1[CONFIG.client] = class { + constructor(options) { + assert.strictEqual(options, spanner.options); + throw error; + } + }; + + spanner.prepareGapicRequest_(CONFIG, err => { + assert.strictEqual(err, error); + done(); + }); + }); + it('should create and cache a gapic client', done => { fakeV1[CONFIG.client] = class { constructor(options) { From 2a6af4c36484f44929a2fac80d8f225dad5d702c Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Wed, 2 Jul 2025 10:04:06 +0000 Subject: [PATCH 18/31] fix: system test against emulator (#2339) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: system test against emulator * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- protos/protos.d.ts | 570 +++++++++++- protos/protos.js | 1980 +++++++++++++++++++++++++++++++++++++++- protos/protos.json | 263 +++++- system-test/spanner.ts | 29 +- 4 files changed, 2773 insertions(+), 69 deletions(-) diff --git a/protos/protos.d.ts b/protos/protos.d.ts index 8eaff7767..5dbcac381 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -223,6 +223,7 @@ export namespace google { /** Edition enum. 
*/ enum Edition { EDITION_UNKNOWN = 0, + EDITION_LEGACY = 900, EDITION_PROTO2 = 998, EDITION_PROTO3 = 999, EDITION_2023 = 1000, @@ -253,6 +254,9 @@ export namespace google { /** FileDescriptorProto weakDependency */ weakDependency?: (number[]|null); + /** FileDescriptorProto optionDependency */ + optionDependency?: (string[]|null); + /** FileDescriptorProto messageType */ messageType?: (google.protobuf.IDescriptorProto[]|null); @@ -302,6 +306,9 @@ export namespace google { /** FileDescriptorProto weakDependency. */ public weakDependency: number[]; + /** FileDescriptorProto optionDependency. */ + public optionDependency: string[]; + /** FileDescriptorProto messageType. */ public messageType: google.protobuf.IDescriptorProto[]; @@ -436,6 +443,9 @@ export namespace google { /** DescriptorProto reservedName */ reservedName?: (string[]|null); + + /** DescriptorProto visibility */ + visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents a DescriptorProto. */ @@ -477,6 +487,9 @@ export namespace google { /** DescriptorProto reservedName. */ public reservedName: string[]; + /** DescriptorProto visibility. */ + public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); + /** * Creates a new DescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -1324,6 +1337,9 @@ export namespace google { /** EnumDescriptorProto reservedName */ reservedName?: (string[]|null); + + /** EnumDescriptorProto visibility */ + visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents an EnumDescriptorProto. */ @@ -1350,6 +1366,9 @@ export namespace google { /** EnumDescriptorProto reservedName. */ public reservedName: string[]; + /** EnumDescriptorProto visibility. */ + public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); + /** * Creates a new EnumDescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -2284,6 +2303,9 @@ export namespace google { /** FieldOptions features */ features?: (google.protobuf.IFeatureSet|null); + /** FieldOptions featureSupport */ + featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** FieldOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -2339,6 +2361,9 @@ export namespace google { /** FieldOptions features. */ public features?: (google.protobuf.IFeatureSet|null); + /** FieldOptions featureSupport. */ + public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** FieldOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -2559,6 +2584,121 @@ export namespace google { */ public static getTypeUrl(typeUrlPrefix?: string): string; } + + /** Properties of a FeatureSupport. */ + interface IFeatureSupport { + + /** FeatureSupport editionIntroduced */ + editionIntroduced?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSupport editionDeprecated */ + editionDeprecated?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + + /** FeatureSupport deprecationWarning */ + deprecationWarning?: (string|null); + + /** FeatureSupport editionRemoved */ + editionRemoved?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); + } + + /** Represents a FeatureSupport. 
*/ + class FeatureSupport implements IFeatureSupport { + + /** + * Constructs a new FeatureSupport. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FieldOptions.IFeatureSupport); + + /** FeatureSupport editionIntroduced. */ + public editionIntroduced: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSupport editionDeprecated. */ + public editionDeprecated: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** FeatureSupport deprecationWarning. */ + public deprecationWarning: string; + + /** FeatureSupport editionRemoved. */ + public editionRemoved: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); + + /** + * Creates a new FeatureSupport instance using the specified properties. + * @param [properties] Properties to set + * @returns FeatureSupport instance + */ + public static create(properties?: google.protobuf.FieldOptions.IFeatureSupport): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @param message FeatureSupport message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @param message FeatureSupport message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Verifies a FeatureSupport message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FeatureSupport + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions.FeatureSupport; + + /** + * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. 
+ * @param message FeatureSupport + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FieldOptions.FeatureSupport, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FeatureSupport to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FeatureSupport + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } } /** Properties of an OneofOptions. */ @@ -2797,6 +2937,9 @@ export namespace google { /** EnumValueOptions debugRedact */ debugRedact?: (boolean|null); + /** EnumValueOptions featureSupport */ + featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** EnumValueOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -2819,6 +2962,9 @@ export namespace google { /** EnumValueOptions debugRedact. */ public debugRedact: boolean; + /** EnumValueOptions featureSupport. */ + public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); + /** EnumValueOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -3411,6 +3557,12 @@ export namespace google { /** FeatureSet jsonFormat */ jsonFormat?: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat|null); + + /** FeatureSet enforceNamingStyle */ + enforceNamingStyle?: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle|null); + + /** FeatureSet defaultSymbolVisibility */ + defaultSymbolVisibility?: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null); } /** Represents a FeatureSet. */ @@ -3440,6 +3592,12 @@ export namespace google { /** FeatureSet jsonFormat. */ public jsonFormat: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat); + /** FeatureSet enforceNamingStyle. */ + public enforceNamingStyle: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle); + + /** FeatureSet defaultSymbolVisibility. */ + public defaultSymbolVisibility: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility); + /** * Creates a new FeatureSet instance using the specified properties. * @param [properties] Properties to set @@ -3562,6 +3720,116 @@ export namespace google { ALLOW = 1, LEGACY_BEST_EFFORT = 2 } + + /** EnforceNamingStyle enum. */ + enum EnforceNamingStyle { + ENFORCE_NAMING_STYLE_UNKNOWN = 0, + STYLE2024 = 1, + STYLE_LEGACY = 2 + } + + /** Properties of a VisibilityFeature. */ + interface IVisibilityFeature { + } + + /** Represents a VisibilityFeature. */ + class VisibilityFeature implements IVisibilityFeature { + + /** + * Constructs a new VisibilityFeature. + * @param [properties] Properties to set + */ + constructor(properties?: google.protobuf.FeatureSet.IVisibilityFeature); + + /** + * Creates a new VisibilityFeature instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns VisibilityFeature instance + */ + public static create(properties?: google.protobuf.FeatureSet.IVisibilityFeature): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @param message VisibilityFeature message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @param message VisibilityFeature message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Verifies a VisibilityFeature message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns VisibilityFeature + */ + public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSet.VisibilityFeature; + + /** + * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. + * @param message VisibilityFeature + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.protobuf.FeatureSet.VisibilityFeature, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this VisibilityFeature to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for VisibilityFeature + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + namespace VisibilityFeature { + + /** DefaultSymbolVisibility enum. 
*/ + enum DefaultSymbolVisibility { + DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0, + EXPORT_ALL = 1, + EXPORT_TOP_LEVEL = 2, + LOCAL_ALL = 3, + STRICT = 4 + } + } } /** Properties of a FeatureSetDefaults. */ @@ -3681,8 +3949,11 @@ export namespace google { /** FeatureSetEditionDefault edition */ edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); - /** FeatureSetEditionDefault features */ - features?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault overridableFeatures */ + overridableFeatures?: (google.protobuf.IFeatureSet|null); + + /** FeatureSetEditionDefault fixedFeatures */ + fixedFeatures?: (google.protobuf.IFeatureSet|null); } /** Represents a FeatureSetEditionDefault. */ @@ -3697,8 +3968,11 @@ export namespace google { /** FeatureSetEditionDefault edition. */ public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); - /** FeatureSetEditionDefault features. */ - public features?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault overridableFeatures. */ + public overridableFeatures?: (google.protobuf.IFeatureSet|null); + + /** FeatureSetEditionDefault fixedFeatures. */ + public fixedFeatures?: (google.protobuf.IFeatureSet|null); /** * Creates a new FeatureSetEditionDefault instance using the specified properties. @@ -4231,6 +4505,13 @@ export namespace google { } } + /** SymbolVisibility enum. */ + enum SymbolVisibility { + VISIBILITY_UNSET = 0, + VISIBILITY_LOCAL = 1, + VISIBILITY_EXPORT = 2 + } + /** Properties of an Any. */ interface IAny { @@ -5374,6 +5655,24 @@ export namespace google { /** Violation description */ description?: (string|null); + + /** Violation apiService */ + apiService?: (string|null); + + /** Violation quotaMetric */ + quotaMetric?: (string|null); + + /** Violation quotaId */ + quotaId?: (string|null); + + /** Violation quotaDimensions */ + quotaDimensions?: ({ [k: string]: string }|null); + + /** Violation quotaValue */ + quotaValue?: (number|Long|string|null); + + /** Violation futureQuotaValue */ + futureQuotaValue?: (number|Long|string|null); } /** Represents a Violation. */ @@ -5391,6 +5690,24 @@ export namespace google { /** Violation description. */ public description: string; + /** Violation apiService. */ + public apiService: string; + + /** Violation quotaMetric. */ + public quotaMetric: string; + + /** Violation quotaId. */ + public quotaId: string; + + /** Violation quotaDimensions. */ + public quotaDimensions: { [k: string]: string }; + + /** Violation quotaValue. */ + public quotaValue: (number|Long|string); + + /** Violation futureQuotaValue. */ + public futureQuotaValue?: (number|Long|string|null); + /** * Creates a new Violation instance using the specified properties. * @param [properties] Properties to set @@ -5786,6 +6103,12 @@ export namespace google { /** FieldViolation description */ description?: (string|null); + + /** FieldViolation reason */ + reason?: (string|null); + + /** FieldViolation localizedMessage */ + localizedMessage?: (google.rpc.ILocalizedMessage|null); } /** Represents a FieldViolation. */ @@ -5803,6 +6126,12 @@ export namespace google { /** FieldViolation description. */ public description: string; + /** FieldViolation reason. */ + public reason: string; + + /** FieldViolation localizedMessage. */ + public localizedMessage?: (google.rpc.ILocalizedMessage|null); + /** * Creates a new FieldViolation instance using the specified properties. 
* @param [properties] Properties to set @@ -37036,6 +37365,9 @@ export namespace google { /** CommonLanguageSettings destinations */ destinations?: (google.api.ClientLibraryDestination[]|null); + + /** CommonLanguageSettings selectiveGapicGeneration */ + selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); } /** Represents a CommonLanguageSettings. */ @@ -37053,6 +37385,9 @@ export namespace google { /** CommonLanguageSettings destinations. */ public destinations: google.api.ClientLibraryDestination[]; + /** CommonLanguageSettings selectiveGapicGeneration. */ + public selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); + /** * Creates a new CommonLanguageSettings instance using the specified properties. * @param [properties] Properties to set @@ -37753,6 +38088,9 @@ export namespace google { /** PythonSettings common */ common?: (google.api.ICommonLanguageSettings|null); + + /** PythonSettings experimentalFeatures */ + experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); } /** Represents a PythonSettings. */ @@ -37767,6 +38105,9 @@ export namespace google { /** PythonSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); + /** PythonSettings experimentalFeatures. */ + public experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); + /** * Creates a new PythonSettings instance using the specified properties. * @param [properties] Properties to set @@ -37845,6 +38186,118 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + namespace PythonSettings { + + /** Properties of an ExperimentalFeatures. */ + interface IExperimentalFeatures { + + /** ExperimentalFeatures restAsyncIoEnabled */ + restAsyncIoEnabled?: (boolean|null); + + /** ExperimentalFeatures protobufPythonicTypesEnabled */ + protobufPythonicTypesEnabled?: (boolean|null); + + /** ExperimentalFeatures unversionedPackageDisabled */ + unversionedPackageDisabled?: (boolean|null); + } + + /** Represents an ExperimentalFeatures. */ + class ExperimentalFeatures implements IExperimentalFeatures { + + /** + * Constructs a new ExperimentalFeatures. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.PythonSettings.IExperimentalFeatures); + + /** ExperimentalFeatures restAsyncIoEnabled. */ + public restAsyncIoEnabled: boolean; + + /** ExperimentalFeatures protobufPythonicTypesEnabled. */ + public protobufPythonicTypesEnabled: boolean; + + /** ExperimentalFeatures unversionedPackageDisabled. */ + public unversionedPackageDisabled: boolean; + + /** + * Creates a new ExperimentalFeatures instance using the specified properties. + * @param [properties] Properties to set + * @returns ExperimentalFeatures instance + */ + public static create(properties?: google.api.PythonSettings.IExperimentalFeatures): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Encodes the specified ExperimentalFeatures message. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @param message ExperimentalFeatures message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. 
+ * @param message ExperimentalFeatures message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Verifies an ExperimentalFeatures message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns ExperimentalFeatures + */ + public static fromObject(object: { [k: string]: any }): google.api.PythonSettings.ExperimentalFeatures; + + /** + * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. + * @param message ExperimentalFeatures + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.PythonSettings.ExperimentalFeatures, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this ExperimentalFeatures to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for ExperimentalFeatures + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + /** Properties of a NodeSettings. */ interface INodeSettings { @@ -38171,6 +38624,9 @@ export namespace google { /** GoSettings common */ common?: (google.api.ICommonLanguageSettings|null); + + /** GoSettings renamedServices */ + renamedServices?: ({ [k: string]: string }|null); } /** Represents a GoSettings. */ @@ -38185,6 +38641,9 @@ export namespace google { /** GoSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); + /** GoSettings renamedServices. */ + public renamedServices: { [k: string]: string }; + /** * Creates a new GoSettings instance using the specified properties. * @param [properties] Properties to set @@ -38509,6 +38968,109 @@ export namespace google { PACKAGE_MANAGER = 20 } + /** Properties of a SelectiveGapicGeneration. 
*/ + interface ISelectiveGapicGeneration { + + /** SelectiveGapicGeneration methods */ + methods?: (string[]|null); + + /** SelectiveGapicGeneration generateOmittedAsInternal */ + generateOmittedAsInternal?: (boolean|null); + } + + /** Represents a SelectiveGapicGeneration. */ + class SelectiveGapicGeneration implements ISelectiveGapicGeneration { + + /** + * Constructs a new SelectiveGapicGeneration. + * @param [properties] Properties to set + */ + constructor(properties?: google.api.ISelectiveGapicGeneration); + + /** SelectiveGapicGeneration methods. */ + public methods: string[]; + + /** SelectiveGapicGeneration generateOmittedAsInternal. */ + public generateOmittedAsInternal: boolean; + + /** + * Creates a new SelectiveGapicGeneration instance using the specified properties. + * @param [properties] Properties to set + * @returns SelectiveGapicGeneration instance + */ + public static create(properties?: google.api.ISelectiveGapicGeneration): google.api.SelectiveGapicGeneration; + + /** + * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @param message SelectiveGapicGeneration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @param message SelectiveGapicGeneration message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.SelectiveGapicGeneration; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.SelectiveGapicGeneration; + + /** + * Verifies a SelectiveGapicGeneration message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns SelectiveGapicGeneration + */ + public static fromObject(object: { [k: string]: any }): google.api.SelectiveGapicGeneration; + + /** + * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. 
+ * @param message SelectiveGapicGeneration + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.api.SelectiveGapicGeneration, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this SelectiveGapicGeneration to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for SelectiveGapicGeneration + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + /** LaunchStage enum. */ enum LaunchStage { LAUNCH_STAGE_UNSPECIFIED = 0, diff --git a/protos/protos.js b/protos/protos.js index 89afc55e6..31694e130 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -522,6 +522,7 @@ * @name google.protobuf.Edition * @enum {number} * @property {number} EDITION_UNKNOWN=0 EDITION_UNKNOWN value + * @property {number} EDITION_LEGACY=900 EDITION_LEGACY value * @property {number} EDITION_PROTO2=998 EDITION_PROTO2 value * @property {number} EDITION_PROTO3=999 EDITION_PROTO3 value * @property {number} EDITION_2023=1000 EDITION_2023 value @@ -536,6 +537,7 @@ protobuf.Edition = (function() { var valuesById = {}, values = Object.create(valuesById); values[valuesById[0] = "EDITION_UNKNOWN"] = 0; + values[valuesById[900] = "EDITION_LEGACY"] = 900; values[valuesById[998] = "EDITION_PROTO2"] = 998; values[valuesById[999] = "EDITION_PROTO3"] = 999; values[valuesById[1000] = "EDITION_2023"] = 1000; @@ -560,6 +562,7 @@ * @property {Array.|null} [dependency] FileDescriptorProto dependency * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency + * @property {Array.|null} [optionDependency] FileDescriptorProto optionDependency * @property {Array.|null} [messageType] FileDescriptorProto messageType * @property {Array.|null} [enumType] FileDescriptorProto enumType * @property {Array.|null} [service] FileDescriptorProto service @@ -582,6 +585,7 @@ this.dependency = []; this.publicDependency = []; this.weakDependency = []; + this.optionDependency = []; this.messageType = []; this.enumType = []; this.service = []; @@ -632,6 +636,14 @@ */ FileDescriptorProto.prototype.weakDependency = $util.emptyArray; + /** + * FileDescriptorProto optionDependency. + * @member {Array.} optionDependency + * @memberof google.protobuf.FileDescriptorProto + * @instance + */ + FileDescriptorProto.prototype.optionDependency = $util.emptyArray; + /** * FileDescriptorProto messageType. 
* @member {Array.} messageType @@ -753,6 +765,9 @@ writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 14, wireType 0 =*/112).int32(message.edition); + if (message.optionDependency != null && message.optionDependency.length) + for (var i = 0; i < message.optionDependency.length; ++i) + writer.uint32(/* id 15, wireType 2 =*/122).string(message.optionDependency[i]); return writer; }; @@ -825,6 +840,12 @@ message.weakDependency.push(reader.int32()); break; } + case 15: { + if (!(message.optionDependency && message.optionDependency.length)) + message.optionDependency = []; + message.optionDependency.push(reader.string()); + break; + } case 4: { if (!(message.messageType && message.messageType.length)) message.messageType = []; @@ -927,6 +948,13 @@ if (!$util.isInteger(message.weakDependency[i])) return "weakDependency: integer[] expected"; } + if (message.optionDependency != null && message.hasOwnProperty("optionDependency")) { + if (!Array.isArray(message.optionDependency)) + return "optionDependency: array expected"; + for (var i = 0; i < message.optionDependency.length; ++i) + if (!$util.isString(message.optionDependency[i])) + return "optionDependency: string[] expected"; + } if (message.messageType != null && message.hasOwnProperty("messageType")) { if (!Array.isArray(message.messageType)) return "messageType: array expected"; @@ -981,6 +1009,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -1033,6 +1062,13 @@ for (var i = 0; i < object.weakDependency.length; ++i) message.weakDependency[i] = object.weakDependency[i] | 0; } + if (object.optionDependency) { + if (!Array.isArray(object.optionDependency)) + throw TypeError(".google.protobuf.FileDescriptorProto.optionDependency: array expected"); + message.optionDependency = []; + for (var i = 0; i < object.optionDependency.length; ++i) + message.optionDependency[i] = String(object.optionDependency[i]); + } if (object.messageType) { if (!Array.isArray(object.messageType)) throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); @@ -1096,6 +1132,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -1161,6 +1201,7 @@ object.extension = []; object.publicDependency = []; object.weakDependency = []; + object.optionDependency = []; } if (options.defaults) { object.name = ""; @@ -1217,6 +1258,11 @@ object.syntax = message.syntax; if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? 
message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + if (message.optionDependency && message.optionDependency.length) { + object.optionDependency = []; + for (var j = 0; j < message.optionDependency.length; ++j) + object.optionDependency[j] = message.optionDependency[j]; + } return object; }; @@ -1265,6 +1311,7 @@ * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options * @property {Array.|null} [reservedRange] DescriptorProto reservedRange * @property {Array.|null} [reservedName] DescriptorProto reservedName + * @property {google.protobuf.SymbolVisibility|null} [visibility] DescriptorProto visibility */ /** @@ -1370,6 +1417,14 @@ */ DescriptorProto.prototype.reservedName = $util.emptyArray; + /** + * DescriptorProto visibility. + * @member {google.protobuf.SymbolVisibility} visibility + * @memberof google.protobuf.DescriptorProto + * @instance + */ + DescriptorProto.prototype.visibility = 0; + /** * Creates a new DescriptorProto instance using the specified properties. * @function create @@ -1422,6 +1477,8 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); + if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) + writer.uint32(/* id 11, wireType 0 =*/88).int32(message.visibility); return writer; }; @@ -1514,6 +1571,10 @@ message.reservedName.push(reader.string()); break; } + case 11: { + message.visibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -1627,6 +1688,15 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + switch (message.visibility) { + default: + return "visibility: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -1726,6 +1796,26 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } + switch (object.visibility) { + default: + if (typeof object.visibility === "number") { + message.visibility = object.visibility; + break; + } + break; + case "VISIBILITY_UNSET": + case 0: + message.visibility = 0; + break; + case "VISIBILITY_LOCAL": + case 1: + message.visibility = 1; + break; + case "VISIBILITY_EXPORT": + case 2: + message.visibility = 2; + break; + } return message; }; @@ -1755,6 +1845,7 @@ if (options.defaults) { object.name = ""; object.options = null; + object.visibility = options.enums === String ? "VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -1800,6 +1891,8 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + object.visibility = options.enums === String ? $root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? 
message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -3844,6 +3937,7 @@ * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName + * @property {google.protobuf.SymbolVisibility|null} [visibility] EnumDescriptorProto visibility */ /** @@ -3904,6 +3998,14 @@ */ EnumDescriptorProto.prototype.reservedName = $util.emptyArray; + /** + * EnumDescriptorProto visibility. + * @member {google.protobuf.SymbolVisibility} visibility + * @memberof google.protobuf.EnumDescriptorProto + * @instance + */ + EnumDescriptorProto.prototype.visibility = 0; + /** * Creates a new EnumDescriptorProto instance using the specified properties. * @function create @@ -3941,6 +4043,8 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); + if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) + writer.uint32(/* id 6, wireType 0 =*/48).int32(message.visibility); return writer; }; @@ -4003,6 +4107,10 @@ message.reservedName.push(reader.string()); break; } + case 6: { + message.visibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -4071,6 +4179,15 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + switch (message.visibility) { + default: + return "visibility: enum value expected"; + case 0: + case 1: + case 2: + break; + } return null; }; @@ -4120,6 +4237,26 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } + switch (object.visibility) { + default: + if (typeof object.visibility === "number") { + message.visibility = object.visibility; + break; + } + break; + case "VISIBILITY_UNSET": + case 0: + message.visibility = 0; + break; + case "VISIBILITY_LOCAL": + case 1: + message.visibility = 1; + break; + case "VISIBILITY_EXPORT": + case 2: + message.visibility = 2; + break; + } return message; }; @@ -4144,6 +4281,7 @@ if (options.defaults) { object.name = ""; object.options = null; + object.visibility = options.enums === String ? "VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -4164,6 +4302,8 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } + if (message.visibility != null && message.hasOwnProperty("visibility")) + object.visibility = options.enums === String ? $root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? 
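// --- Illustrative sketch, not part of the upstream patch ---
// DescriptorProto and EnumDescriptorProto both gain a `visibility` field backed by the new
// google.protobuf.SymbolVisibility enum (field 11 and field 6 respectively). A minimal
// round-trip through the regenerated code, with an assumed require path:
const protos = require('./protos/protos');
const { DescriptorProto, SymbolVisibility } = protos.google.protobuf;

const local = DescriptorProto.fromObject({
  name: 'InternalOnly',
  visibility: 'VISIBILITY_LOCAL',   // the string name or the number 1 are both accepted
});
console.log(local.visibility === SymbolVisibility.VISIBILITY_LOCAL); // true
// Field 11 is written as a varint and read back by the new decode case.
const decoded = DescriptorProto.decode(DescriptorProto.encode(local).finish());
console.log(decoded.visibility); // 1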
message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -6482,6 +6622,7 @@ * @property {Array.|null} [targets] FieldOptions targets * @property {Array.|null} [editionDefaults] FieldOptions editionDefaults * @property {google.protobuf.IFeatureSet|null} [features] FieldOptions features + * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] FieldOptions featureSupport * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference @@ -6602,6 +6743,14 @@ */ FieldOptions.prototype.features = null; + /** + * FieldOptions featureSupport. + * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport + * @memberof google.protobuf.FieldOptions + * @instance + */ + FieldOptions.prototype.featureSupport = null; + /** * FieldOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -6676,6 +6825,8 @@ $root.google.protobuf.FieldOptions.EditionDefault.encode(message.editionDefaults[i], writer.uint32(/* id 20, wireType 2 =*/162).fork()).ldelim(); if (message.features != null && Object.hasOwnProperty.call(message, "features")) $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 21, wireType 2 =*/170).fork()).ldelim(); + if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) + $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 22, wireType 2 =*/178).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -6777,6 +6928,10 @@ message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } + case 22: { + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -6912,6 +7067,11 @@ if (error) return "features." + error; } + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { + var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); + if (error) + return "featureSupport." 
+ error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -7100,6 +7260,11 @@ throw TypeError(".google.protobuf.FieldOptions.features: object expected"); message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); } + if (object.featureSupport != null) { + if (typeof object.featureSupport !== "object") + throw TypeError(".google.protobuf.FieldOptions.featureSupport: object expected"); + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); @@ -7197,6 +7362,7 @@ object.debugRedact = false; object.retention = options.enums === String ? "RETENTION_UNKNOWN" : 0; object.features = null; + object.featureSupport = null; object[".google.api.resourceReference"] = null; } if (message.ctype != null && message.hasOwnProperty("ctype")) @@ -7229,6 +7395,8 @@ } if (message.features != null && message.hasOwnProperty("features")) object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) + object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -7501,6 +7669,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -7542,6 +7711,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -7641,6 +7814,488 @@ return EditionDefault; })(); + FieldOptions.FeatureSupport = (function() { + + /** + * Properties of a FeatureSupport. + * @memberof google.protobuf.FieldOptions + * @interface IFeatureSupport + * @property {google.protobuf.Edition|null} [editionIntroduced] FeatureSupport editionIntroduced + * @property {google.protobuf.Edition|null} [editionDeprecated] FeatureSupport editionDeprecated + * @property {string|null} [deprecationWarning] FeatureSupport deprecationWarning + * @property {google.protobuf.Edition|null} [editionRemoved] FeatureSupport editionRemoved + */ + + /** + * Constructs a new FeatureSupport. + * @memberof google.protobuf.FieldOptions + * @classdesc Represents a FeatureSupport. + * @implements IFeatureSupport + * @constructor + * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set + */ + function FeatureSupport(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FeatureSupport editionIntroduced. + * @member {google.protobuf.Edition} editionIntroduced + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionIntroduced = 0; + + /** + * FeatureSupport editionDeprecated. 
+ * @member {google.protobuf.Edition} editionDeprecated + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionDeprecated = 0; + + /** + * FeatureSupport deprecationWarning. + * @member {string} deprecationWarning + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.deprecationWarning = ""; + + /** + * FeatureSupport editionRemoved. + * @member {google.protobuf.Edition} editionRemoved + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + */ + FeatureSupport.prototype.editionRemoved = 0; + + /** + * Creates a new FeatureSupport instance using the specified properties. + * @function create + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport instance + */ + FeatureSupport.create = function create(properties) { + return new FeatureSupport(properties); + }; + + /** + * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSupport.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.editionIntroduced != null && Object.hasOwnProperty.call(message, "editionIntroduced")) + writer.uint32(/* id 1, wireType 0 =*/8).int32(message.editionIntroduced); + if (message.editionDeprecated != null && Object.hasOwnProperty.call(message, "editionDeprecated")) + writer.uint32(/* id 2, wireType 0 =*/16).int32(message.editionDeprecated); + if (message.deprecationWarning != null && Object.hasOwnProperty.call(message, "deprecationWarning")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.deprecationWarning); + if (message.editionRemoved != null && Object.hasOwnProperty.call(message, "editionRemoved")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.editionRemoved); + return writer; + }; + + /** + * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FeatureSupport.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer. 
+ * @function decode + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSupport.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions.FeatureSupport(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.editionIntroduced = reader.int32(); + break; + } + case 2: { + message.editionDeprecated = reader.int32(); + break; + } + case 3: { + message.deprecationWarning = reader.string(); + break; + } + case 4: { + message.editionRemoved = reader.int32(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FeatureSupport.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FeatureSupport message. 
+ * @function verify + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FeatureSupport.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) + switch (message.editionIntroduced) { + default: + return "editionIntroduced: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) + switch (message.editionDeprecated) { + default: + return "editionDeprecated: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) + if (!$util.isString(message.deprecationWarning)) + return "deprecationWarning: string expected"; + if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) + switch (message.editionRemoved) { + default: + return "editionRemoved: enum value expected"; + case 0: + case 900: + case 998: + case 999: + case 1000: + case 1001: + case 1: + case 2: + case 99997: + case 99998: + case 99999: + case 2147483647: + break; + } + return null; + }; + + /** + * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport + */ + FeatureSupport.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FieldOptions.FeatureSupport) + return object; + var message = new $root.google.protobuf.FieldOptions.FeatureSupport(); + switch (object.editionIntroduced) { + default: + if (typeof object.editionIntroduced === "number") { + message.editionIntroduced = object.editionIntroduced; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionIntroduced = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionIntroduced = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionIntroduced = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionIntroduced = 999; + break; + case "EDITION_2023": + case 1000: + message.editionIntroduced = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionIntroduced = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionIntroduced = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionIntroduced = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionIntroduced = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionIntroduced = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.editionIntroduced = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionIntroduced = 2147483647; + break; + } + switch (object.editionDeprecated) { + default: + if (typeof object.editionDeprecated === "number") { + message.editionDeprecated = object.editionDeprecated; + 
break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionDeprecated = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionDeprecated = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionDeprecated = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionDeprecated = 999; + break; + case "EDITION_2023": + case 1000: + message.editionDeprecated = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionDeprecated = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionDeprecated = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionDeprecated = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionDeprecated = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionDeprecated = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.editionDeprecated = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionDeprecated = 2147483647; + break; + } + if (object.deprecationWarning != null) + message.deprecationWarning = String(object.deprecationWarning); + switch (object.editionRemoved) { + default: + if (typeof object.editionRemoved === "number") { + message.editionRemoved = object.editionRemoved; + break; + } + break; + case "EDITION_UNKNOWN": + case 0: + message.editionRemoved = 0; + break; + case "EDITION_LEGACY": + case 900: + message.editionRemoved = 900; + break; + case "EDITION_PROTO2": + case 998: + message.editionRemoved = 998; + break; + case "EDITION_PROTO3": + case 999: + message.editionRemoved = 999; + break; + case "EDITION_2023": + case 1000: + message.editionRemoved = 1000; + break; + case "EDITION_2024": + case 1001: + message.editionRemoved = 1001; + break; + case "EDITION_1_TEST_ONLY": + case 1: + message.editionRemoved = 1; + break; + case "EDITION_2_TEST_ONLY": + case 2: + message.editionRemoved = 2; + break; + case "EDITION_99997_TEST_ONLY": + case 99997: + message.editionRemoved = 99997; + break; + case "EDITION_99998_TEST_ONLY": + case 99998: + message.editionRemoved = 99998; + break; + case "EDITION_99999_TEST_ONLY": + case 99999: + message.editionRemoved = 99999; + break; + case "EDITION_MAX": + case 2147483647: + message.editionRemoved = 2147483647; + break; + } + return message; + }; + + /** + * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {google.protobuf.FieldOptions.FeatureSupport} message FeatureSupport + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FeatureSupport.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.editionIntroduced = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.editionDeprecated = options.enums === String ? "EDITION_UNKNOWN" : 0; + object.deprecationWarning = ""; + object.editionRemoved = options.enums === String ? "EDITION_UNKNOWN" : 0; + } + if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) + object.editionIntroduced = options.enums === String ? $root.google.protobuf.Edition[message.editionIntroduced] === undefined ? 
message.editionIntroduced : $root.google.protobuf.Edition[message.editionIntroduced] : message.editionIntroduced; + if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) + object.editionDeprecated = options.enums === String ? $root.google.protobuf.Edition[message.editionDeprecated] === undefined ? message.editionDeprecated : $root.google.protobuf.Edition[message.editionDeprecated] : message.editionDeprecated; + if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) + object.deprecationWarning = message.deprecationWarning; + if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) + object.editionRemoved = options.enums === String ? $root.google.protobuf.Edition[message.editionRemoved] === undefined ? message.editionRemoved : $root.google.protobuf.Edition[message.editionRemoved] : message.editionRemoved; + return object; + }; + + /** + * Converts this FeatureSupport to JSON. + * @function toJSON + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @instance + * @returns {Object.} JSON object + */ + FeatureSupport.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FeatureSupport + * @function getTypeUrl + * @memberof google.protobuf.FieldOptions.FeatureSupport + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FeatureSupport.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FieldOptions.FeatureSupport"; + }; + + return FeatureSupport; + })(); + return FieldOptions; })(); @@ -8233,6 +8888,7 @@ * @property {boolean|null} [deprecated] EnumValueOptions deprecated * @property {google.protobuf.IFeatureSet|null} [features] EnumValueOptions features * @property {boolean|null} [debugRedact] EnumValueOptions debugRedact + * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] EnumValueOptions featureSupport * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption */ @@ -8276,6 +8932,14 @@ */ EnumValueOptions.prototype.debugRedact = false; + /** + * EnumValueOptions featureSupport. + * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport + * @memberof google.protobuf.EnumValueOptions + * @instance + */ + EnumValueOptions.prototype.featureSupport = null; + /** * EnumValueOptions uninterpretedOption. 
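// --- Illustrative sketch, not part of the upstream patch ---
// FieldOptions gains a nested FeatureSupport message (field 22), and EnumValueOptions reuses
// it as `featureSupport` (field 4), carrying edition lifecycle metadata for a field or enum
// value. The require path and example values are assumptions.
const protos = require('./protos/protos');
const { FieldOptions } = protos.google.protobuf;

const opts = FieldOptions.fromObject({
  featureSupport: {
    editionIntroduced: 'EDITION_2023',
    editionDeprecated: 'EDITION_2024',
    deprecationWarning: 'scheduled for removal; use the replacement field',
  },
});
console.log(FieldOptions.verify(opts));  // null => featureSupport is verified recursively
console.log(FieldOptions.toObject(opts).featureSupport.deprecationWarning);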
* @member {Array.} uninterpretedOption @@ -8314,6 +8978,8 @@ $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.debugRedact != null && Object.hasOwnProperty.call(message, "debugRedact")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.debugRedact); + if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) + $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -8365,6 +9031,10 @@ message.debugRedact = reader.bool(); break; } + case 4: { + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); + break; + } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -8417,6 +9087,11 @@ if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) if (typeof message.debugRedact !== "boolean") return "debugRedact: boolean expected"; + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { + var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); + if (error) + return "featureSupport." + error; + } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -8450,6 +9125,11 @@ } if (object.debugRedact != null) message.debugRedact = Boolean(object.debugRedact); + if (object.featureSupport != null) { + if (typeof object.featureSupport !== "object") + throw TypeError(".google.protobuf.EnumValueOptions.featureSupport: object expected"); + message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); + } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); @@ -8482,6 +9162,7 @@ object.deprecated = false; object.features = null; object.debugRedact = false; + object.featureSupport = null; } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; @@ -8489,6 +9170,8 @@ object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) object.debugRedact = message.debugRedact; + if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) + object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -9956,6 +10639,8 @@ * @property {google.protobuf.FeatureSet.Utf8Validation|null} [utf8Validation] FeatureSet utf8Validation * @property {google.protobuf.FeatureSet.MessageEncoding|null} [messageEncoding] FeatureSet messageEncoding * @property {google.protobuf.FeatureSet.JsonFormat|null} [jsonFormat] FeatureSet jsonFormat + * @property 
{google.protobuf.FeatureSet.EnforceNamingStyle|null} [enforceNamingStyle] FeatureSet enforceNamingStyle + * @property {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null} [defaultSymbolVisibility] FeatureSet defaultSymbolVisibility */ /** @@ -10021,6 +10706,22 @@ */ FeatureSet.prototype.jsonFormat = 0; + /** + * FeatureSet enforceNamingStyle. + * @member {google.protobuf.FeatureSet.EnforceNamingStyle} enforceNamingStyle + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.enforceNamingStyle = 0; + + /** + * FeatureSet defaultSymbolVisibility. + * @member {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility} defaultSymbolVisibility + * @memberof google.protobuf.FeatureSet + * @instance + */ + FeatureSet.prototype.defaultSymbolVisibility = 0; + /** * Creates a new FeatureSet instance using the specified properties. * @function create @@ -10057,6 +10758,10 @@ writer.uint32(/* id 5, wireType 0 =*/40).int32(message.messageEncoding); if (message.jsonFormat != null && Object.hasOwnProperty.call(message, "jsonFormat")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jsonFormat); + if (message.enforceNamingStyle != null && Object.hasOwnProperty.call(message, "enforceNamingStyle")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.enforceNamingStyle); + if (message.defaultSymbolVisibility != null && Object.hasOwnProperty.call(message, "defaultSymbolVisibility")) + writer.uint32(/* id 8, wireType 0 =*/64).int32(message.defaultSymbolVisibility); return writer; }; @@ -10117,6 +10822,14 @@ message.jsonFormat = reader.int32(); break; } + case 7: { + message.enforceNamingStyle = reader.int32(); + break; + } + case 8: { + message.defaultSymbolVisibility = reader.int32(); + break; + } default: reader.skipType(tag & 7); break; @@ -10207,6 +10920,26 @@ case 2: break; } + if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) + switch (message.enforceNamingStyle) { + default: + return "enforceNamingStyle: enum value expected"; + case 0: + case 1: + case 2: + break; + } + if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) + switch (message.defaultSymbolVisibility) { + default: + return "defaultSymbolVisibility: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + break; + } return null; }; @@ -10346,6 +11079,54 @@ message.jsonFormat = 2; break; } + switch (object.enforceNamingStyle) { + default: + if (typeof object.enforceNamingStyle === "number") { + message.enforceNamingStyle = object.enforceNamingStyle; + break; + } + break; + case "ENFORCE_NAMING_STYLE_UNKNOWN": + case 0: + message.enforceNamingStyle = 0; + break; + case "STYLE2024": + case 1: + message.enforceNamingStyle = 1; + break; + case "STYLE_LEGACY": + case 2: + message.enforceNamingStyle = 2; + break; + } + switch (object.defaultSymbolVisibility) { + default: + if (typeof object.defaultSymbolVisibility === "number") { + message.defaultSymbolVisibility = object.defaultSymbolVisibility; + break; + } + break; + case "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": + case 0: + message.defaultSymbolVisibility = 0; + break; + case "EXPORT_ALL": + case 1: + message.defaultSymbolVisibility = 1; + break; + case "EXPORT_TOP_LEVEL": + case 2: + message.defaultSymbolVisibility = 2; + break; + case "LOCAL_ALL": + case 3: + message.defaultSymbolVisibility = 3; + break; + case "STRICT": + case 4: + message.defaultSymbolVisibility = 4; + break; + } return message; }; @@ -10369,6 +11150,8 @@ 
object.utf8Validation = options.enums === String ? "UTF8_VALIDATION_UNKNOWN" : 0; object.messageEncoding = options.enums === String ? "MESSAGE_ENCODING_UNKNOWN" : 0; object.jsonFormat = options.enums === String ? "JSON_FORMAT_UNKNOWN" : 0; + object.enforceNamingStyle = options.enums === String ? "ENFORCE_NAMING_STYLE_UNKNOWN" : 0; + object.defaultSymbolVisibility = options.enums === String ? "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN" : 0; } if (message.fieldPresence != null && message.hasOwnProperty("fieldPresence")) object.fieldPresence = options.enums === String ? $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] === undefined ? message.fieldPresence : $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] : message.fieldPresence; @@ -10382,6 +11165,10 @@ object.messageEncoding = options.enums === String ? $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] === undefined ? message.messageEncoding : $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] : message.messageEncoding; if (message.jsonFormat != null && message.hasOwnProperty("jsonFormat")) object.jsonFormat = options.enums === String ? $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] === undefined ? message.jsonFormat : $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] : message.jsonFormat; + if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) + object.enforceNamingStyle = options.enums === String ? $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] === undefined ? message.enforceNamingStyle : $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] : message.enforceNamingStyle; + if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) + object.defaultSymbolVisibility = options.enums === String ? $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] === undefined ? message.defaultSymbolVisibility : $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] : message.defaultSymbolVisibility; return object; }; @@ -10509,6 +11296,219 @@ return values; })(); + /** + * EnforceNamingStyle enum. + * @name google.protobuf.FeatureSet.EnforceNamingStyle + * @enum {number} + * @property {number} ENFORCE_NAMING_STYLE_UNKNOWN=0 ENFORCE_NAMING_STYLE_UNKNOWN value + * @property {number} STYLE2024=1 STYLE2024 value + * @property {number} STYLE_LEGACY=2 STYLE_LEGACY value + */ + FeatureSet.EnforceNamingStyle = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "ENFORCE_NAMING_STYLE_UNKNOWN"] = 0; + values[valuesById[1] = "STYLE2024"] = 1; + values[valuesById[2] = "STYLE_LEGACY"] = 2; + return values; + })(); + + FeatureSet.VisibilityFeature = (function() { + + /** + * Properties of a VisibilityFeature. + * @memberof google.protobuf.FeatureSet + * @interface IVisibilityFeature + */ + + /** + * Constructs a new VisibilityFeature. + * @memberof google.protobuf.FeatureSet + * @classdesc Represents a VisibilityFeature. 
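// --- Illustrative sketch, not part of the upstream patch ---
// FeatureSet now carries two extra enum-valued features: enforceNamingStyle (field 7,
// FeatureSet.EnforceNamingStyle) and defaultSymbolVisibility (field 8,
// FeatureSet.VisibilityFeature.DefaultSymbolVisibility). A minimal round-trip, with an
// assumed require path:
const protos = require('./protos/protos');
const { FeatureSet } = protos.google.protobuf;

const features = FeatureSet.fromObject({
  enforceNamingStyle: 'STYLE2024',              // 1
  defaultSymbolVisibility: 'EXPORT_TOP_LEVEL',  // 2
});
console.log(FeatureSet.verify(features));       // null
const obj = FeatureSet.toObject(features, { enums: String });
console.log(obj.enforceNamingStyle, obj.defaultSymbolVisibility); // STYLE2024 EXPORT_TOP_LEVEL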
+ * @implements IVisibilityFeature + * @constructor + * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set + */ + function VisibilityFeature(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new VisibilityFeature instance using the specified properties. + * @function create + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature instance + */ + VisibilityFeature.create = function create(properties) { + return new VisibilityFeature(properties); + }; + + /** + * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @function encode + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + VisibilityFeature.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. + * @function encodeDelimited + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + VisibilityFeature.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer. + * @function decode + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + VisibilityFeature.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSet.VisibilityFeature(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + VisibilityFeature.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a VisibilityFeature message. + * @function verify + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + VisibilityFeature.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {Object.} object Plain object + * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature + */ + VisibilityFeature.fromObject = function fromObject(object) { + if (object instanceof $root.google.protobuf.FeatureSet.VisibilityFeature) + return object; + return new $root.google.protobuf.FeatureSet.VisibilityFeature(); + }; + + /** + * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. + * @function toObject + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {google.protobuf.FeatureSet.VisibilityFeature} message VisibilityFeature + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + VisibilityFeature.toObject = function toObject() { + return {}; + }; + + /** + * Converts this VisibilityFeature to JSON. + * @function toJSON + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @instance + * @returns {Object.} JSON object + */ + VisibilityFeature.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for VisibilityFeature + * @function getTypeUrl + * @memberof google.protobuf.FeatureSet.VisibilityFeature + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + VisibilityFeature.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.protobuf.FeatureSet.VisibilityFeature"; + }; + + /** + * DefaultSymbolVisibility enum. 
+ * @name google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility + * @enum {number} + * @property {number} DEFAULT_SYMBOL_VISIBILITY_UNKNOWN=0 DEFAULT_SYMBOL_VISIBILITY_UNKNOWN value + * @property {number} EXPORT_ALL=1 EXPORT_ALL value + * @property {number} EXPORT_TOP_LEVEL=2 EXPORT_TOP_LEVEL value + * @property {number} LOCAL_ALL=3 LOCAL_ALL value + * @property {number} STRICT=4 STRICT value + */ + VisibilityFeature.DefaultSymbolVisibility = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN"] = 0; + values[valuesById[1] = "EXPORT_ALL"] = 1; + values[valuesById[2] = "EXPORT_TOP_LEVEL"] = 2; + values[valuesById[3] = "LOCAL_ALL"] = 3; + values[valuesById[4] = "STRICT"] = 4; + return values; + })(); + + return VisibilityFeature; + })(); + return FeatureSet; })(); @@ -10693,6 +11693,7 @@ default: return "minimumEdition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -10710,6 +11711,7 @@ default: return "maximumEdition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -10758,6 +11760,10 @@ case 0: message.minimumEdition = 0; break; + case "EDITION_LEGACY": + case 900: + message.minimumEdition = 900; + break; case "EDITION_PROTO2": case 998: message.minimumEdition = 998; @@ -10810,6 +11816,10 @@ case 0: message.maximumEdition = 0; break; + case "EDITION_LEGACY": + case 900: + message.maximumEdition = 900; + break; case "EDITION_PROTO2": case 998: message.maximumEdition = 998; @@ -10918,7 +11928,8 @@ * @memberof google.protobuf.FeatureSetDefaults * @interface IFeatureSetEditionDefault * @property {google.protobuf.Edition|null} [edition] FeatureSetEditionDefault edition - * @property {google.protobuf.IFeatureSet|null} [features] FeatureSetEditionDefault features + * @property {google.protobuf.IFeatureSet|null} [overridableFeatures] FeatureSetEditionDefault overridableFeatures + * @property {google.protobuf.IFeatureSet|null} [fixedFeatures] FeatureSetEditionDefault fixedFeatures */ /** @@ -10945,12 +11956,20 @@ FeatureSetEditionDefault.prototype.edition = 0; /** - * FeatureSetEditionDefault features. - * @member {google.protobuf.IFeatureSet|null|undefined} features + * FeatureSetEditionDefault overridableFeatures. + * @member {google.protobuf.IFeatureSet|null|undefined} overridableFeatures * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault * @instance */ - FeatureSetEditionDefault.prototype.features = null; + FeatureSetEditionDefault.prototype.overridableFeatures = null; + + /** + * FeatureSetEditionDefault fixedFeatures. + * @member {google.protobuf.IFeatureSet|null|undefined} fixedFeatures + * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + * @instance + */ + FeatureSetEditionDefault.prototype.fixedFeatures = null; /** * Creates a new FeatureSetEditionDefault instance using the specified properties. 
@@ -10976,10 +11995,12 @@ FeatureSetEditionDefault.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.features != null && Object.hasOwnProperty.call(message, "features")) - $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.edition); + if (message.overridableFeatures != null && Object.hasOwnProperty.call(message, "overridableFeatures")) + $root.google.protobuf.FeatureSet.encode(message.overridableFeatures, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.fixedFeatures != null && Object.hasOwnProperty.call(message, "fixedFeatures")) + $root.google.protobuf.FeatureSet.encode(message.fixedFeatures, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); return writer; }; @@ -11020,8 +12041,12 @@ message.edition = reader.int32(); break; } - case 2: { - message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + case 4: { + message.overridableFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + break; + } + case 5: { + message.fixedFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } default: @@ -11064,6 +12089,7 @@ default: return "edition: enum value expected"; case 0: + case 900: case 998: case 999: case 1000: @@ -11076,10 +12102,15 @@ case 2147483647: break; } - if (message.features != null && message.hasOwnProperty("features")) { - var error = $root.google.protobuf.FeatureSet.verify(message.features); + if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) { + var error = $root.google.protobuf.FeatureSet.verify(message.overridableFeatures); + if (error) + return "overridableFeatures." + error; + } + if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) { + var error = $root.google.protobuf.FeatureSet.verify(message.fixedFeatures); if (error) - return "features." + error; + return "fixedFeatures." + error; } return null; }; @@ -11107,6 +12138,10 @@ case 0: message.edition = 0; break; + case "EDITION_LEGACY": + case 900: + message.edition = 900; + break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -11148,10 +12183,15 @@ message.edition = 2147483647; break; } - if (object.features != null) { - if (typeof object.features !== "object") - throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features: object expected"); - message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); + if (object.overridableFeatures != null) { + if (typeof object.overridableFeatures !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.overridableFeatures: object expected"); + message.overridableFeatures = $root.google.protobuf.FeatureSet.fromObject(object.overridableFeatures); + } + if (object.fixedFeatures != null) { + if (typeof object.fixedFeatures !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fixedFeatures: object expected"); + message.fixedFeatures = $root.google.protobuf.FeatureSet.fromObject(object.fixedFeatures); } return message; }; @@ -11170,13 +12210,16 @@ options = {}; var object = {}; if (options.defaults) { - object.features = null; object.edition = options.enums === String ? 
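// --- Illustrative note, not part of the upstream patch ---
// FeatureSetDefaults.FeatureSetEditionDefault replaces its single `features` field with
// `overridableFeatures` (id 4) and `fixedFeatures` (id 5), so code that previously read
// `.features` from a decoded default now needs to consult both. A minimal sketch; the
// require path and the numeric feature values below are assumptions.
const protos = require('./protos/protos');
const { FeatureSetDefaults } = protos.google.protobuf;

const editionDefault = FeatureSetDefaults.FeatureSetEditionDefault.fromObject({
  edition: 'EDITION_2023',
  overridableFeatures: { jsonFormat: 1 },   // assumed enum value for an overridable feature
  fixedFeatures: { fieldPresence: 1 },      // assumed enum value for a fixed feature
});
console.log(FeatureSetDefaults.FeatureSetEditionDefault.verify(editionDefault)); // null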
"EDITION_UNKNOWN" : 0; + object.overridableFeatures = null; + object.fixedFeatures = null; } - if (message.features != null && message.hasOwnProperty("features")) - object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; + if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) + object.overridableFeatures = $root.google.protobuf.FeatureSet.toObject(message.overridableFeatures, options); + if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) + object.fixedFeatures = $root.google.protobuf.FeatureSet.toObject(message.fixedFeatures, options); return object; }; @@ -12391,6 +13434,22 @@ return GeneratedCodeInfo; })(); + /** + * SymbolVisibility enum. + * @name google.protobuf.SymbolVisibility + * @enum {number} + * @property {number} VISIBILITY_UNSET=0 VISIBILITY_UNSET value + * @property {number} VISIBILITY_LOCAL=1 VISIBILITY_LOCAL value + * @property {number} VISIBILITY_EXPORT=2 VISIBILITY_EXPORT value + */ + protobuf.SymbolVisibility = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "VISIBILITY_UNSET"] = 0; + values[valuesById[1] = "VISIBILITY_LOCAL"] = 1; + values[valuesById[2] = "VISIBILITY_EXPORT"] = 2; + return values; + })(); + protobuf.Any = (function() { /** @@ -15135,6 +16194,12 @@ * @interface IViolation * @property {string|null} [subject] Violation subject * @property {string|null} [description] Violation description + * @property {string|null} [apiService] Violation apiService + * @property {string|null} [quotaMetric] Violation quotaMetric + * @property {string|null} [quotaId] Violation quotaId + * @property {Object.|null} [quotaDimensions] Violation quotaDimensions + * @property {number|Long|null} [quotaValue] Violation quotaValue + * @property {number|Long|null} [futureQuotaValue] Violation futureQuotaValue */ /** @@ -15146,6 +16211,7 @@ * @param {google.rpc.QuotaFailure.IViolation=} [properties] Properties to set */ function Violation(properties) { + this.quotaDimensions = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -15168,6 +16234,63 @@ */ Violation.prototype.description = ""; + /** + * Violation apiService. + * @member {string} apiService + * @memberof google.rpc.QuotaFailure.Violation + * @instance + */ + Violation.prototype.apiService = ""; + + /** + * Violation quotaMetric. + * @member {string} quotaMetric + * @memberof google.rpc.QuotaFailure.Violation + * @instance + */ + Violation.prototype.quotaMetric = ""; + + /** + * Violation quotaId. + * @member {string} quotaId + * @memberof google.rpc.QuotaFailure.Violation + * @instance + */ + Violation.prototype.quotaId = ""; + + /** + * Violation quotaDimensions. + * @member {Object.} quotaDimensions + * @memberof google.rpc.QuotaFailure.Violation + * @instance + */ + Violation.prototype.quotaDimensions = $util.emptyObject; + + /** + * Violation quotaValue. + * @member {number|Long} quotaValue + * @memberof google.rpc.QuotaFailure.Violation + * @instance + */ + Violation.prototype.quotaValue = $util.Long ? $util.Long.fromBits(0,0,false) : 0; + + /** + * Violation futureQuotaValue. 
+ * @member {number|Long|null|undefined} futureQuotaValue + * @memberof google.rpc.QuotaFailure.Violation + * @instance + */ + Violation.prototype.futureQuotaValue = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + // Virtual OneOf for proto3 optional field + Object.defineProperty(Violation.prototype, "_futureQuotaValue", { + get: $util.oneOfGetter($oneOfFields = ["futureQuotaValue"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new Violation instance using the specified properties. * @function create @@ -15196,6 +16319,19 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.subject); if (message.description != null && Object.hasOwnProperty.call(message, "description")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.description); + if (message.apiService != null && Object.hasOwnProperty.call(message, "apiService")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.apiService); + if (message.quotaMetric != null && Object.hasOwnProperty.call(message, "quotaMetric")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.quotaMetric); + if (message.quotaId != null && Object.hasOwnProperty.call(message, "quotaId")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.quotaId); + if (message.quotaDimensions != null && Object.hasOwnProperty.call(message, "quotaDimensions")) + for (var keys = Object.keys(message.quotaDimensions), i = 0; i < keys.length; ++i) + writer.uint32(/* id 6, wireType 2 =*/50).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.quotaDimensions[keys[i]]).ldelim(); + if (message.quotaValue != null && Object.hasOwnProperty.call(message, "quotaValue")) + writer.uint32(/* id 7, wireType 0 =*/56).int64(message.quotaValue); + if (message.futureQuotaValue != null && Object.hasOwnProperty.call(message, "futureQuotaValue")) + writer.uint32(/* id 8, wireType 0 =*/64).int64(message.futureQuotaValue); return writer; }; @@ -15226,7 +16362,7 @@ Violation.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.rpc.QuotaFailure.Violation(); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.rpc.QuotaFailure.Violation(), key, value; while (reader.pos < end) { var tag = reader.uint32(); if (tag === error) @@ -15240,6 +16376,49 @@ message.description = reader.string(); break; } + case 3: { + message.apiService = reader.string(); + break; + } + case 4: { + message.quotaMetric = reader.string(); + break; + } + case 5: { + message.quotaId = reader.string(); + break; + } + case 6: { + if (message.quotaDimensions === $util.emptyObject) + message.quotaDimensions = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.quotaDimensions[key] = value; + break; + } + case 7: { + message.quotaValue = reader.int64(); + break; + } + case 8: { + message.futureQuotaValue = reader.int64(); + break; + } default: reader.skipType(tag & 7); break; @@ -15275,12 +16454,38 @@ Violation.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + var properties = {}; if (message.subject != null && message.hasOwnProperty("subject")) if (!$util.isString(message.subject)) return "subject: string expected"; if (message.description != null && message.hasOwnProperty("description")) if (!$util.isString(message.description)) return "description: string expected"; + if (message.apiService != null && message.hasOwnProperty("apiService")) + if (!$util.isString(message.apiService)) + return "apiService: string expected"; + if (message.quotaMetric != null && message.hasOwnProperty("quotaMetric")) + if (!$util.isString(message.quotaMetric)) + return "quotaMetric: string expected"; + if (message.quotaId != null && message.hasOwnProperty("quotaId")) + if (!$util.isString(message.quotaId)) + return "quotaId: string expected"; + if (message.quotaDimensions != null && message.hasOwnProperty("quotaDimensions")) { + if (!$util.isObject(message.quotaDimensions)) + return "quotaDimensions: object expected"; + var key = Object.keys(message.quotaDimensions); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.quotaDimensions[key[i]])) + return "quotaDimensions: string{k:string} expected"; + } + if (message.quotaValue != null && message.hasOwnProperty("quotaValue")) + if (!$util.isInteger(message.quotaValue) && !(message.quotaValue && $util.isInteger(message.quotaValue.low) && $util.isInteger(message.quotaValue.high))) + return "quotaValue: integer|Long expected"; + if (message.futureQuotaValue != null && message.hasOwnProperty("futureQuotaValue")) { + properties._futureQuotaValue = 1; + if (!$util.isInteger(message.futureQuotaValue) && !(message.futureQuotaValue && $util.isInteger(message.futureQuotaValue.low) && $util.isInteger(message.futureQuotaValue.high))) + return "futureQuotaValue: integer|Long expected"; + } return null; }; @@ -15300,6 +16505,37 @@ message.subject = String(object.subject); if (object.description != null) message.description = String(object.description); + if (object.apiService != null) + message.apiService = String(object.apiService); + if (object.quotaMetric != null) + message.quotaMetric = String(object.quotaMetric); + if (object.quotaId != null) + message.quotaId = String(object.quotaId); + if (object.quotaDimensions) { + if (typeof object.quotaDimensions !== "object") + throw 
TypeError(".google.rpc.QuotaFailure.Violation.quotaDimensions: object expected"); + message.quotaDimensions = {}; + for (var keys = Object.keys(object.quotaDimensions), i = 0; i < keys.length; ++i) + message.quotaDimensions[keys[i]] = String(object.quotaDimensions[keys[i]]); + } + if (object.quotaValue != null) + if ($util.Long) + (message.quotaValue = $util.Long.fromValue(object.quotaValue)).unsigned = false; + else if (typeof object.quotaValue === "string") + message.quotaValue = parseInt(object.quotaValue, 10); + else if (typeof object.quotaValue === "number") + message.quotaValue = object.quotaValue; + else if (typeof object.quotaValue === "object") + message.quotaValue = new $util.LongBits(object.quotaValue.low >>> 0, object.quotaValue.high >>> 0).toNumber(); + if (object.futureQuotaValue != null) + if ($util.Long) + (message.futureQuotaValue = $util.Long.fromValue(object.futureQuotaValue)).unsigned = false; + else if (typeof object.futureQuotaValue === "string") + message.futureQuotaValue = parseInt(object.futureQuotaValue, 10); + else if (typeof object.futureQuotaValue === "number") + message.futureQuotaValue = object.futureQuotaValue; + else if (typeof object.futureQuotaValue === "object") + message.futureQuotaValue = new $util.LongBits(object.futureQuotaValue.low >>> 0, object.futureQuotaValue.high >>> 0).toNumber(); return message; }; @@ -15316,14 +16552,49 @@ if (!options) options = {}; var object = {}; + if (options.objects || options.defaults) + object.quotaDimensions = {}; if (options.defaults) { object.subject = ""; object.description = ""; + object.apiService = ""; + object.quotaMetric = ""; + object.quotaId = ""; + if ($util.Long) { + var long = new $util.Long(0, 0, false); + object.quotaValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; + } else + object.quotaValue = options.longs === String ? "0" : 0; } if (message.subject != null && message.hasOwnProperty("subject")) object.subject = message.subject; if (message.description != null && message.hasOwnProperty("description")) object.description = message.description; + if (message.apiService != null && message.hasOwnProperty("apiService")) + object.apiService = message.apiService; + if (message.quotaMetric != null && message.hasOwnProperty("quotaMetric")) + object.quotaMetric = message.quotaMetric; + if (message.quotaId != null && message.hasOwnProperty("quotaId")) + object.quotaId = message.quotaId; + var keys2; + if (message.quotaDimensions && (keys2 = Object.keys(message.quotaDimensions)).length) { + object.quotaDimensions = {}; + for (var j = 0; j < keys2.length; ++j) + object.quotaDimensions[keys2[j]] = message.quotaDimensions[keys2[j]]; + } + if (message.quotaValue != null && message.hasOwnProperty("quotaValue")) + if (typeof message.quotaValue === "number") + object.quotaValue = options.longs === String ? String(message.quotaValue) : message.quotaValue; + else + object.quotaValue = options.longs === String ? $util.Long.prototype.toString.call(message.quotaValue) : options.longs === Number ? new $util.LongBits(message.quotaValue.low >>> 0, message.quotaValue.high >>> 0).toNumber() : message.quotaValue; + if (message.futureQuotaValue != null && message.hasOwnProperty("futureQuotaValue")) { + if (typeof message.futureQuotaValue === "number") + object.futureQuotaValue = options.longs === String ? String(message.futureQuotaValue) : message.futureQuotaValue; + else + object.futureQuotaValue = options.longs === String ? 
$util.Long.prototype.toString.call(message.futureQuotaValue) : options.longs === Number ? new $util.LongBits(message.futureQuotaValue.low >>> 0, message.futureQuotaValue.high >>> 0).toNumber() : message.futureQuotaValue; + if (options.oneofs) + object._futureQuotaValue = "futureQuotaValue"; + } return object; }; @@ -16068,6 +17339,8 @@ * @interface IFieldViolation * @property {string|null} [field] FieldViolation field * @property {string|null} [description] FieldViolation description + * @property {string|null} [reason] FieldViolation reason + * @property {google.rpc.ILocalizedMessage|null} [localizedMessage] FieldViolation localizedMessage */ /** @@ -16101,6 +17374,22 @@ */ FieldViolation.prototype.description = ""; + /** + * FieldViolation reason. + * @member {string} reason + * @memberof google.rpc.BadRequest.FieldViolation + * @instance + */ + FieldViolation.prototype.reason = ""; + + /** + * FieldViolation localizedMessage. + * @member {google.rpc.ILocalizedMessage|null|undefined} localizedMessage + * @memberof google.rpc.BadRequest.FieldViolation + * @instance + */ + FieldViolation.prototype.localizedMessage = null; + /** * Creates a new FieldViolation instance using the specified properties. * @function create @@ -16129,6 +17418,10 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.field); if (message.description != null && Object.hasOwnProperty.call(message, "description")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.description); + if (message.reason != null && Object.hasOwnProperty.call(message, "reason")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.reason); + if (message.localizedMessage != null && Object.hasOwnProperty.call(message, "localizedMessage")) + $root.google.rpc.LocalizedMessage.encode(message.localizedMessage, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -16173,6 +17466,14 @@ message.description = reader.string(); break; } + case 3: { + message.reason = reader.string(); + break; + } + case 4: { + message.localizedMessage = $root.google.rpc.LocalizedMessage.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -16214,6 +17515,14 @@ if (message.description != null && message.hasOwnProperty("description")) if (!$util.isString(message.description)) return "description: string expected"; + if (message.reason != null && message.hasOwnProperty("reason")) + if (!$util.isString(message.reason)) + return "reason: string expected"; + if (message.localizedMessage != null && message.hasOwnProperty("localizedMessage")) { + var error = $root.google.rpc.LocalizedMessage.verify(message.localizedMessage); + if (error) + return "localizedMessage." 
+ error; + } return null; }; @@ -16233,6 +17542,13 @@ message.field = String(object.field); if (object.description != null) message.description = String(object.description); + if (object.reason != null) + message.reason = String(object.reason); + if (object.localizedMessage != null) { + if (typeof object.localizedMessage !== "object") + throw TypeError(".google.rpc.BadRequest.FieldViolation.localizedMessage: object expected"); + message.localizedMessage = $root.google.rpc.LocalizedMessage.fromObject(object.localizedMessage); + } return message; }; @@ -16252,11 +17568,17 @@ if (options.defaults) { object.field = ""; object.description = ""; + object.reason = ""; + object.localizedMessage = null; } if (message.field != null && message.hasOwnProperty("field")) object.field = message.field; if (message.description != null && message.hasOwnProperty("description")) object.description = message.description; + if (message.reason != null && message.hasOwnProperty("reason")) + object.reason = message.reason; + if (message.localizedMessage != null && message.hasOwnProperty("localizedMessage")) + object.localizedMessage = $root.google.rpc.LocalizedMessage.toObject(message.localizedMessage, options); return object; }; @@ -95672,6 +96994,7 @@ * @interface ICommonLanguageSettings * @property {string|null} [referenceDocsUri] CommonLanguageSettings referenceDocsUri * @property {Array.|null} [destinations] CommonLanguageSettings destinations + * @property {google.api.ISelectiveGapicGeneration|null} [selectiveGapicGeneration] CommonLanguageSettings selectiveGapicGeneration */ /** @@ -95706,6 +97029,14 @@ */ CommonLanguageSettings.prototype.destinations = $util.emptyArray; + /** + * CommonLanguageSettings selectiveGapicGeneration. + * @member {google.api.ISelectiveGapicGeneration|null|undefined} selectiveGapicGeneration + * @memberof google.api.CommonLanguageSettings + * @instance + */ + CommonLanguageSettings.prototype.selectiveGapicGeneration = null; + /** * Creates a new CommonLanguageSettings instance using the specified properties. * @function create @@ -95738,6 +97069,8 @@ writer.int32(message.destinations[i]); writer.ldelim(); } + if (message.selectiveGapicGeneration != null && Object.hasOwnProperty.call(message, "selectiveGapicGeneration")) + $root.google.api.SelectiveGapicGeneration.encode(message.selectiveGapicGeneration, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -95789,6 +97122,10 @@ message.destinations.push(reader.int32()); break; } + case 3: { + message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -95840,6 +97177,11 @@ break; } } + if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) { + var error = $root.google.api.SelectiveGapicGeneration.verify(message.selectiveGapicGeneration); + if (error) + return "selectiveGapicGeneration." 
+ error; + } return null; }; @@ -95882,6 +97224,11 @@ break; } } + if (object.selectiveGapicGeneration != null) { + if (typeof object.selectiveGapicGeneration !== "object") + throw TypeError(".google.api.CommonLanguageSettings.selectiveGapicGeneration: object expected"); + message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.fromObject(object.selectiveGapicGeneration); + } return message; }; @@ -95900,8 +97247,10 @@ var object = {}; if (options.arrays || options.defaults) object.destinations = []; - if (options.defaults) + if (options.defaults) { object.referenceDocsUri = ""; + object.selectiveGapicGeneration = null; + } if (message.referenceDocsUri != null && message.hasOwnProperty("referenceDocsUri")) object.referenceDocsUri = message.referenceDocsUri; if (message.destinations && message.destinations.length) { @@ -95909,6 +97258,8 @@ for (var j = 0; j < message.destinations.length; ++j) object.destinations[j] = options.enums === String ? $root.google.api.ClientLibraryDestination[message.destinations[j]] === undefined ? message.destinations[j] : $root.google.api.ClientLibraryDestination[message.destinations[j]] : message.destinations[j]; } + if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) + object.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.toObject(message.selectiveGapicGeneration, options); return object; }; @@ -97731,6 +99082,7 @@ * @memberof google.api * @interface IPythonSettings * @property {google.api.ICommonLanguageSettings|null} [common] PythonSettings common + * @property {google.api.PythonSettings.IExperimentalFeatures|null} [experimentalFeatures] PythonSettings experimentalFeatures */ /** @@ -97756,6 +99108,14 @@ */ PythonSettings.prototype.common = null; + /** + * PythonSettings experimentalFeatures. + * @member {google.api.PythonSettings.IExperimentalFeatures|null|undefined} experimentalFeatures + * @memberof google.api.PythonSettings + * @instance + */ + PythonSettings.prototype.experimentalFeatures = null; + /** * Creates a new PythonSettings instance using the specified properties. * @function create @@ -97782,6 +99142,8 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.experimentalFeatures != null && Object.hasOwnProperty.call(message, "experimentalFeatures")) + $root.google.api.PythonSettings.ExperimentalFeatures.encode(message.experimentalFeatures, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -97822,6 +99184,10 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } + case 2: { + message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -97862,6 +99228,11 @@ if (error) return "common." + error; } + if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) { + var error = $root.google.api.PythonSettings.ExperimentalFeatures.verify(message.experimentalFeatures); + if (error) + return "experimentalFeatures." 
+ error; + } return null; }; @@ -97882,6 +99253,11 @@ throw TypeError(".google.api.PythonSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } + if (object.experimentalFeatures != null) { + if (typeof object.experimentalFeatures !== "object") + throw TypeError(".google.api.PythonSettings.experimentalFeatures: object expected"); + message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.fromObject(object.experimentalFeatures); + } return message; }; @@ -97898,10 +99274,14 @@ if (!options) options = {}; var object = {}; - if (options.defaults) + if (options.defaults) { object.common = null; + object.experimentalFeatures = null; + } if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) + object.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.toObject(message.experimentalFeatures, options); return object; }; @@ -97931,6 +99311,258 @@ return typeUrlPrefix + "/google.api.PythonSettings"; }; + PythonSettings.ExperimentalFeatures = (function() { + + /** + * Properties of an ExperimentalFeatures. + * @memberof google.api.PythonSettings + * @interface IExperimentalFeatures + * @property {boolean|null} [restAsyncIoEnabled] ExperimentalFeatures restAsyncIoEnabled + * @property {boolean|null} [protobufPythonicTypesEnabled] ExperimentalFeatures protobufPythonicTypesEnabled + * @property {boolean|null} [unversionedPackageDisabled] ExperimentalFeatures unversionedPackageDisabled + */ + + /** + * Constructs a new ExperimentalFeatures. + * @memberof google.api.PythonSettings + * @classdesc Represents an ExperimentalFeatures. + * @implements IExperimentalFeatures + * @constructor + * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set + */ + function ExperimentalFeatures(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExperimentalFeatures restAsyncIoEnabled. + * @member {boolean} restAsyncIoEnabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.restAsyncIoEnabled = false; + + /** + * ExperimentalFeatures protobufPythonicTypesEnabled. + * @member {boolean} protobufPythonicTypesEnabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.protobufPythonicTypesEnabled = false; + + /** + * ExperimentalFeatures unversionedPackageDisabled. + * @member {boolean} unversionedPackageDisabled + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + */ + ExperimentalFeatures.prototype.unversionedPackageDisabled = false; + + /** + * Creates a new ExperimentalFeatures instance using the specified properties. + * @function create + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures instance + */ + ExperimentalFeatures.create = function create(properties) { + return new ExperimentalFeatures(properties); + }; + + /** + * Encodes the specified ExperimentalFeatures message. 
Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @function encode + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExperimentalFeatures.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.restAsyncIoEnabled != null && Object.hasOwnProperty.call(message, "restAsyncIoEnabled")) + writer.uint32(/* id 1, wireType 0 =*/8).bool(message.restAsyncIoEnabled); + if (message.protobufPythonicTypesEnabled != null && Object.hasOwnProperty.call(message, "protobufPythonicTypesEnabled")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.protobufPythonicTypesEnabled); + if (message.unversionedPackageDisabled != null && Object.hasOwnProperty.call(message, "unversionedPackageDisabled")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.unversionedPackageDisabled); + return writer; + }; + + /** + * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExperimentalFeatures.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer. + * @function decode + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExperimentalFeatures.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PythonSettings.ExperimentalFeatures(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.restAsyncIoEnabled = reader.bool(); + break; + } + case 2: { + message.protobufPythonicTypesEnabled = reader.bool(); + break; + } + case 3: { + message.unversionedPackageDisabled = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExperimentalFeatures.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an ExperimentalFeatures message. + * @function verify + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExperimentalFeatures.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) + if (typeof message.restAsyncIoEnabled !== "boolean") + return "restAsyncIoEnabled: boolean expected"; + if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) + if (typeof message.protobufPythonicTypesEnabled !== "boolean") + return "protobufPythonicTypesEnabled: boolean expected"; + if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled")) + if (typeof message.unversionedPackageDisabled !== "boolean") + return "unversionedPackageDisabled: boolean expected"; + return null; + }; + + /** + * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {Object.} object Plain object + * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures + */ + ExperimentalFeatures.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.PythonSettings.ExperimentalFeatures) + return object; + var message = new $root.google.api.PythonSettings.ExperimentalFeatures(); + if (object.restAsyncIoEnabled != null) + message.restAsyncIoEnabled = Boolean(object.restAsyncIoEnabled); + if (object.protobufPythonicTypesEnabled != null) + message.protobufPythonicTypesEnabled = Boolean(object.protobufPythonicTypesEnabled); + if (object.unversionedPackageDisabled != null) + message.unversionedPackageDisabled = Boolean(object.unversionedPackageDisabled); + return message; + }; + + /** + * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {google.api.PythonSettings.ExperimentalFeatures} message ExperimentalFeatures + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + ExperimentalFeatures.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.restAsyncIoEnabled = false; + object.protobufPythonicTypesEnabled = false; + object.unversionedPackageDisabled = false; + } + if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) + object.restAsyncIoEnabled = message.restAsyncIoEnabled; + if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) + object.protobufPythonicTypesEnabled = message.protobufPythonicTypesEnabled; + if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled")) + object.unversionedPackageDisabled = message.unversionedPackageDisabled; + return object; + }; + + /** + * Converts this ExperimentalFeatures to JSON. + * @function toJSON + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @instance + * @returns {Object.} JSON object + */ + ExperimentalFeatures.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for ExperimentalFeatures + * @function getTypeUrl + * @memberof google.api.PythonSettings.ExperimentalFeatures + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + ExperimentalFeatures.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.PythonSettings.ExperimentalFeatures"; + }; + + return ExperimentalFeatures; + })(); + return PythonSettings; })(); @@ -98807,6 +100439,7 @@ * @memberof google.api * @interface IGoSettings * @property {google.api.ICommonLanguageSettings|null} [common] GoSettings common + * @property {Object.|null} [renamedServices] GoSettings renamedServices */ /** @@ -98818,6 +100451,7 @@ * @param {google.api.IGoSettings=} [properties] Properties to set */ function GoSettings(properties) { + this.renamedServices = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -98832,6 +100466,14 @@ */ GoSettings.prototype.common = null; + /** + * GoSettings renamedServices. + * @member {Object.} renamedServices + * @memberof google.api.GoSettings + * @instance + */ + GoSettings.prototype.renamedServices = $util.emptyObject; + /** * Creates a new GoSettings instance using the specified properties. 
* @function create @@ -98858,6 +100500,9 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.renamedServices != null && Object.hasOwnProperty.call(message, "renamedServices")) + for (var keys = Object.keys(message.renamedServices), i = 0; i < keys.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.renamedServices[keys[i]]).ldelim(); return writer; }; @@ -98888,7 +100533,7 @@ GoSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(), key, value; while (reader.pos < end) { var tag = reader.uint32(); if (tag === error) @@ -98898,6 +100543,29 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } + case 2: { + if (message.renamedServices === $util.emptyObject) + message.renamedServices = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.renamedServices[key] = value; + break; + } default: reader.skipType(tag & 7); break; @@ -98938,6 +100606,14 @@ if (error) return "common." + error; } + if (message.renamedServices != null && message.hasOwnProperty("renamedServices")) { + if (!$util.isObject(message.renamedServices)) + return "renamedServices: object expected"; + var key = Object.keys(message.renamedServices); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.renamedServices[key[i]])) + return "renamedServices: string{k:string} expected"; + } return null; }; @@ -98958,6 +100634,13 @@ throw TypeError(".google.api.GoSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } + if (object.renamedServices) { + if (typeof object.renamedServices !== "object") + throw TypeError(".google.api.GoSettings.renamedServices: object expected"); + message.renamedServices = {}; + for (var keys = Object.keys(object.renamedServices), i = 0; i < keys.length; ++i) + message.renamedServices[keys[i]] = String(object.renamedServices[keys[i]]); + } return message; }; @@ -98974,10 +100657,18 @@ if (!options) options = {}; var object = {}; + if (options.objects || options.defaults) + object.renamedServices = {}; if (options.defaults) object.common = null; if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); + var keys2; + if (message.renamedServices && (keys2 = Object.keys(message.renamedServices)).length) { + object.renamedServices = {}; + for (var j = 0; j < keys2.length; ++j) + object.renamedServices[keys2[j]] = message.renamedServices[keys2[j]]; + } return object; }; @@ -99616,6 +101307,251 @@ return values; })(); + api.SelectiveGapicGeneration = (function() { + + /** + * Properties of a SelectiveGapicGeneration. 
+ * @memberof google.api + * @interface ISelectiveGapicGeneration + * @property {Array.|null} [methods] SelectiveGapicGeneration methods + * @property {boolean|null} [generateOmittedAsInternal] SelectiveGapicGeneration generateOmittedAsInternal + */ + + /** + * Constructs a new SelectiveGapicGeneration. + * @memberof google.api + * @classdesc Represents a SelectiveGapicGeneration. + * @implements ISelectiveGapicGeneration + * @constructor + * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set + */ + function SelectiveGapicGeneration(properties) { + this.methods = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * SelectiveGapicGeneration methods. + * @member {Array.} methods + * @memberof google.api.SelectiveGapicGeneration + * @instance + */ + SelectiveGapicGeneration.prototype.methods = $util.emptyArray; + + /** + * SelectiveGapicGeneration generateOmittedAsInternal. + * @member {boolean} generateOmittedAsInternal + * @memberof google.api.SelectiveGapicGeneration + * @instance + */ + SelectiveGapicGeneration.prototype.generateOmittedAsInternal = false; + + /** + * Creates a new SelectiveGapicGeneration instance using the specified properties. + * @function create + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration instance + */ + SelectiveGapicGeneration.create = function create(properties) { + return new SelectiveGapicGeneration(properties); + }; + + /** + * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @function encode + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SelectiveGapicGeneration.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.methods != null && message.methods.length) + for (var i = 0; i < message.methods.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.methods[i]); + if (message.generateOmittedAsInternal != null && Object.hasOwnProperty.call(message, "generateOmittedAsInternal")) + writer.uint32(/* id 2, wireType 0 =*/16).bool(message.generateOmittedAsInternal); + return writer; + }; + + /** + * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. + * @function encodeDelimited + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + SelectiveGapicGeneration.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. 
+ * @function decode + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SelectiveGapicGeneration.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.SelectiveGapicGeneration(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + if (!(message.methods && message.methods.length)) + message.methods = []; + message.methods.push(reader.string()); + break; + } + case 2: { + message.generateOmittedAsInternal = reader.bool(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + SelectiveGapicGeneration.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a SelectiveGapicGeneration message. + * @function verify + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + SelectiveGapicGeneration.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.methods != null && message.hasOwnProperty("methods")) { + if (!Array.isArray(message.methods)) + return "methods: array expected"; + for (var i = 0; i < message.methods.length; ++i) + if (!$util.isString(message.methods[i])) + return "methods: string[] expected"; + } + if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) + if (typeof message.generateOmittedAsInternal !== "boolean") + return "generateOmittedAsInternal: boolean expected"; + return null; + }; + + /** + * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {Object.} object Plain object + * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration + */ + SelectiveGapicGeneration.fromObject = function fromObject(object) { + if (object instanceof $root.google.api.SelectiveGapicGeneration) + return object; + var message = new $root.google.api.SelectiveGapicGeneration(); + if (object.methods) { + if (!Array.isArray(object.methods)) + throw TypeError(".google.api.SelectiveGapicGeneration.methods: array expected"); + message.methods = []; + for (var i = 0; i < object.methods.length; ++i) + message.methods[i] = String(object.methods[i]); + } + if (object.generateOmittedAsInternal != null) + message.generateOmittedAsInternal = Boolean(object.generateOmittedAsInternal); + return message; + }; + + /** + * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. + * @function toObject + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {google.api.SelectiveGapicGeneration} message SelectiveGapicGeneration + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + SelectiveGapicGeneration.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) + object.methods = []; + if (options.defaults) + object.generateOmittedAsInternal = false; + if (message.methods && message.methods.length) { + object.methods = []; + for (var j = 0; j < message.methods.length; ++j) + object.methods[j] = message.methods[j]; + } + if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) + object.generateOmittedAsInternal = message.generateOmittedAsInternal; + return object; + }; + + /** + * Converts this SelectiveGapicGeneration to JSON. + * @function toJSON + * @memberof google.api.SelectiveGapicGeneration + * @instance + * @returns {Object.} JSON object + */ + SelectiveGapicGeneration.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for SelectiveGapicGeneration + * @function getTypeUrl + * @memberof google.api.SelectiveGapicGeneration + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + SelectiveGapicGeneration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.api.SelectiveGapicGeneration"; + }; + + return SelectiveGapicGeneration; + })(); + /** * LaunchStage enum. 
* @name google.api.LaunchStage diff --git a/protos/protos.json b/protos/protos.json index 303527c69..dfcf55244 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -33,12 +33,19 @@ "type": "FileDescriptorProto", "id": 1 } - } + }, + "extensions": [ + [ + 536000000, + 536000000 + ] + ] }, "Edition": { "edition": "proto2", "values": { "EDITION_UNKNOWN": 0, + "EDITION_LEGACY": 900, "EDITION_PROTO2": 998, "EDITION_PROTO3": 999, "EDITION_2023": 1000, @@ -77,6 +84,11 @@ "type": "int32", "id": 11 }, + "optionDependency": { + "rule": "repeated", + "type": "string", + "id": 15 + }, "messageType": { "rule": "repeated", "type": "DescriptorProto", @@ -165,6 +177,10 @@ "rule": "repeated", "type": "string", "id": 10 + }, + "visibility": { + "type": "SymbolVisibility", + "id": 11 } }, "nested": { @@ -390,6 +406,10 @@ "rule": "repeated", "type": "string", "id": 5 + }, + "visibility": { + "type": "SymbolVisibility", + "id": 6 } }, "nested": { @@ -604,6 +624,7 @@ 42, 42 ], + "php_generic_services", [ 38, 38 @@ -739,7 +760,8 @@ "type": "bool", "id": 10, "options": { - "default": false + "default": false, + "deprecated": true } }, "debugRedact": { @@ -767,6 +789,10 @@ "type": "FeatureSet", "id": 21 }, + "featureSupport": { + "type": "FeatureSupport", + "id": 22 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -836,6 +862,26 @@ "id": 2 } } + }, + "FeatureSupport": { + "fields": { + "editionIntroduced": { + "type": "Edition", + "id": 1 + }, + "editionDeprecated": { + "type": "Edition", + "id": 2 + }, + "deprecationWarning": { + "type": "string", + "id": 3 + }, + "editionRemoved": { + "type": "Edition", + "id": 4 + } + } } } }, @@ -924,6 +970,10 @@ "default": false } }, + "featureSupport": { + "type": "FieldOptions.FeatureSupport", + "id": 4 + }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -1066,6 +1116,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_2023", "edition_defaults.value": "EXPLICIT" } @@ -1076,6 +1127,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "OPEN" } @@ -1086,6 +1138,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "PACKED" } @@ -1096,6 +1149,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "VERIFY" } @@ -1106,7 +1160,8 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "edition_defaults.edition": "EDITION_PROTO2", + "feature_support.edition_introduced": "EDITION_2023", + "edition_defaults.edition": "EDITION_LEGACY", "edition_defaults.value": "LENGTH_PREFIXED" } }, @@ -1116,27 +1171,38 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "ALLOW" } + }, + "enforceNamingStyle": { + "type": "EnforceNamingStyle", + "id": 7, + "options": { + "retention": "RETENTION_SOURCE", + "targets": "TARGET_TYPE_METHOD", + 
"feature_support.edition_introduced": "EDITION_2024", + "edition_defaults.edition": "EDITION_2024", + "edition_defaults.value": "STYLE2024" + } + }, + "defaultSymbolVisibility": { + "type": "VisibilityFeature.DefaultSymbolVisibility", + "id": 8, + "options": { + "retention": "RETENTION_SOURCE", + "targets": "TARGET_TYPE_FILE", + "feature_support.edition_introduced": "EDITION_2024", + "edition_defaults.edition": "EDITION_2024", + "edition_defaults.value": "EXPORT_TOP_LEVEL" + } } }, "extensions": [ [ 1000, - 1000 - ], - [ - 1001, - 1001 - ], - [ - 1002, - 1002 - ], - [ - 9990, - 9990 + 9994 ], [ 9995, @@ -1181,7 +1247,13 @@ "UTF8_VALIDATION_UNKNOWN": 0, "VERIFY": 2, "NONE": 3 - } + }, + "reserved": [ + [ + 1, + 1 + ] + ] }, "MessageEncoding": { "values": { @@ -1196,6 +1268,33 @@ "ALLOW": 1, "LEGACY_BEST_EFFORT": 2 } + }, + "EnforceNamingStyle": { + "values": { + "ENFORCE_NAMING_STYLE_UNKNOWN": 0, + "STYLE2024": 1, + "STYLE_LEGACY": 2 + } + }, + "VisibilityFeature": { + "fields": {}, + "reserved": [ + [ + 1, + 536870911 + ] + ], + "nested": { + "DefaultSymbolVisibility": { + "values": { + "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": 0, + "EXPORT_ALL": 1, + "EXPORT_TOP_LEVEL": 2, + "LOCAL_ALL": 3, + "STRICT": 4 + } + } + } } } }, @@ -1223,11 +1322,26 @@ "type": "Edition", "id": 3 }, - "features": { + "overridableFeatures": { "type": "FeatureSet", - "id": 2 + "id": 4 + }, + "fixedFeatures": { + "type": "FeatureSet", + "id": 5 } - } + }, + "reserved": [ + [ + 1, + 1 + ], + [ + 2, + 2 + ], + "features" + ] } } }, @@ -1240,6 +1354,12 @@ "id": 1 } }, + "extensions": [ + [ + 536000000, + 536000000 + ] + ], "nested": { "Location": { "fields": { @@ -1325,6 +1445,14 @@ } } }, + "SymbolVisibility": { + "edition": "proto2", + "values": { + "VISIBILITY_UNSET": 0, + "VISIBILITY_LOCAL": 1, + "VISIBILITY_EXPORT": 2 + } + }, "Any": { "fields": { "type_url": { @@ -1484,6 +1612,13 @@ }, "nested": { "Violation": { + "oneofs": { + "_futureQuotaValue": { + "oneof": [ + "futureQuotaValue" + ] + } + }, "fields": { "subject": { "type": "string", @@ -1492,6 +1627,34 @@ "description": { "type": "string", "id": 2 + }, + "apiService": { + "type": "string", + "id": 3 + }, + "quotaMetric": { + "type": "string", + "id": 4 + }, + "quotaId": { + "type": "string", + "id": 5 + }, + "quotaDimensions": { + "keyType": "string", + "type": "string", + "id": 6 + }, + "quotaValue": { + "type": "int64", + "id": 7 + }, + "futureQuotaValue": { + "type": "int64", + "id": 8, + "options": { + "proto3_optional": true + } } } } @@ -1542,6 +1705,14 @@ "description": { "type": "string", "id": 2 + }, + "reason": { + "type": "string", + "id": 3 + }, + "localizedMessage": { + "type": "LocalizedMessage", + "id": 4 } } } @@ -9634,8 +9805,7 @@ "java_multiple_files": true, "java_outer_classname": "LaunchStageProto", "java_package": "com.google.api", - "objc_class_prefix": "GAPI", - "cc_enable_arenas": true + "objc_class_prefix": "GAPI" }, "nested": { "fieldBehavior": { @@ -9858,6 +10028,10 @@ "rule": "repeated", "type": "ClientLibraryDestination", "id": 2 + }, + "selectiveGapicGeneration": { + "type": "SelectiveGapicGeneration", + "id": 3 } } }, @@ -9998,6 +10172,28 @@ "common": { "type": "CommonLanguageSettings", "id": 1 + }, + "experimentalFeatures": { + "type": "ExperimentalFeatures", + "id": 2 + } + }, + "nested": { + "ExperimentalFeatures": { + "fields": { + "restAsyncIoEnabled": { + "type": "bool", + "id": 1 + }, + "protobufPythonicTypesEnabled": { + "type": "bool", + "id": 2 + }, + "unversionedPackageDisabled": { + "type": "bool", + "id": 3 + } + } } 
} }, @@ -10055,6 +10251,11 @@ "common": { "type": "CommonLanguageSettings", "id": 1 + }, + "renamedServices": { + "keyType": "string", + "type": "string", + "id": 2 } } }, @@ -10116,6 +10317,19 @@ "PACKAGE_MANAGER": 20 } }, + "SelectiveGapicGeneration": { + "fields": { + "methods": { + "rule": "repeated", + "type": "string", + "id": 1 + }, + "generateOmittedAsInternal": { + "type": "bool", + "id": 2 + } + } + }, "LaunchStage": { "values": { "LAUNCH_STAGE_UNSPECIFIED": 0, @@ -10138,6 +10352,7 @@ "java_multiple_files": true, "java_outer_classname": "OperationsProto", "java_package": "com.google.longrunning", + "objc_class_prefix": "GLRUN", "php_namespace": "Google\\LongRunning" }, "nested": { @@ -10350,13 +10565,13 @@ "nested": { "v1": { "options": { - "cc_enable_arenas": true, "csharp_namespace": "Google.Cloud.Iam.V1", "go_package": "cloud.google.com/go/iam/apiv1/iampb;iampb", "java_multiple_files": true, "java_outer_classname": "PolicyProto", "java_package": "com.google.iam.v1", - "php_namespace": "Google\\Cloud\\Iam\\V1" + "php_namespace": "Google\\Cloud\\Iam\\V1", + "cc_enable_arenas": true }, "nested": { "IAMPolicy": { diff --git a/system-test/spanner.ts b/system-test/spanner.ts index e07ee86f4..219b72eb2 100644 --- a/system-test/spanner.ts +++ b/system-test/spanner.ts @@ -166,6 +166,7 @@ describe('Spanner', () => { Name STRING(1024), ) PRIMARY KEY(SingerId)`, ]; + const [googleSqlOperation] = await databaseAdminClient.createDatabase({ createStatement: 'CREATE DATABASE `' + gSQLdatabaseId + '`', extraStatements: createSingersTableStatement, @@ -342,26 +343,23 @@ describe('Spanner', () => { name: databaseAdminClient.databasePath( projectId, instanceId, - database, + database.id, ), }); assert.strictEqual( metadata!.name, - databaseAdminClient.databasePath(projectId, instanceId, database), + databaseAdminClient.databasePath(projectId, instanceId, database.id), ); assert.strictEqual(metadata!.state, 'READY'); - if (IS_EMULATOR_ENABLED) { - assert.strictEqual( - metadata!.databaseDialect, - 'DATABASE_DIALECT_UNSPECIFIED', - ); - } else { - assert.strictEqual(metadata!.databaseDialect, dialect); - } + assert.strictEqual(metadata!.databaseDialect, dialect); } it('GOOGLE_STANDARD_SQL should have created the database', async () => { - void createDatabase(DATABASE, 'GOOGLE_STANDARD_SQL'); + await createDatabase(DATABASE, 'GOOGLE_STANDARD_SQL'); + }); + + it('POSTGRESQL should have created the database', async () => { + await createDatabase(PG_DATABASE, 'POSTGRESQL'); }); }); }); @@ -2411,14 +2409,7 @@ describe('Spanner', () => { assert.ifError(err); assert.strictEqual(metadata!.name, database.formattedName_); assert.strictEqual(metadata!.state, 'READY'); - if (IS_EMULATOR_ENABLED) { - assert.strictEqual( - metadata!.databaseDialect, - 'DATABASE_DIALECT_UNSPECIFIED', - ); - } else { - assert.strictEqual(metadata!.databaseDialect, dialect); - } + assert.strictEqual(metadata!.databaseDialect, dialect); done(); }); }; From 1b3931a799bdd052adc91703e59e1d0c83270065 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Jul 2025 23:24:46 +0530 Subject: [PATCH 19/31] feat: add methods from gax to cache proto root and process custom error details (#2330) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add methods from gax to cache proto root and process custom error details fix: distinguish plural and singular path templates PiperOrigin-RevId: 772168312 Source-Link: 
https://github.com/googleapis/googleapis/commit/873d84ec93f0f7606f5e5c8f11d06f1ebb198a6b Source-Link: https://github.com/googleapis/googleapis-gen/commit/f448c1b4eaaa5fdc7021a682068c313d6f2f104a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjQ0OGMxYjRlYWFhNWZkYzcwMjFhNjgyMDY4YzMxM2Q2ZjJmMTA0YSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> --- src/v1/database_admin_client.ts | 256 ++++++++++++++++++++++-- src/v1/instance_admin_client.ts | 166 ++++++++++++++- src/v1/spanner_client.ts | 218 ++++++++++++++++++-- src/v1/spanner_executor_proxy_client.ts | 2 +- 4 files changed, 601 insertions(+), 41 deletions(-) diff --git a/src/v1/database_admin_client.ts b/src/v1/database_admin_client.ts index d0e38d49d..6e18d1482 100644 --- a/src/v1/database_admin_client.ts +++ b/src/v1/database_admin_client.ts @@ -31,7 +31,7 @@ import type { import {Transform} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -267,7 +267,7 @@ export class DatabaseAdminClient { ), }; - const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); + const protoFilesRoot = this._gaxModule.protobufFromJSON(jsonProtos); // This API contains "long-running operations", which return a // an Operation object that allows for tracking of the operation, // rather than holding a request open. @@ -710,7 +710,23 @@ export class DatabaseAdminClient { this._log.info('getDatabase response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Drops (aka deletes) a Cloud Spanner database. 
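
Each of the `.catch` handlers added in this file (and in the instance admin and Spanner clients later in this patch) follows the same pattern: when a failed call carries a `statusDetails` array, `decodeAnyProtosInArray` from google-gax is used with the root parsed from `protos.json` to unpack any packed `google.protobuf.Any` entries into concrete `google.rpc` messages. A minimal caller-side sketch of the effect is below; the `v1` re-export path and the database name are assumptions for illustration, not something this patch defines.

```ts
// Sketch only: assumes the package re-exports the generated v1 clients and
// that the failed RPC attaches a statusDetails array to the rejection.
import {v1} from '@google-cloud/spanner';

async function logDecodedErrorDetails(): Promise<void> {
  const client = new v1.DatabaseAdminClient();
  const name = client.databasePath('my-project', 'my-instance', 'my-database');
  try {
    await client.getDatabase({name});
  } catch (err: any) {
    // After this patch, statusDetails entries arrive as decoded messages
    // (for example google.rpc.QuotaFailure, whose Violations now expose
    // quotaId/quotaDimensions/quotaValue, or google.rpc.BadRequest, whose
    // FieldViolations now carry reason and localizedMessage) instead of
    // packed google.protobuf.Any values.
    if (Array.isArray(err.statusDetails)) {
      for (const detail of err.statusDetails) {
        console.error('decoded error detail:', JSON.stringify(detail));
      }
    }
    throw err;
  }
}
```
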
@@ -833,7 +849,23 @@ export class DatabaseAdminClient { this._log.info('dropDatabase response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Returns the schema of a Cloud Spanner database as a list of formatted @@ -962,7 +994,23 @@ export class DatabaseAdminClient { this._log.info('getDatabaseDdl response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Sets the access control policy on a database or backup resource. @@ -1086,7 +1134,23 @@ export class DatabaseAdminClient { this._log.info('setIamPolicy response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets the access control policy for a database or backup resource. @@ -1203,7 +1267,23 @@ export class DatabaseAdminClient { this._log.info('getIamPolicy response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Returns permissions that the caller has on the specified database or backup @@ -1324,7 +1404,23 @@ export class DatabaseAdminClient { this._log.info('testIamPermissions response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets metadata on a pending or completed @@ -1443,7 +1539,23 @@ export class DatabaseAdminClient { this._log.info('getBackup response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Updates a pending or completed @@ -1572,7 +1684,23 @@ export class DatabaseAdminClient { this._log.info('updateBackup response %j', response); return [response, 
options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Deletes a pending or completed @@ -1694,7 +1822,23 @@ export class DatabaseAdminClient { this._log.info('deleteBackup response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Adds split points to specified tables, indexes of a database. @@ -1829,7 +1973,23 @@ export class DatabaseAdminClient { this._log.info('addSplitPoints response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Creates a new backup schedule. @@ -1960,7 +2120,23 @@ export class DatabaseAdminClient { this._log.info('createBackupSchedule response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets backup schedule for the input schedule name. @@ -2087,7 +2263,23 @@ export class DatabaseAdminClient { this._log.info('getBackupSchedule response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Updates a backup schedule. @@ -2220,7 +2412,23 @@ export class DatabaseAdminClient { this._log.info('updateBackupSchedule response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Deletes a backup schedule. 
@@ -2347,7 +2555,23 @@ export class DatabaseAdminClient { this._log.info('deleteBackupSchedule response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** diff --git a/src/v1/instance_admin_client.ts b/src/v1/instance_admin_client.ts index a1d1adb7e..f41a235c7 100644 --- a/src/v1/instance_admin_client.ts +++ b/src/v1/instance_admin_client.ts @@ -31,7 +31,7 @@ import type { import {Transform} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -270,7 +270,7 @@ export class InstanceAdminClient { ), }; - const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); + const protoFilesRoot = this._gaxModule.protobufFromJSON(jsonProtos); // This API contains "long-running operations", which return a // an Operation object that allows for tracking of the operation, // rather than holding a request open. @@ -759,7 +759,23 @@ export class InstanceAdminClient { this._log.info('getInstanceConfig response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Deletes the instance configuration. Deletion is only allowed when no @@ -904,7 +920,23 @@ export class InstanceAdminClient { this._log.info('deleteInstanceConfig response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets information about a particular instance. @@ -1029,7 +1061,23 @@ export class InstanceAdminClient { this._log.info('getInstance response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Deletes an instance. 
@@ -1165,7 +1213,23 @@ export class InstanceAdminClient { this._log.info('deleteInstance response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Sets the access control policy on an instance resource. Replaces any @@ -1287,7 +1351,23 @@ export class InstanceAdminClient { this._log.info('setIamPolicy response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets the access control policy for an instance resource. Returns an empty @@ -1401,7 +1481,23 @@ export class InstanceAdminClient { this._log.info('getIamPolicy response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Returns permissions that the caller has on the specified instance resource. @@ -1518,7 +1614,23 @@ export class InstanceAdminClient { this._log.info('testIamPermissions response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets information about a particular instance partition. @@ -1645,7 +1757,23 @@ export class InstanceAdminClient { this._log.info('getInstancePartition response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Deletes an existing instance partition. 
Requires that the @@ -1783,7 +1911,23 @@ export class InstanceAdminClient { this._log.info('deleteInstancePartition response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** diff --git a/src/v1/spanner_client.ts b/src/v1/spanner_client.ts index 7af869322..71a41a11e 100644 --- a/src/v1/spanner_client.ts +++ b/src/v1/spanner_client.ts @@ -29,7 +29,7 @@ import type { import {Transform, PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -566,7 +566,23 @@ export class SpannerClient { this._log.info('createSession response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Creates multiple new sessions. @@ -688,7 +704,23 @@ export class SpannerClient { this._log.info('batchCreateSessions response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Gets a session. Returns `NOT_FOUND` if the session does not exist. @@ -796,7 +828,23 @@ export class SpannerClient { this._log.info('getSession response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Ends a session, releasing server resources associated with it. This will @@ -904,7 +952,23 @@ export class SpannerClient { this._log.info('deleteSession response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Executes an SQL statement, returning all results in a single reply. 
This @@ -1111,7 +1175,23 @@ export class SpannerClient { this._log.info('executeSql response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Executes a batch of SQL DML statements. This method allows many statements @@ -1262,7 +1342,23 @@ export class SpannerClient { this._log.info('executeBatchDml response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Reads rows from the database using key lookups and scans, as a @@ -1450,7 +1546,23 @@ export class SpannerClient { this._log.info('read response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Begins a new transaction. This step can often be skipped: @@ -1575,7 +1687,23 @@ export class SpannerClient { this._log.info('beginTransaction response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Commits a transaction. The request includes the mutations to be @@ -1729,7 +1857,23 @@ export class SpannerClient { this._log.info('commit response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Rolls back a transaction, releasing any locks it holds. 
It is a good @@ -1845,7 +1989,23 @@ export class SpannerClient { this._log.info('rollback response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Creates a set of partition tokens that can be used to execute a query @@ -2003,7 +2163,23 @@ export class SpannerClient { this._log.info('partitionQuery response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** * Creates a set of partition tokens that can be used to execute a read @@ -2150,7 +2326,23 @@ export class SpannerClient { this._log.info('partitionRead response %j', response); return [response, options, rawResponse]; }, - ); + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); } /** diff --git a/src/v1/spanner_executor_proxy_client.ts b/src/v1/spanner_executor_proxy_client.ts index e381a7e45..8f64dbfc8 100644 --- a/src/v1/spanner_executor_proxy_client.ts +++ b/src/v1/spanner_executor_proxy_client.ts @@ -27,7 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); -import {loggingUtils as logging} from 'google-gax'; +import {loggingUtils as logging, decodeAnyProtosInArray} from 'google-gax'; /** * Client JSON configuration object, loaded from From c54657f22fd8fa22bc74ea6fcea2112c707fe60b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 4 Jul 2025 07:04:34 +0200 Subject: [PATCH 20/31] chore(deps): update dependency sinon to v21 (#2327) --- .github/scripts/package.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/scripts/package.json b/.github/scripts/package.json index 2c2e5207d..26ab7802a 100644 --- a/.github/scripts/package.json +++ b/.github/scripts/package.json @@ -16,6 +16,6 @@ "devDependencies": { "@octokit/rest": "^19.0.0", "mocha": "^10.0.0", - "sinon": "^18.0.0" + "sinon": "^21.0.0" } } \ No newline at end of file diff --git a/package.json b/package.json index 4496ae972..8c1cd01f5 100644 --- a/package.json +++ b/package.json @@ -126,7 +126,7 @@ "p-limit": "^3.0.1", "path-to-regexp": "^8.2.0", "proxyquire": "^2.1.3", - "sinon": "^20.0.0", + "sinon": "^21.0.0", "stats-lite": "^2.2.0", "time-span": "4.0.0", "tmp": "^0.2.3", From a381cab92c31373a6a10edca0f8a8bdfc4415e4b Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Fri, 4 Jul 2025 09:15:15 +0000 Subject: [PATCH 21/31] feat(spanner): add tpc support (#2333) --- src/index.ts | 74 +++++++++++++++++++++-- system-test/tpc-test.ts | 126 
++++++++++++++++++++++++++++++++++++++++ test/index.ts | 98 +++++++++++++++++++++++++++++++ 3 files changed, 294 insertions(+), 4 deletions(-) create mode 100644 system-test/tpc-test.ts diff --git a/src/index.ts b/src/index.ts index fd28a5699..a78691246 100644 --- a/src/index.ts +++ b/src/index.ts @@ -156,6 +156,17 @@ export interface SpannerOptions extends GrpcClientOptions { defaultTransactionOptions?: Pick; observabilityOptions?: ObservabilityOptions; interceptors?: any[]; + /** + * The Trusted Cloud Domain (TPC) DNS of the service used to make requests. + * Defaults to `googleapis.com`. + * We support both camelCase and snake_case for the universe domain. + * Customer may set any of these as both the options are same, + * they both points to universe endpoint. + * There is no preference for any of these option; however exception will be + * thrown if both are set to different values. + */ + universe_domain?: string; + universeDomain?: string; } export interface RequestConfig { client: string; @@ -206,6 +217,45 @@ export type TranslateEnumKeys< [P in keyof T]: P extends U ? EnumKey | null | undefined : T[P]; }; +/** + * Retrieves the universe domain. + * + * This function checks for a universe domain in the following order: + * 1. The `universeDomain` property within the provided spanner options. + * 2. The `universe_domain` property within the provided spanner options. + * 3. The `GOOGLE_CLOUD_UNIVERSE_DOMAIN` environment variable. + * 4. If none of the above properties will be set, it will fallback to `googleapis.com`. + * + * For consistency with the Auth client, if the `universe_domain` option or the + * `GOOGLE_CLOUD_UNIVERSE_DOMAIN` env variable is used, this function will also set the + * `universeDomain` property within the provided `SpannerOptions` object. This ensures the + * Spanner client's universe domain aligns with the universe configured for authentication. + * + * @param {SpannerOptions} options - The Spanner client options. + * @returns {string} The universe domain. + */ +function getUniverseDomain(options: SpannerOptions): string { + const universeDomainEnvVar = + typeof process === 'object' && typeof process.env === 'object' + ? process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] + : undefined; + const universeDomain = + options?.universeDomain ?? + options?.universe_domain ?? + universeDomainEnvVar ?? + 'googleapis.com'; + // if the options.universe_domain/GOOGLE_CLOUD_UNIVERSE_DOMAIN env variable is set, + // set its value to the Spanner `universeDomain` options + // to match it with the universe from Auth Client + if ( + !options?.universeDomain && + (options?.universe_domain || process.env.GOOGLE_CLOUD_UNIVERSE_DOMAIN) + ) { + options.universeDomain = universeDomain; + } + return universeDomain; +} + /** * [Cloud Spanner](https://cloud.google.com/spanner) is a highly scalable, * transactional, managed, NewSQL database service. 
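A minimal usage sketch of the resolution order implemented by getUniverseDomain above; the project id and domain values below are placeholders:

// import {Spanner} from '@google-cloud/spanner';
// Either spelling of the option is accepted; setting both to different values throws.
const spanner = new Spanner({
  projectId: 'my-project',                 // placeholder
  universeDomain: 'example-universe.goog', // placeholder TPC domain
});
// Without an explicit apiEndpoint/servicePath, requests target
// 'spanner.' + universeDomain, i.e. 'spanner.example-universe.goog'.
console.log(spanner.universeDomain); // 'example-universe.goog'
// When neither option is set, GOOGLE_CLOUD_UNIVERSE_DOMAIN is honoured,
// and the default remains 'googleapis.com'.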
Cloud Spanner solves the @@ -259,6 +309,7 @@ class Spanner extends GrpcService { directedReadOptions: google.spanner.v1.IDirectedReadOptions | null; defaultTransactionOptions: RunTransactionOptions; _observabilityOptions: ObservabilityOptions | undefined; + private _universeDomain: string; readonly _nthClientId: number; /** @@ -351,6 +402,16 @@ class Spanner extends GrpcService { }; delete options.defaultTransactionOptions; + if ( + options?.universe_domain && + options?.universeDomain && + options?.universe_domain !== options?.universeDomain + ) { + throw new Error( + 'Please set either universe_domain or universeDomain, but not both.', + ); + } + const emulatorHost = Spanner.getSpannerEmulatorHost(); if ( emulatorHost && @@ -361,12 +422,12 @@ class Spanner extends GrpcService { options.port = emulatorHost.port; options.sslCreds = grpc.credentials.createInsecure(); } + + const universeEndpoint = getUniverseDomain(options); + const spannerUniverseEndpoint = 'spanner.' + universeEndpoint; const config = { baseUrl: - options.apiEndpoint || - options.servicePath || - // TODO: for TPC, this needs to support universeDomain - 'spanner.googleapis.com', + options.apiEndpoint || options.servicePath || spannerUniverseEndpoint, protosDir: path.resolve(__dirname, '../protos'), protoServices: { Operations: { @@ -399,6 +460,11 @@ class Spanner extends GrpcService { ); ensureInitialContextManagerSet(); this._nthClientId = nextSpannerClientId(); + this._universeDomain = universeEndpoint; + } + + get universeDomain() { + return this._universeDomain; } /** diff --git a/system-test/tpc-test.ts b/system-test/tpc-test.ts new file mode 100644 index 000000000..225910c9e --- /dev/null +++ b/system-test/tpc-test.ts @@ -0,0 +1,126 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {describe, it} from 'mocha'; +import {MutationSet, Spanner} from '../src'; +import * as assert from 'assert'; + +// INSTRUCTIONS FOR RUNNING TEST: +// 1. Change describe.skip to describe.only below. +// 2. Reassign process.env.GOOGLE_APPLICATION_CREDENTIALS to local key file. +// 3. Reassign UNIVERSE_DOMAIN_CONSTANT to the universe domain to test. +// 4. Run `npm run system-test`. + +describe.skip('Universe domain tests', () => { + // These tests are only designed to pass when using the service account + // credentials for the universe domain environment so we skip them in the CI pipeline. 
+ + before(() => { + process.env.GOOGLE_APPLICATION_CREDENTIALS = 'path to your credential file'; + }); + + async function runTest(spanner: Spanner, instanceId, databaseId) { + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + const tableName = 'VenueDetails'; + const table = database.table(tableName); + + const schema = `CREATE TABLE ${tableName} ( + VenueId INT64 NOT NULL, + VenueName STRING(100), + Capacity INT64, + ) PRIMARY KEY (VenueId)`; + + console.log(`Creating table ${table.name}`); + const [, operation] = await table.create(schema); + + await operation.promise(); + + console.log(`${table.name} create successfully.`); + + const venuesTable = database.table(tableName); + console.log(`Inserting data into the table ${table.name}`); + await venuesTable.insert([ + {VenueId: 1, VenueName: 'Marc', Capacity: 100}, + {VenueId: 2, VenueName: 'Marc', Capacity: 200}, + ]); + + const mutations = new MutationSet(); + + mutations.insert(tableName, { + VenueId: '3', + VenueName: 'Marc', + Capacity: 700, + }); + mutations.insert(tableName, { + VenueId: '4', + VenueName: 'Marc', + Capacity: 800, + }); + mutations.update(tableName, { + VenueId: '3', + VenueName: 'Marc', + Capacity: 300, + }); + mutations.update(tableName, { + VenueId: '4', + VenueName: 'Marc', + Capacity: 400, + }); + + await database.writeAtLeastOnce(mutations); + + const query = { + columns: ['VenueId', 'VenueName', 'Capacity'], + keySet: { + all: true, + }, + }; + + const [rows] = await venuesTable.read(query); + + console.log(`Inserted ${rows.length} rows into the table ${table.name}`); + + console.log(`Reading rows in the table ${table.name}`); + + rows.forEach(row => { + const json = row.toJSON(); + console.log( + `VenueId: ${json.VenueId}, VenueName: ${json.VenueName}, Capacity: ${json.Capacity}`, + ); + }); + + console.log(`deleting table ${table.name}`); + await table.delete(); + console.log(`deleted table ${table.name}`); + } + it('should be able to run apis successfully against TPC environment', async () => { + const UNIVERSE_DOMAIN_CONSTANT = 'my-universe-domain'; + const projectId = 'tpc-project-id'; + const universeDomain = UNIVERSE_DOMAIN_CONSTANT; + const options = { + projectId, + universeDomain, + }; + const spanner = new Spanner(options); + const instanceId = 'your-test-instance-id'; + const databaseId = 'your-test-database-id'; + + try { + await runTest(spanner, instanceId, databaseId); + } catch (e) { + assert.ifError(e); + } + }); +}); diff --git a/test/index.ts b/test/index.ts index bd1f9b372..5b9bddc59 100644 --- a/test/index.ts +++ b/test/index.ts @@ -434,6 +434,104 @@ describe('Spanner', () => { }); }); + describe('TPC tests', () => { + const UNIVERSE_DOMAIN_CONSTANT = 'fake-universe-domain'; + + it('should have default universe domain set to `googleapis.com`', () => { + try { + const spanner = new Spanner(); + // get default universe domain from spanner object when + // neither of univserDomain and universe_domain are set + // nor env GOOGLE_CLOUD_UNIVERSE_DOMAIN is set + assert.strictEqual(spanner.universeDomain, 'googleapis.com'); + // GoogleAuthOption's univserseDomain property must be undefined here + // as it will get configure to default value in the gax library + // please see: https://github.com/googleapis/gax-nodejs/blob/de43edd3524b7f995bd3cf5c34ddead03828b546/gax/src/grpc.ts#L431 + assert.strictEqual(spanner.options.universeDomain, undefined); + } catch (err) { + assert.ifError(err); + } + }); + + it('should optionally accept universeDomain', () 
=> { + const fakeOption = { + universeDomain: UNIVERSE_DOMAIN_CONSTANT, + }; + + try { + const spanner = new Spanner(fakeOption); + // get universe domain from spanner object + assert.strictEqual(spanner.universeDomain, fakeOption.universeDomain); + // GoogleAuthOption's univserseDomain property must be set + // to match it with the universe from Auth Client + assert.strictEqual( + spanner.options.universeDomain, + fakeOption.universeDomain, + ); + } catch (err) { + assert.ifError(err); + } + }); + + it('should optionally accept universe_domain', () => { + const fakeOption = { + universe_domain: UNIVERSE_DOMAIN_CONSTANT, + }; + + try { + const spanner = new Spanner(fakeOption); + // get universe domain from spanner object + assert.strictEqual(spanner.universeDomain, fakeOption.universe_domain); + // GoogleAuthOption's univserseDomain property must be set + // to match it with the universe from Auth Client + assert.strictEqual( + spanner.options.universeDomain, + fakeOption.universe_domain, + ); + } catch (err) { + assert.ifError(err); + } + }); + + it('should set universe domain upon setting env GOOGLE_CLOUD_UNIVERSE_DOMAIN', () => { + process.env.GOOGLE_CLOUD_UNIVERSE_DOMAIN = UNIVERSE_DOMAIN_CONSTANT; + + try { + const spanner = new Spanner(); + // get universe domain from spanner object + assert.strictEqual(spanner.universeDomain, UNIVERSE_DOMAIN_CONSTANT); + // GoogleAuthOption's univserseDomain property must be set + // to match it with the universe from Auth Client + assert.strictEqual( + spanner.options.universeDomain, + UNIVERSE_DOMAIN_CONSTANT, + ); + } catch (err) { + assert.ifError(err); + } + delete process.env.GOOGLE_CLOUD_UNIVERSE_DOMAIN; + }); + + it('should throw an error if universe_domain and universeDomain both are set to different values', () => { + const fakeOption = { + universeDomain: 'fake-universe-domain-1', + universe_domain: 'fake-universe-domain-2', + }; + const fakeError = new Error( + 'Please set either universe_domain or universeDomain, but not both.', + ); + + try { + const spanner = new Spanner(fakeOption); + // this line should never reach client must throw an error. + throw new Error('should never reach this line'); + } catch (err) { + assert.deepStrictEqual(err, fakeError); + } + delete process.env.GOOGLE_CLOUD_UNIVERSE_DOMAIN; + }); + }); + describe('date', () => { it('should create a default SpannerDate instance', () => { const customValue = {}; From 68cc27c66c171391e26ecfee8c95a02d80c3975c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 9 Jul 2025 12:30:08 +0530 Subject: [PATCH 22/31] chore: add node 24 in node ci test (#2344) Source-Link: https://github.com/googleapis/synthtool/commit/1218bc231201438192c962136303b95f0a11a4f5 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest@sha256:66c44f0ad8f6caaa4eb3fbe74f8c2b4de5a97c2b930cee069e712c447723ba95 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/ci.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 4b14618e1..2a0311b85 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:6062c519ce78ee08490e7ac7330eca80f00f139ef1a241c5c2b306550b60c728 -# created: 2025-06-26T22:34:58.583582089Z + digest: sha256:66c44f0ad8f6caaa4eb3fbe74f8c2b4de5a97c2b930cee069e712c447723ba95 +# created: 2025-07-08T20:57:17.642848562Z diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 883082c0b..ba80cb2c9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [18, 20, 22] + node: [18, 20, 22, 24] steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 From 646e6ea6f1dc5fa1937e512ae9e81ae4d2637ed0 Mon Sep 17 00:00:00 2001 From: Lester Szeto Date: Sat, 12 Jul 2025 08:33:17 -0700 Subject: [PATCH 23/31] Feat: Operation, Attempt, and GFE metrics (#2328) * feat: Operation and Attempt metrics Added operation and attempt metrics emissions - Operation Latency - Operation Count - Attempt Latency - Attempt Count - GFE Latency - GFE Connectivity Error Count Metrics are feature gated to the env var `SPANNER_DISABLE_BUILTIN_METRICS` being set to `true` - Metrics are disabled when emulator is used * Addressed Review Comments - Disabled exporting of metrics if already exported within last 30s - Added View definitions for bucket boundaries for histograms - Fixed issue with CreateTracer reusing previously defined operation. - Moved Client Name and UID setting to exporter - Improved tracer associations with attempts by using google request id instead of the method * Added requestId to interceptor fetching of Tracer * Addressed review comments - Added periodic tracer cleanup to MetricsTracerFactory - Changed RequestStream operation completion logic to better handle error cases * Added tests for tracer pruning * Addressed cleanup intervals and running logic * testing system test cleanup * Added better cleanup to tests - Disabled metrics for unit tests * Added Env var setting changes to owlbot to persist between generations * disable metrics for unit tests - Disabled Metrics from unit tests by setting the disable ENV var in command definition - Added MetricsTracerFactory cleanup to spanner.close() * reverted package and fixed metrics permission - Disabled metrics from inside tests instead * unref the MetricsTracerFactory Interval --------- Co-authored-by: surbhigarg92 --- observability-test/spanner.ts | 65 ++- src/index.ts | 115 ++++- src/metrics/constants.ts | 50 +- src/metrics/interceptor.ts | 79 +++ src/metrics/metrics-tracer-factory.ts | 347 ++++++++++---- src/metrics/metrics-tracer.ts | 302 ++++++++---- src/metrics/spanner-metrics-exporter.ts | 15 +- src/metrics/transform.ts | 48 +- system-test/install.ts | 2 +- system-test/spanner.ts | 8 +- test/index.ts | 26 +- test/metrics/interceptor.ts | 170 +++++++ test/metrics/metrics-tracer-factory.ts | 255 ++++++---- test/metrics/metrics-tracer.ts | 144 +++--- test/metrics/metrics.ts | 587 +++++++++++++++++++++++ test/metrics/spanner-metrics-exporter.ts | 16 +- test/metrics/transform.ts | 71 ++- test/spanner.ts | 19 +- 18 files changed, 1911 insertions(+), 408 deletions(-) create mode 100644 src/metrics/interceptor.ts create mode 100644 test/metrics/interceptor.ts create mode 100644 test/metrics/metrics.ts diff --git a/observability-test/spanner.ts b/observability-test/spanner.ts index 95e93ffb8..b11c6cf22 100644 --- a/observability-test/spanner.ts +++ b/observability-test/spanner.ts @@ -27,6 +27,7 @@ import * as mockDatabaseAdmin from 
'../test/mockserver/mockdatabaseadmin'; import * as sinon from 'sinon'; import {Row} from '../src/partial-result-stream'; import {END_TO_END_TRACING_HEADER} from '../src/common'; +import {MetricsTracerFactory} from '../src/metrics/metrics-tracer-factory'; const { AlwaysOnSampler, NodeTracerProvider, @@ -52,6 +53,23 @@ const {ObservabilityOptions} = require('../src/instrument'); const selectSql = 'SELECT 1'; const updateSql = 'UPDATE FOO SET BAR=1 WHERE BAZ=2'; +async function disableMetrics(sandbox?: sinon.SinonSandbox) { + if (sandbox) { + if ( + Object.prototype.hasOwnProperty.call( + process.env, + 'SPANNER_DISABLE_BUILTIN_METRICS', + ) + ) { + sandbox.replace(process.env, 'SPANNER_DISABLE_BUILTIN_METRICS', 'true'); + } else { + sandbox.define(process.env, 'SPANNER_DISABLE_BUILTIN_METRICS', 'true'); + } + } + await MetricsTracerFactory.resetInstance(); + MetricsTracerFactory.enabled = false; +} + /** A simple result set for SELECT 1. */ function createSelect1ResultSet(): protobuf.ResultSet { const fields = [ @@ -79,6 +97,7 @@ interface setupResults { async function setup( observabilityOptions?: typeof ObservabilityOptions, + sandbox?: sinon.SinonSandbox, ): Promise { const server = new grpc.Server(); @@ -109,6 +128,7 @@ async function setup( mock.StatementResult.updateCount(1), ); + await disableMetrics(sandbox); const spanner = new Spanner({ projectId: 'observability-project-id', servicePath: 'localhost', @@ -125,6 +145,7 @@ async function setup( } describe('EndToEnd', async () => { + const sandbox = sinon.createSandbox(); const contextManager = new AsyncHooksContextManager(); setGlobalContextManager(contextManager); afterEach(() => { @@ -139,10 +160,13 @@ describe('EndToEnd', async () => { spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - const setupResult = await setup({ - tracerProvider: tracerProvider, - enableExtendedTracing: false, - }); + const setupResult = await setup( + { + tracerProvider: tracerProvider, + enableExtendedTracing: false, + }, + sandbox, + ); const server = setupResult.server; const spannerMock = setupResult.spannerMock; @@ -152,6 +176,7 @@ describe('EndToEnd', async () => { after(async () => { spanner.close(); await server.tryShutdown(() => {}); + sandbox.restore(); }); afterEach(async () => { @@ -397,6 +422,16 @@ describe('EndToEnd', async () => { }); describe('ObservabilityOptions injection and propagation', async () => { + let sandbox; + + beforeEach(() => { + sandbox = sinon.createSandbox(); + }); + + afterEach(() => { + sandbox.restore(); + }); + it('Passed into Spanner, Instance and Database', async () => { const traceExporter = new InMemorySpanExporter(); const tracerProvider = new NodeTracerProvider({ @@ -410,7 +445,7 @@ describe('ObservabilityOptions injection and propagation', async () => { enableExtendedTracing: true, }; - const setupResult = await setup(observabilityOptions); + const setupResult = await setup(observabilityOptions, sandbox); const spanner = setupResult.spanner; const server = setupResult.server; const spannerMock = setupResult.spannerMock; @@ -421,6 +456,7 @@ describe('ObservabilityOptions injection and propagation', async () => { spannerMock.resetRequests(); spanner.close(); server.tryShutdown(() => {}); + sandbox.restore(); }); // Ensure that the same observability configuration is set on the Spanner client. 
@@ -470,7 +506,7 @@ describe('ObservabilityOptions injection and propagation', async () => { tracerProvider: tracerProvider, enableExtendedTracing: true, }; - const setupResult = await setup(observabilityOptions); + const setupResult = await setup(observabilityOptions, sandbox); const spanner = setupResult.spanner; const server = setupResult.server; const spannerMock = setupResult.spannerMock; @@ -746,7 +782,7 @@ describe('ObservabilityOptions injection and propagation', async () => { tracerProvider: injectedTracerProvider, enableExtendedTracing: true, }; - const setupResult = await setup(observabilityOptions); + const setupResult = await setup(observabilityOptions, sandbox); const spanner = setupResult.spanner; const server = setupResult.server; const spannerMock = setupResult.spannerMock; @@ -832,8 +868,10 @@ describe('E2E traces with async/await', async () => { let traceExporter: typeof InMemorySpanExporter; let provider: typeof NodeTracerProvider; let observabilityOptions: typeof ObservabilityOptions; + let sandbox; beforeEach(async () => { + sandbox = sinon.createSandbox(); traceExporter = new InMemorySpanExporter(); provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), @@ -845,7 +883,7 @@ describe('E2E traces with async/await', async () => { tracerProvider: provider, enableExtendedTracing: true, }; - const setupResult = await setup(observabilityOptions); + const setupResult = await setup(observabilityOptions, sandbox); spanner = setupResult.spanner; server = setupResult.server; spannerMock = setupResult.spannerMock; @@ -857,6 +895,7 @@ describe('E2E traces with async/await', async () => { spannerMock.resetRequests(); spanner.close(); server.tryShutdown(() => {}); + sandbox.restore(); }); function assertAsyncAwaitExpectations() { @@ -1021,6 +1060,7 @@ describe('Negative cases', async () => { let traceExporter: typeof InMemorySpanExporter; let provider: typeof NodeTracerProvider; let observabilityOptions: typeof ObservabilityOptions; + let sandbox; const selectSql1p = 'SELECT 1p'; const messageBadSelect1p = `Missing whitespace between literal and alias [at 1:9] @@ -1032,6 +1072,7 @@ SELECT 1p 'Failed to insert row with primary key ({pk#SingerId:1}) due to previously existing row'; beforeEach(async () => { + sandbox = sinon.createSandbox(); traceExporter = new InMemorySpanExporter(); provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), @@ -1043,7 +1084,7 @@ SELECT 1p tracerProvider: provider, enableExtendedTracing: true, }; - const setupResult = await setup(observabilityOptions); + const setupResult = await setup(observabilityOptions, sandbox); spanner = setupResult.spanner; server = setupResult.server; spannerMock = setupResult.spannerMock; @@ -1073,6 +1114,7 @@ SELECT 1p spannerMock.resetRequests(); spanner.close(); server.tryShutdown(() => {}); + sandbox.restore(); }); function assertRunBadSyntaxExpectations() { @@ -1930,17 +1972,19 @@ describe('Traces for ExecuteStream broken stream retries', () => { }); describe('End to end tracing headers', () => { + let sandbox; let server: grpc.Server; let spanner: Spanner; let spannerMock: mock.MockSpanner; let observabilityOptions: typeof ObservabilityOptions; beforeEach(async () => { + sandbox = sinon.createSandbox(); observabilityOptions = { enableEndToEndTracing: true, }; - const setupResult = await setup(observabilityOptions); + const setupResult = await setup(observabilityOptions, sandbox); spanner = setupResult.spanner; server = setupResult.server; spannerMock = setupResult.spannerMock; @@ -1950,6 +1994,7 @@ 
describe('End to end tracing headers', () => { spannerMock.resetRequests(); spanner.close(); server.tryShutdown(() => {}); + sandbox.restore(); }); it('run', done => { diff --git a/src/index.ts b/src/index.ts index a78691246..7bc1d14ff 100644 --- a/src/index.ts +++ b/src/index.ts @@ -94,6 +94,11 @@ import { injectRequestIDIntoError, nextSpannerClientId, } from './request_id_header'; +import {PeriodicExportingMetricReader} from '@opentelemetry/sdk-metrics'; +import {MetricInterceptor} from './metrics/interceptor'; +import {CloudMonitoringMetricsExporter} from './metrics/spanner-metrics-exporter'; +import {MetricsTracerFactory} from './metrics/metrics-tracer-factory'; +import {MetricsTracer} from './metrics/metrics-tracer'; // eslint-disable-next-line @typescript-eslint/no-var-requires const gcpApiConfig = require('./spanner_grpc_config.json'); @@ -310,6 +315,7 @@ class Spanner extends GrpcService { defaultTransactionOptions: RunTransactionOptions; _observabilityOptions: ObservabilityOptions | undefined; private _universeDomain: string; + private _isEmulatorEnabled: boolean; readonly _nthClientId: number; /** @@ -412,12 +418,14 @@ class Spanner extends GrpcService { ); } + let isEmulatorEnabled = false; const emulatorHost = Spanner.getSpannerEmulatorHost(); if ( emulatorHost && emulatorHost.endpoint && emulatorHost.endpoint.length > 0 ) { + isEmulatorEnabled = true; options.servicePath = emulatorHost.endpoint; options.port = emulatorHost.port; options.sslCreds = grpc.credentials.createInsecure(); @@ -444,6 +452,7 @@ class Spanner extends GrpcService { this.routeToLeaderEnabled = false; } + this._isEmulatorEnabled = isEmulatorEnabled; this.options = options; this.auth = new GoogleAuth(this.options); this.clients_ = new Map(); @@ -461,6 +470,7 @@ class Spanner extends GrpcService { ensureInitialContextManagerSet(); this._nthClientId = nextSpannerClientId(); this._universeDomain = universeEndpoint; + this.configureMetrics_(); } get universeDomain() { @@ -525,6 +535,9 @@ class Spanner extends GrpcService { } client.close(); }); + cleanup().catch(err => { + console.error('Error occured during cleanup: ', err); + }); } /** @@ -1573,6 +1586,25 @@ class Spanner extends GrpcService { return this.instanceConfigs_.get(key)!; } + /** + * Setup the OpenTelemetry metrics capturing for service metrics to Google Cloud Monitoring. + */ + configureMetrics_() { + const metricsEnabled = + process.env.SPANNER_DISABLE_BUILTIN_METRICS !== 'true' && + !this._isEmulatorEnabled; + MetricsTracerFactory.enabled = metricsEnabled; + if (metricsEnabled) { + const factory = MetricsTracerFactory.getInstance(this.projectId); + const periodicReader = new PeriodicExportingMetricReader({ + exporter: new CloudMonitoringMetricsExporter({auth: this.auth}), + exportIntervalMillis: 60000, + }); + // Retrieve the MeterProvider to trigger construction + factory!.getMeterProvider([periodicReader]); + } + } + /** * Prepare a gapic request. This will cache the GAX client and replace * {{projectId}} placeholders, if necessary. @@ -1635,6 +1667,10 @@ class Spanner extends GrpcService { }); // Attach the x-goog-spanner-request-id to the currently active span. 
attributeXGoogSpannerRequestIdToActiveSpan(config); + const interceptors: any[] = []; + if (MetricsTracerFactory.enabled) { + interceptors.push(MetricInterceptor); + } const requestFn = gaxClient[config.method].bind( gaxClient, reqOpts, @@ -1642,6 +1678,9 @@ class Spanner extends GrpcService { extend(true, {}, config.gaxOpts, { otherArgs: { headers: config.headers, + options: { + interceptors: interceptors, + }, }, }), ); @@ -1711,21 +1750,51 @@ class Spanner extends GrpcService { */ // eslint-disable-next-line @typescript-eslint/no-explicit-any request(config: any, callback?: any): any { + let metricsTracer: MetricsTracer | null = null; + if (config.client === 'SpannerClient') { + metricsTracer = + MetricsTracerFactory?.getInstance()?.createMetricsTracer( + config.method, + config.reqOpts.session ?? config.reqOpts.database, + config.headers['x-goog-spanner-request-id'], + ) ?? null; + } + metricsTracer?.recordOperationStart(); if (typeof callback === 'function') { this.prepareGapicRequest_(config, (err, requestFn) => { if (err) { callback(err); + metricsTracer?.recordOperationCompletion(); } else { - requestFn(callback); + const wrappedCallback = (...args) => { + metricsTracer?.recordOperationCompletion(); + callback(...args); + }; + requestFn(wrappedCallback); } }); } else { return new Promise((resolve, reject) => { this.prepareGapicRequest_(config, (err, requestFn) => { if (err) { + metricsTracer?.recordOperationCompletion(); reject(err); } else { - resolve(requestFn()); + const result = requestFn(); + if (result && typeof result.then === 'function') { + result + .then(val => { + metricsTracer?.recordOperationCompletion(); + resolve(val); + }) + .catch(error => { + metricsTracer?.recordOperationCompletion(); + reject(error); + }); + } else { + metricsTracer?.recordOperationCompletion(); + resolve(result); + } } }); }); @@ -1745,6 +1814,16 @@ class Spanner extends GrpcService { */ // eslint-disable-next-line @typescript-eslint/no-explicit-any requestStream(config): any { + let metricsTracer: MetricsTracer | null = null; + if (config.client === 'SpannerClient') { + metricsTracer = + MetricsTracerFactory?.getInstance()?.createMetricsTracer( + config.method, + config.reqOpts.session ?? config.reqOpts.database, + config.headers['x-goog-spanner-request-id'], + ) ?? null; + } + metricsTracer?.recordOperationStart(); const stream = streamEvents(through.obj()); stream.once('reading', () => { this.prepareGapicRequest_(config, (err, requestFn) => { @@ -1759,6 +1838,12 @@ class Spanner extends GrpcService { .pipe(stream); }); }); + stream.on('finish', () => { + stream.destroy(); + }); + stream.on('close', () => { + metricsTracer?.recordOperationCompletion(); + }); return stream; } @@ -2044,6 +2129,32 @@ class Spanner extends GrpcService { } } +let cleanupCalled = false; +const cleanup = async () => { + if (cleanupCalled) return; + cleanupCalled = true; + await MetricsTracerFactory.resetInstance(); +}; + +// For signals (let process exit naturally) +process.on('SIGINT', async () => { + await cleanup(); +}); +process.on('SIGTERM', async () => { + await cleanup(); +}); + +// For natural exit (Node will NOT wait for async, so we must block the event loop) +process.on('beforeExit', () => { + const done = cleanup(); + if (done && typeof done.then === 'function') { + // Handle promise rejection + done.catch(err => { + console.error('Cleanup error before exit:', err); + }); + } +}); + /*! 
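A brief sketch of how the metrics gating above looks from application code; the project id is a placeholder, while the environment variable name comes from configureMetrics_:

// import {Spanner} from '@google-cloud/spanner';
// Built-in client metrics are collected unless this variable is 'true'
// or the emulator is in use (emulator detection disables them automatically).
process.env.SPANNER_DISABLE_BUILTIN_METRICS = 'true';
const spanner = new Spanner({projectId: 'my-project'}); // placeholder project
// With metrics left enabled, configureMetrics_() wires a PeriodicExportingMetricReader
// (60s export interval) to CloudMonitoringMetricsExporter, and request()/requestStream()
// wrap each SpannerClient call in a MetricsTracer alongside the gRPC MetricInterceptor.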
Developer Documentation * * All async methods (except for streams) will return a Promise in the event diff --git a/src/metrics/constants.ts b/src/metrics/constants.ts index 068ae80b7..cd2d22fdc 100644 --- a/src/metrics/constants.ts +++ b/src/metrics/constants.ts @@ -11,15 +11,24 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +import { + View, + ExplicitBucketHistogramAggregation, +} from '@opentelemetry/sdk-metrics'; export const SPANNER_METER_NAME = 'spanner-nodejs'; export const CLIENT_METRICS_PREFIX = 'spanner.googleapis.com/internal/client'; export const SPANNER_RESOURCE_TYPE = 'spanner_instance_client'; - +// Maximum time to keep MetricsTracers before considering them stale, and stop tracking them. +export const TRACER_CLEANUP_THRESHOLD_MS = 60 * 60 * 1000; // 60 minutes +export const TRACER_CLEANUP_INTERVAL_MS = 30 * 60 * 1000; // 30 Minutes // OTel semantic conventions // See https://github.com/open-telemetry/opentelemetry-js/blob/main/semantic-conventions/README.md#unstable-semconv export const ATTR_CLOUD_REGION = 'cloud.region'; +// Minimum period that must past between metric exports +export const MIN_EXPORT_FREQUENCY_MS = 30 * 1000; + // Monitored resource labels export const MONITORED_RES_LABEL_KEY_PROJECT = 'project_id'; export const MONITORED_RES_LABEL_KEY_INSTANCE = 'instance_id'; @@ -64,3 +73,42 @@ export const METRIC_NAMES = new Set([ METRIC_NAME_ATTEMPT_COUNT, METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, ]); + +export const UNKNOWN_ATTRIBUTE = 'unknown'; + +// Histogram Bucket boundaries +export const HISTOGRAM_BUCKET_BOUNDARIES = [ + 0.0, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, + 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 25.0, 30.0, 40.0, 50.0, 65.0, 80.0, + 100.0, 130.0, 160.0, 200.0, 250.0, 300.0, 400.0, 500.0, 650.0, 800.0, 1000.0, + 2000.0, 5000.0, 10000.0, 20000.0, 50000.0, 100000.0, 200000.0, 400000.0, + 800000.0, 1600000.0, 3200000.0, +]; + +// Defined Views for metric aggregation +export const OPERATION_LATENCY_VIEW = new View({ + instrumentName: METRIC_NAME_OPERATION_LATENCIES, + aggregation: new ExplicitBucketHistogramAggregation( + HISTOGRAM_BUCKET_BOUNDARIES, + ), +}); + +export const ATTEMPT_LATENCY_VIEW = new View({ + instrumentName: METRIC_NAME_ATTEMPT_LATENCIES, + aggregation: new ExplicitBucketHistogramAggregation( + HISTOGRAM_BUCKET_BOUNDARIES, + ), +}); + +export const GFE_LATENCY_VIEW = new View({ + instrumentName: METRIC_NAME_GFE_LATENCIES, + aggregation: new ExplicitBucketHistogramAggregation( + HISTOGRAM_BUCKET_BOUNDARIES, + ), +}); + +export const METRIC_VIEWS = [ + OPERATION_LATENCY_VIEW, + ATTEMPT_LATENCY_VIEW, + GFE_LATENCY_VIEW, +]; diff --git a/src/metrics/interceptor.ts b/src/metrics/interceptor.ts new file mode 100644 index 000000000..21ec30883 --- /dev/null +++ b/src/metrics/interceptor.ts @@ -0,0 +1,79 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +import {grpc} from 'google-gax'; +import {MetricsTracerFactory} from './metrics-tracer-factory'; + +/** + * Interceptor for recording metrics on gRPC calls. + * + * This interceptor records attempt metrics at the start and completion of each gRPC call, + * and also records Google Front End (GFE) metrics such as latency and connectivity errors + * based on the presence and value of the 'server-timing' header in the response metadata. + * + * @param {object} options - The gRPC call options, including method definition. + * @param {function} nextCall - The next function to call in the gRPC request chain. + * @returns {grpc.InterceptingCall} - The intercepting call with added metrics recording. + */ +export const MetricInterceptor = (options, nextCall) => { + return new grpc.InterceptingCall(nextCall(options), { + start: function (metadata, listener, next) { + // Record attempt metric on request start + const factory = MetricsTracerFactory.getInstance(); + const requestId = metadata.get('x-goog-spanner-request-id')[0] as string; + const metricsTracer = factory?.getCurrentTracer(requestId); + metricsTracer?.recordAttemptStart(); + const newListener = { + onReceiveMetadata: function (metadata, next) { + // Record GFE Metrics + // GFE latency if available, + // or else increase the GFE connectivity error count + if (metricsTracer) { + const gfeHeader = metadata.getMap()['server-timing']; + const timing = metricsTracer?.extractGfeLatency(gfeHeader); + metricsTracer.gfeLatency = timing ?? null; + } + + next(metadata); + }, + onReceiveMessage: function (message, next) { + next(message); + }, + onReceiveStatus: function (status, next) { + next(status); + + // Record attempt metric completion + metricsTracer?.recordAttemptCompletion(status.code); + if (metricsTracer?.gfeLatency) { + metricsTracer?.recordGfeLatency(status.code); + } else { + metricsTracer?.recordGfeConnectivityErrorCount(status.code); + } + }, + }; + next(metadata, newListener); + }, + sendMessage: function (message, next) { + next(message); + }, + + halfClose: function (next) { + next(); + }, + + cancel: function (next) { + next(); + }, + }); +}; diff --git a/src/metrics/metrics-tracer-factory.ts b/src/metrics/metrics-tracer-factory.ts index f8f82c9bb..2372a26be 100644 --- a/src/metrics/metrics-tracer-factory.ts +++ b/src/metrics/metrics-tracer-factory.ts @@ -16,7 +16,7 @@ import * as crypto from 'crypto'; import * as os from 'os'; import * as process from 'process'; import {v4 as uuidv4} from 'uuid'; -import {MeterProvider} from '@opentelemetry/sdk-metrics'; +import {MeterProvider, MetricReader} from '@opentelemetry/sdk-metrics'; import {Counter, Histogram} from '@opentelemetry/api'; import {detectResources, Resource} from '@opentelemetry/resources'; import {GcpDetectorSync} from '@google-cloud/opentelemetry-resource-util'; @@ -24,160 +24,320 @@ import * as Constants from './constants'; import {MetricsTracer} from './metrics-tracer'; const version = require('../../../package.json').version; +/** + * Factory class for creating and managing MetricsTracer instances and OTEL metric instruments. + * + * The MetricsTracerFactory is responsible for: + * - Creating and managing a singleton instance for metrics collection. + * - Initializing and providing OTEL MeterProvider. + * - Generating and storing client-specific metadata (UID, hash, name, location, projectId). 
+ * - Creating and tracking MetricsTracer instances for individual gRPC Spanner operations. + * - Providing utility methods for extracting resource attributes and managing tracers. + * + * This class is designed to be used as a singleton. Metrics collection can be enabled or disabled + * globally via the static `enabled` property, that is set from the SpannerClient. + */ export class MetricsTracerFactory { private static _instance: MetricsTracerFactory | null = null; - private static _meterProvider: MeterProvider | null = null; - private _clientAttributes: {[key: string]: string}; + private _meterProvider: MeterProvider | null = null; private _instrumentAttemptCounter!: Counter; private _instrumentAttemptLatency!: Histogram; private _instrumentOperationCounter!: Counter; private _instrumentOperationLatency!: Histogram; private _instrumentGfeConnectivityErrorCount!: Counter; private _instrumentGfeLatency!: Histogram; + private _clientHash: string; + private _clientName: string; private _clientUid: string; - public enabled: boolean; - - private constructor(enabled = false) { - this.enabled = enabled; - this._createMetricInstruments(); + private _location = 'global'; + private _projectId: string; + private _currentOperationTracers = new Map(); + private _currentOperationLastUpdatedMs = new Map(); + private _intervalTracerCleanup: NodeJS.Timeout; + public static _readers: MetricReader[] = []; + public static enabled = true; + /** + * Private constructor to enforce singleton pattern. + * Initializes client metadata and detects client location if metrics are enabled. + * Location will default to global if host machine is not a GCE or GKE instance. + * @param projectId The GCP project ID used by the Spanner Client. + */ + private constructor(projectId: string) { + this._projectId = projectId; this._clientUid = MetricsTracerFactory._generateClientUId(); - this._clientAttributes = this.createClientAttributes(); - } + this._clientName = `${Constants.SPANNER_METER_NAME}/${version}`; + + // Only perform async call to retrieve location is metrics are enabled. + if (MetricsTracerFactory.enabled) { + (async () => { + const location = await MetricsTracerFactory._detectClientLocation(); + this._location = location.length > 0 ? location : 'global'; + })().catch(error => { + throw error; + }); + } - private createClientAttributes(): {[key: string]: string} { - const clientName = `${Constants.SPANNER_METER_NAME}/${version}`; - return { - [Constants.METRIC_LABEL_KEY_CLIENT_NAME]: clientName, - [Constants.METRIC_LABEL_KEY_CLIENT_UID]: this._clientUid, - }; + this._clientHash = MetricsTracerFactory._generateClientHash( + this._clientUid, + ); + + // Start the Tracer cleanup task at an interval + this._intervalTracerCleanup = setInterval( + this._cleanMetricsTracers.bind(this), + Constants.TRACER_CLEANUP_INTERVAL_MS, + ); + // unref the interval to prevent it from blocking app termination + // in the event loop + this._intervalTracerCleanup.unref(); } /** - Create set of attributes for resource metrics + * Returns the singleton instance of MetricsTracerFactory. + * If metrics are disabled, returns null. + * The instance is created only once, and enabling/disabling metrics can only be done on the initial call. + * @param projectId Optional GCP project ID for the factory instantiation. Does nothing for subsequent calls. + * @returns The singleton MetricsTracerFactory instance or null if disabled. 
*/ - public async createResourceAttributes( - projectId: string, - ): Promise<{[key: string]: string}> { - const clientHash = MetricsTracerFactory._generateClientHash( - this._clientUid, - ); - const location = await MetricsTracerFactory._detectClientLocation(); - return { - [Constants.MONITORED_RES_LABEL_KEY_PROJECT]: projectId, - [Constants.MONITORED_RES_LABEL_KEY_INSTANCE]: 'unknown', - [Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH]: clientHash, - // Skipping instance config until we have a way to get it - [Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG]: 'unknown', - [Constants.MONITORED_RES_LABEL_KEY_LOCATION]: location, - }; - } + public static getInstance(projectId = ''): MetricsTracerFactory | null { + if (!MetricsTracerFactory.enabled) { + return null; + } - public static getInstance(enabled: boolean) { // Create a singleton instance, enabling/disabling metrics can only be done on the initial call if (MetricsTracerFactory._instance === null) { - MetricsTracerFactory._instance = new MetricsTracerFactory(enabled); + MetricsTracerFactory._instance = new MetricsTracerFactory(projectId); } - return MetricsTracerFactory._instance; + return MetricsTracerFactory!._instance; } - public static getMeterProvider( - resourceAttributes: {[key: string]: string} = {}, - ): MeterProvider { - if (MetricsTracerFactory._meterProvider === null) { - const resource = new Resource(resourceAttributes); - MetricsTracerFactory._meterProvider = new MeterProvider({ + /** + * Returns the MeterProvider, creating it and metric instruments if not already initialized. + * Client-wide attributes that are known at this time are cached to be provided to all MetricsTracers. + * @param readers Optional array of MetricReader instances to attach to the MeterProvider. + * @returns The OTEL MeterProvider instance. + */ + public getMeterProvider(readers: MetricReader[] = []): MeterProvider { + if (this._meterProvider === null) { + const resource = new Resource({ + [Constants.MONITORED_RES_LABEL_KEY_PROJECT]: this._projectId, + [Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH]: this._clientHash, + [Constants.MONITORED_RES_LABEL_KEY_LOCATION]: this._location, + [Constants.MONITORED_RES_LABEL_KEY_INSTANCE]: 'unknown', + [Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG]: 'unknown', + }); + MetricsTracerFactory._readers = readers; + this._meterProvider = new MeterProvider({ resource: resource, + readers: readers, + views: Constants.METRIC_VIEWS, }); + this._createMetricInstruments(); } + return this._meterProvider; + } - return MetricsTracerFactory._meterProvider; + /** + * Resets the singleton instance of the MetricsTracerFactory. + */ + public static async resetInstance() { + clearInterval(MetricsTracerFactory._instance?._intervalTracerCleanup); + await MetricsTracerFactory._instance?.resetMeterProvider(); + MetricsTracerFactory._instance = null; } - public static resetMeterProvider() { - MetricsTracerFactory._meterProvider = null; + /** + * Resets the MeterProvider. + */ + public async resetMeterProvider() { + if (this._meterProvider !== null) { + await this._meterProvider!.shutdown(); + } + this._meterProvider = null; + this._currentOperationTracers = new Map(); + this._currentOperationLastUpdatedMs = new Map(); } + /** + * Returns the attempt latency histogram instrument. + */ get instrumentAttemptLatency(): Histogram { return this._instrumentAttemptLatency; } + /** + * Returns the attempt counter instrument. 
+ */ get instrumentAttemptCounter(): Counter { return this._instrumentAttemptCounter; } + /** + * Returns the operation latency histogram instrument. + */ get instrumentOperationLatency(): Histogram { return this._instrumentOperationLatency; } + /** + * Returns the operation counter instrument. + */ get instrumentOperationCounter(): Counter { return this._instrumentOperationCounter; } + /** + * Returns the GFE connectivity error count counter instrument. + */ get instrumentGfeConnectivityErrorCount(): Counter { return this._instrumentGfeConnectivityErrorCount; } + /** + * Returns the GFE latency histogram instrument. + */ get instrumentGfeLatency(): Histogram { return this._instrumentGfeLatency; } - get clientAttributes(): Record { - return this._clientAttributes; + /** + * Returns the Client UID. + */ + get clientUid(): string { + return this._clientUid; } - set project(project: string) { - this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT] = project; + /** + * Returns the Client Name. + */ + get clientName(): string { + return this._clientName; } - set instance(instance: string) { - this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE] = - instance; - } + /** + * Creates a new MetricsTracer for a given resource name and method, and stores it for later retrieval. + * Returns null if metrics are disabled. + * @param formattedName The formatted resource name (e.g., full database path). + * @param method The gRPC method name. + * @returns A new MetricsTracer instance or null if metrics are disabled. + */ + public createMetricsTracer( + method: string, + formattedName: string, + requestId: string, + ): MetricsTracer | null { + if (!MetricsTracerFactory.enabled) { + return null; + } + const operationRequest = this._extractOperationRequest(requestId); - set instanceConfig(instanceConfig: string) { - this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG] = - instanceConfig; - } + if (this._currentOperationTracers.has(operationRequest)) { + return this._currentOperationTracers.get(operationRequest); + } - set location(location: string) { - this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION] = - location; + const {instance, database} = this.getInstanceAttributes(formattedName); + const tracer = new MetricsTracer( + this._instrumentAttemptCounter, + this._instrumentAttemptLatency, + this._instrumentOperationCounter, + this._instrumentOperationLatency, + this._instrumentGfeConnectivityErrorCount, + this._instrumentGfeLatency, + MetricsTracerFactory.enabled, + database, + instance, + method, + operationRequest, + ); + this._currentOperationTracers.set(operationRequest, tracer); + this._currentOperationLastUpdatedMs.set(operationRequest, Date.now()); + return tracer; } - set clientHash(hash: string) { - this._clientAttributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH] = - hash; + /** + * Takes a formatted name and parses the project, instance, and database. + * @param formattedName The formatted resource name (e.g., full database path). + * @returns An object containing project, instance, and database strings. 
+ */ + public getInstanceAttributes(formattedName: string) { + if (typeof formattedName !== 'string' || formattedName === '') { + return { + project: Constants.UNKNOWN_ATTRIBUTE, + instance: Constants.UNKNOWN_ATTRIBUTE, + database: Constants.UNKNOWN_ATTRIBUTE, + }; + } + const regex = + /projects\/(?[^/]+)\/instances\/(?[^/]+)(?:\/databases\/(?[^/]+))?/; + const match = formattedName.match(regex); + const project = match?.groups?.projectId || Constants.UNKNOWN_ATTRIBUTE; + const instance = match?.groups?.instanceId || Constants.UNKNOWN_ATTRIBUTE; + const database = match?.groups?.databaseId || Constants.UNKNOWN_ATTRIBUTE; + return {project: project, instance: instance, database: database}; } - set clientUid(clientUid: string) { - this._clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_UID] = clientUid; - } + /** + * Retrieves the current MetricsTracer for a given request id. + * Returns null if no tracer exists for the request. + * Does not implicitly create MetricsTracers as that should be done + * explicitly using the createMetricsTracer function. + * request id is expected to be as set in the gRPC metadata. + * @param requestId The request id of the gRPC call set under 'x-goog-spanner-request-id'. + * @returns The MetricsTracer instance or null if not found. + */ + public getCurrentTracer(requestId: string): MetricsTracer | null { + const operationRequest: string = this._extractOperationRequest(requestId); + if (!this._currentOperationTracers.has(operationRequest)) { + // Attempting to retrieve tracer that doesn't exist. + return null; + } + this._currentOperationLastUpdatedMs.set(operationRequest, Date.now()); - set clientName(clientName: string) { - this._clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME] = clientName; + return this._currentOperationTracers.get(operationRequest) ?? null; } - set database(database: string) { - this._clientAttributes[Constants.METRIC_LABEL_KEY_DATABASE] = database; + /** + * Removes the MetricsTracer associated with the given request id. + * @param requestId The request id of the gRPC call set under 'x-goog-spanner-request-id'. + */ + public clearCurrentTracer(requestId: string) { + const operationRequest = this._extractOperationRequest(requestId); + if (!this._currentOperationTracers.has(operationRequest)) { + return; + } + this._currentOperationTracers.delete(operationRequest); + this._currentOperationLastUpdatedMs.delete(operationRequest); } - public createMetricsTracer(): MetricsTracer { - return new MetricsTracer( - this._clientAttributes, - this._instrumentAttemptCounter, - this._instrumentAttemptLatency, - this._instrumentOperationCounter, - this._instrumentOperationLatency, - this._instrumentGfeConnectivityErrorCount, - this._instrumentGfeLatency, - this.enabled, - ); + private _extractOperationRequest(requestId: string): string { + if (!requestId) { + return ''; + } + + const regex = /^(\d+\.[a-z0-9]+\.\d+\.\d+\.\d+)\.\d+$/i; + const match = requestId.match(regex); + + if (!match) { + return ''; + } + + const request = match[1]; + return request; } + /** + * Creates and initializes all metric instruments (counters and histograms) for the MeterProvider. + * Instruments are only created if metrics are enabled. 
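+   * The instruments created here back the instrument getters above: attempt and
+   * operation latencies and counts, GFE latency, and the GFE connectivity error
+   * count.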
+ */ private _createMetricInstruments() { - const meterProvider = MetricsTracerFactory.getMeterProvider(); - const meter = meterProvider.getMeter(Constants.SPANNER_METER_NAME, version); + if (!MetricsTracerFactory.enabled) { + return; + } + + const meter = this.getMeterProvider().getMeter( + Constants.SPANNER_METER_NAME, + version, + ); this._instrumentAttemptLatency = meter.createHistogram( Constants.METRIC_NAME_ATTEMPT_LATENCIES, @@ -225,6 +385,7 @@ export class MetricsTracerFactory { /** * Generates a unique identifier for the client_uid metric field. The identifier is composed of a * UUID, the process ID (PID), and the hostname. + * @returns A unique string identifier for the client. */ private static _generateClientUId(): string { const identifier = uuidv4(); @@ -249,6 +410,8 @@ export class MetricsTracerFactory { * enough to keep the cardinality of the Resource targets under control. Note: If at later time * the range needs to be increased, it can be done by increasing the value of `kPrefixLength` to * up to 24 bits without changing the format of the returned value. + * @param clientUid The client UID string to hash. + * @returns A 6-digit hexadecimal hash string. */ private static _generateClientHash(clientUid: string): string { if (clientUid === null || clientUid === undefined) { @@ -267,6 +430,8 @@ export class MetricsTracerFactory { /** * Gets the location (region) of the client, otherwise returns to the "global" region. + * Uses GcpDetectorSync to detect the region from the environment. + * @returns The detected region string, or "global" if not found. */ private static async _detectClientLocation(): Promise { const defaultRegion = 'global'; @@ -286,4 +451,20 @@ export class MetricsTracerFactory { } return defaultRegion; } + + private _cleanMetricsTracers() { + if (this._currentOperationLastUpdatedMs.size === 0) { + return; + } + + for (const [ + operationTracer, + lastUpdated, + ] of this._currentOperationLastUpdatedMs.entries()) { + if (Date.now() - lastUpdated >= Constants.TRACER_CLEANUP_THRESHOLD_MS) { + this._currentOperationTracers.delete(operationTracer); + this._currentOperationLastUpdatedMs.delete(operationTracer); + } + } + } } diff --git a/src/metrics/metrics-tracer.ts b/src/metrics/metrics-tracer.ts index ac1e92a35..61749e59c 100644 --- a/src/metrics/metrics-tracer.ts +++ b/src/metrics/metrics-tracer.ts @@ -14,234 +14,330 @@ import {status as Status} from '@grpc/grpc-js'; import {Counter, Histogram} from '@opentelemetry/api'; +import {MetricsTracerFactory} from './metrics-tracer-factory'; import { - METRIC_LABEL_KEY_CLIENT_NAME, - METRIC_LABEL_KEY_CLIENT_UID, METRIC_LABEL_KEY_DATABASE, METRIC_LABEL_KEY_METHOD, METRIC_LABEL_KEY_STATUS, - MONITORED_RES_LABEL_KEY_CLIENT_HASH, MONITORED_RES_LABEL_KEY_INSTANCE, - MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG, - MONITORED_RES_LABEL_KEY_LOCATION, - MONITORED_RES_LABEL_KEY_PROJECT, } from './constants'; +/** + * MetricAttemptTracer tracks the start time and status of a single gRPC attempt. + * + * This class is used to record the timestamp when an attempt begins and to store + * the status code of the attempt upon completion. It is to be used + * by MetricsTracer to monitor and report metrics for each individual gRPC call attempt. + */ class MetricAttemptTracer { private _startTime: Date; - public status: number; + public status: string; constructor() { this._startTime = new Date(Date.now()); - this.status = -1; + this.status = Status[Status.UNKNOWN]; } + /** + * Returns the start time of the attempt. 
+ */ get startTime() { return this._startTime; } } +/** + * MetricOperationTracer tracks the lifecycle and metadata of a single gRPC spanner operation, + * which may consist of multiple attempts. + * + * This class is responsible for: + * - Recording the start time of the operation. + * - Tracking the number of attempts made for the operation. + * - Holding a reference to the current attempt's tracer (MetricAttemptTracer). + * - Storing the final status code of the operation. + * + * Usage: + * - Call `start()` to reset the operation's start time. + * - Call `createNewAttempt()` to begin tracking a new attempt within the operation. + * - Access `currentAttempt` to retrieve the current MetricAttemptTracer instance. + * - Access `attemptCount` to get the number of attempts made so far. + * - Access `startTime` to get the operation's start time. + * - Set or read `status` to track the operation's final status code. + */ class MetricOperationTracer { private _attemptCount: number; private _startTime: Date; - private _currentAttempt; - public status: number; + private _currentAttempt: MetricAttemptTracer | null; constructor() { this._attemptCount = 0; this._startTime = new Date(Date.now()); this._currentAttempt = null; - this.status = -1; } + /** + * Returns the number of attempts made for this operation. + */ get attemptCount() { return this._attemptCount; } + /** + * Returns the current MetricAttemptTracer instance for the ongoing attempt. + */ get currentAttempt() { return this._currentAttempt; } + /** + * Returns the start time of the operation. + */ get startTime() { return this._startTime; } - public start() { - this._startTime = new Date(Date.now()); - } - + /** + * Increments the attempt count and creates a new MetricAttemptTracer + * for tracking the next attempt within this operation. + */ public createNewAttempt() { this._attemptCount += 1; this._currentAttempt = new MetricAttemptTracer(); } } +/** + * MetricsTracer is responsible for recording and managing metrics related to + * gRPC Spanner operations and attempts counters, and latencies, + * as well as Google Front End (GFE) metrics such as latency and connectivity errors. + * + * This class provides methods to record the start and completion of operations + * and attempts, extract GFE latency from response headers. + * It also handles setting of required Spanner metric attributes to + * be later consumed by the SpannerMetricsExporter. + */ export class MetricsTracer { - public currentOperation: MetricOperationTracer = new MetricOperationTracer(); - + /** + * The current MetricOperationTracer instance tracking the ongoing operation. + */ + public currentOperation: MetricOperationTracer | null = null; + + /** + * Stores client and resource attributes for labeling metrics. + */ + private _clientAttributes: {[key: string]: string} = {}; + + /* + * The current GFE latency associated with this tracer. + */ + public gfeLatency: number | null = null; + /** + * Constructs a new MetricsTracer. + * + * @param _instrumentAttemptCounter Counter for attempt count metrics. + * @param _instrumentAttemptLatency Histogram for attempt latency metrics. + * @param _instrumentOperationCounter Counter for operation count metrics. + * @param _instrumentOperationLatency Histogram for operation latency metrics. + * @param _instrumentGfeConnectivityErrorCount Counter for GFE connectivity errors. + * @param _instrumentGfeLatency Histogram for GFE latency metrics. + * @param enabled Whether metrics recording is enabled. 
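+   *
+   * Instances are normally obtained via MetricsTracerFactory.createMetricsTracer
+   * rather than constructed directly. A hedged usage sketch (resource path and
+   * request id values are placeholders):
+   * @example
+   * const tracer = MetricsTracerFactory.getInstance()?.createMetricsTracer(
+   *   'executeStreamingSql',
+   *   'projects/p/instances/i/databases/d',
+   *   '1.1a2bc3d4.1.1.1.1',
+   * );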
+ */ constructor( - private _clientAttributes: {[key: string]: string}, - private _instrumentAttemptCounter: Counter, - private _instrumentAttemptLatency: Histogram, - private _instrumentOperationCounter: Counter, - private _instrumentOperationLatency: Histogram, - private _instrumentGfeConnectivityErrorCount: Counter, - private _instrumentGfeLatency: Histogram, + private _instrumentAttemptCounter: Counter | null, + private _instrumentAttemptLatency: Histogram | null, + private _instrumentOperationCounter: Counter | null, + private _instrumentOperationLatency: Histogram | null, + private _instrumentGfeConnectivityErrorCount: Counter | null, + private _instrumentGfeLatency: Histogram | null, public enabled: boolean, - ) {} + private _database: string, + private _instance: string, + private _methodName: string, + private _request: string, + ) { + this._clientAttributes[METRIC_LABEL_KEY_DATABASE] = _database; + this._clientAttributes[METRIC_LABEL_KEY_METHOD] = _methodName; + this._clientAttributes[MONITORED_RES_LABEL_KEY_INSTANCE] = _instance; + } + /** + * Returns the difference in milliseconds between two Date objects. + * @param start The start time. + * @param end The end time. + * @returns The time difference in milliseconds. + */ private _getMillisecondTimeDifference(start: Date, end: Date): number { return end.valueOf() - start.valueOf(); } + /** + * Gets the current client and resource attributes for metrics. + */ get clientAttributes() { return this._clientAttributes; } + /** + * Gets the attempt counter OTEL instrument. + */ get instrumentAttemptCounter() { return this._instrumentAttemptCounter; } + /** + * Gets the attempt latency histogram OTEL instrument. + */ get instrumentAttemptLatency() { return this._instrumentAttemptLatency; } + /** + * Gets the operation counter OTEL instrument. + */ get instrumentOperationCounter() { return this._instrumentOperationCounter; } + /** + * Gets the operation latency histogram OTEL instrument. + */ get instrumentOperationLatency() { return this._instrumentOperationLatency; } + /** + * Records the start of a new attempt within the current operation. + * Increments the attempt count and creates a new MetricAttemptTracer. + */ public recordAttemptStart() { if (!this.enabled) return; - this.currentOperation.createNewAttempt(); + this.currentOperation!.createNewAttempt(); } - public recordAttemptCompletion(status: number = Status.OK) { + /** + * Records the completion of the current attempt, including its status and latency. + * These statuses code are defined in grpc.status + * @param status The status code of the attempt (default: Status.OK). + */ + public recordAttemptCompletion(statusCode: Status = Status.OK) { if (!this.enabled) return; - this.currentOperation.currentAttempt.status = status; + this.currentOperation!.currentAttempt!.status = Status[statusCode]; const attemptAttributes = this._createAttemptOtelAttributes(); const endTime = new Date(Date.now()); const attemptLatencyMilliseconds = this._getMillisecondTimeDifference( - this.currentOperation.currentAttempt.startTime, + this.currentOperation!.currentAttempt!.startTime, endTime, ); - this.instrumentAttemptLatency.record( + this.instrumentAttemptLatency?.record( attemptLatencyMilliseconds, attemptAttributes, ); + + this.instrumentAttemptCounter?.add(1, attemptAttributes); } + /** + * Records the start of a new operation, resetting the operation tracer and start time. 
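+   * If an operation is already in progress, the call is a no-op.
+   *
+   * A typical call sequence, as exercised in the unit tests:
+   * @example
+   * tracer.recordOperationStart();
+   * tracer.recordAttemptStart();
+   * tracer.recordAttemptCompletion(Status.OK);
+   * tracer.recordOperationCompletion();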
+ */ public recordOperationStart() { if (!this.enabled) return; + if (this.currentOperation !== null) { + return; // Don't re-start an already started operation + } this.currentOperation = new MetricOperationTracer(); - this.currentOperation.start(); } + /** + * Records the completion of the current operation, including its status, + * latency, and attempt count. Also clears the current tracer from the factory. + */ public recordOperationCompletion() { - if (!this.enabled) return; + if (!this.enabled || !this.currentOperation) return; const endTime = new Date(Date.now()); const operationAttributes = this._createOperationOtelAttributes(); - const attemptAttributes = this._createAttemptOtelAttributes(); const operationLatencyMilliseconds = this._getMillisecondTimeDifference( - this.currentOperation.startTime, + this.currentOperation!.startTime, endTime, ); - this.instrumentOperationCounter.add(1, operationAttributes); - this.instrumentOperationLatency.record( + this.instrumentOperationCounter?.add(1, operationAttributes); + this.instrumentOperationLatency?.record( operationLatencyMilliseconds, operationAttributes, ); - this.instrumentAttemptCounter.add( - this.currentOperation.attemptCount, - attemptAttributes, - ); + MetricsTracerFactory.getInstance()!.clearCurrentTracer(this._request); } - public recordGfeLatency(latency: number) { + /** + * Extracts the GFE latency value (in milliseconds) from a 'server-timing' header string. + * Returns null if the header is missing or does not contain a valid latency value. + * + * @param header The 'server-timing' header string. + * @returns The extracted GFE latency in milliseconds, or null if not found. + */ + public extractGfeLatency(header: string): number | null { + const regex = /gfet4t7; dur=([0-9]+).*/; + if (header === undefined) return null; + const match = header.match(regex); + if (!match) return null; + return Number(match[1]); + } + + /** + * Records the provided GFE latency. + * @param latency The GFE latency in milliseconds. + */ + public recordGfeLatency(statusCode: Status) { if (!this.enabled) return; - this._instrumentGfeLatency.record(latency, this.clientAttributes); + if (!this.gfeLatency) { + console.error( + 'ERROR: Attempted to record GFE metric with no latency value.', + ); + return; + } + + const attributes = {...this._clientAttributes}; + attributes[METRIC_LABEL_KEY_STATUS] = Status[statusCode]; + + this._instrumentGfeLatency?.record(this.gfeLatency, attributes); + this.gfeLatency = null; // Reset latency value } - public recordGfeConnectivityErrorCount() { + /** + * Increments the GFE connectivity error count metric. + */ + public recordGfeConnectivityErrorCount(statusCode: Status) { if (!this.enabled) return; - this._instrumentGfeConnectivityErrorCount.add(1, this.clientAttributes); + const attributes = {...this._clientAttributes}; + attributes[METRIC_LABEL_KEY_STATUS] = Status[statusCode]; + this._instrumentGfeConnectivityErrorCount?.add(1, attributes); } + /** + * Creates and returns a set of OTEL attributes for operation-level metrics. + * @returns The operation attributes object. + */ private _createOperationOtelAttributes() { if (!this.enabled) return {}; const attributes = {...this._clientAttributes}; attributes[METRIC_LABEL_KEY_STATUS] = - this.currentOperation.status.toString(); - + this.currentOperation!.currentAttempt?.status ?? Status[Status.UNKNOWN]; return attributes; } + /** + * Creates and returns a set of OTEL attributes for attempt-level metrics. 
+ * The overall operation status is set at this time based on the last + * attempt's status. + * @returns The attempt attributes object. + */ private _createAttemptOtelAttributes() { if (!this.enabled) return {}; const attributes = {...this._clientAttributes}; - if (this.currentOperation.currentAttempt === null) return attributes; + if (this.currentOperation?.currentAttempt === null) return attributes; attributes[METRIC_LABEL_KEY_STATUS] = - this.currentOperation.currentAttempt.status.toString(); + this.currentOperation!.currentAttempt.status; return attributes; } - - set project(project: string) { - if (!(MONITORED_RES_LABEL_KEY_PROJECT in this._clientAttributes)) { - this._clientAttributes[MONITORED_RES_LABEL_KEY_PROJECT] = project; - } - } - - set instance(instance: string) { - if (!(MONITORED_RES_LABEL_KEY_INSTANCE in this._clientAttributes)) { - this._clientAttributes[MONITORED_RES_LABEL_KEY_INSTANCE] = instance; - } - } - - set instanceConfig(instanceConfig: string) { - if (!(MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG in this._clientAttributes)) { - this._clientAttributes[MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG] = - instanceConfig; - } - } - - set location(location: string) { - if (!(MONITORED_RES_LABEL_KEY_LOCATION in this._clientAttributes)) { - this._clientAttributes[MONITORED_RES_LABEL_KEY_LOCATION] = location; - } - } - - set clientHash(clientHash: string) { - if (!(MONITORED_RES_LABEL_KEY_CLIENT_HASH in this._clientAttributes)) { - this._clientAttributes[MONITORED_RES_LABEL_KEY_CLIENT_HASH] = clientHash; - } - } - - set clientUid(clientUid: string) { - if (!(METRIC_LABEL_KEY_CLIENT_UID in this._clientAttributes)) { - this._clientAttributes[METRIC_LABEL_KEY_CLIENT_UID] = clientUid; - } - } - - set clientName(clientName: string) { - if (!(METRIC_LABEL_KEY_CLIENT_NAME in this._clientAttributes)) { - this._clientAttributes[METRIC_LABEL_KEY_CLIENT_NAME] = clientName; - } - } - - set database(database: string) { - if (!(METRIC_LABEL_KEY_DATABASE in this._clientAttributes)) { - this._clientAttributes[METRIC_LABEL_KEY_DATABASE] = database; - } - } - - set methodName(methodName: string) { - if (!(METRIC_LABEL_KEY_METHOD in this._clientAttributes)) { - this._clientAttributes[METRIC_LABEL_KEY_METHOD] = methodName; - } - } } diff --git a/src/metrics/spanner-metrics-exporter.ts b/src/metrics/spanner-metrics-exporter.ts index 98d291933..8846a5782 100644 --- a/src/metrics/spanner-metrics-exporter.ts +++ b/src/metrics/spanner-metrics-exporter.ts @@ -18,6 +18,7 @@ import {ExporterOptions} from './external-types'; import {MetricServiceClient} from '@google-cloud/monitoring'; import {transformResourceMetricToTimeSeriesArray} from './transform'; import {status} from '@grpc/grpc-js'; +import {MIN_EXPORT_FREQUENCY_MS} from './constants'; // Stackdriver Monitoring v3 only accepts up to 200 TimeSeries per // CreateTimeSeries call. @@ -28,7 +29,7 @@ export const MAX_BATCH_EXPORT_SIZE = 200; */ export class CloudMonitoringMetricsExporter implements PushMetricExporter { private _projectId: string | void | Promise; - + private _lastExported: Date = new Date(0); private readonly _client: MetricServiceClient; constructor({auth}: ExporterOptions) { @@ -46,13 +47,23 @@ export class CloudMonitoringMetricsExporter implements PushMetricExporter { * Calls the async wrapper method {@link _exportAsync} and * assures no rejected promises bubble up to the caller. 
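+   * Exports are additionally throttled: if a previous export happened within the
+   * last MIN_EXPORT_FREQUENCY_MS, this call returns without exporting (see the
+   * check added below).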
* - * @param metrics Metrics to be sent to the Google Cloud Monitoring backend + * @param metrics Metrics to be sent to the Google Cloud 3Monitoring backend * @param resultCallback result callback to be called on finish */ export( metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void, ): void { + // Do not export metrics if we've already exported within the last 30s + const now = new Date(); + if ( + now.getTime() - this._lastExported.getTime() <= + MIN_EXPORT_FREQUENCY_MS + ) { + return; + } + + this._lastExported = now; this._exportAsync(metrics).then(resultCallback, err => { console.error(err.message); resultCallback({code: ExportResultCode.FAILED, error: err}); diff --git a/src/metrics/transform.ts b/src/metrics/transform.ts index 4cba27f23..865ffff3e 100644 --- a/src/metrics/transform.ts +++ b/src/metrics/transform.ts @@ -20,6 +20,7 @@ import { ExponentialHistogram, ResourceMetrics, } from '@opentelemetry/sdk-metrics'; +import {Resource} from '@opentelemetry/resources'; import {MonitoredResource} from '@google-cloud/opentelemetry-resource-util'; import * as path from 'path'; import {MetricKind, ValueType} from './external-types'; @@ -30,7 +31,11 @@ import { METRIC_LABELS, MONITORED_RESOURCE_LABELS, METRIC_NAMES, + METRIC_LABEL_KEY_CLIENT_UID, + METRIC_LABEL_KEY_CLIENT_NAME, + UNKNOWN_ATTRIBUTE, } from './constants'; +import {MetricsTracerFactory} from './metrics-tracer-factory'; /** Transforms a OpenTelemetry instrument type to a GCM MetricKind. */ function _transformMetricKind(metric: MetricData): MetricKind { @@ -83,9 +88,11 @@ function _transformValueType(metric: MetricData): ValueType { /** * Convert the metrics data to a list of Google Cloud Monitoring time series. */ -export function transformResourceMetricToTimeSeriesArray({ - scopeMetrics, -}: ResourceMetrics) { +export function transformResourceMetricToTimeSeriesArray( + resourceMetrics: ResourceMetrics, +) { + const resource = resourceMetrics?.resource; + const scopeMetrics = resourceMetrics?.scopeMetrics; if (!scopeMetrics) return []; return ( @@ -100,18 +107,37 @@ export function transformResourceMetricToTimeSeriesArray({ // Flatmap the data points in each metric to create a TimeSeries for each point .flatMap(metric => metric.dataPoints.flatMap(dataPoint => - _createTimeSeries(metric, dataPoint), + _createTimeSeries(metric, dataPoint, resource), ), ) ); } + /** * Creates a GCM TimeSeries. */ -function _createTimeSeries(metric: MetricData, dataPoint: DataPoint) { +function _createTimeSeries( + metric: MetricData, + dataPoint: DataPoint, + resource?: Resource, +) { const type = path.posix.join(CLIENT_METRICS_PREFIX, metric.descriptor.name); - const {metricLabels: labels, monitoredResourceLabels} = - _extractLabels(dataPoint); + const resourceLabels = resource + ? 
_extractLabels(resource) + : {metricLabels: {}, monitoredResourceLabels: {}}; + + const dataLabels = _extractLabels(dataPoint); + + const labels = { + ...resourceLabels.metricLabels, + ...dataLabels.metricLabels, + }; + + const monitoredResourceLabels = { + ...resourceLabels.monitoredResourceLabels, + ...dataLabels.monitoredResourceLabels, + }; + const transformedMetric = { type, labels, @@ -179,7 +205,13 @@ function _transformPoint(metric: MetricData, dataPoint: DataPoint) { } /** Extracts metric and monitored resource labels from data point */ -function _extractLabels({attributes = {}}: DataPoint) { +function _extractLabels({attributes = {}}: DataPoint | Resource) { + const factory = MetricsTracerFactory.getInstance(); + // Add Client name and Client UID metric labels + attributes[METRIC_LABEL_KEY_CLIENT_UID] = + factory?.clientUid ?? UNKNOWN_ATTRIBUTE; + attributes[METRIC_LABEL_KEY_CLIENT_NAME] = + factory?.clientName ?? UNKNOWN_ATTRIBUTE; return Object.entries(attributes).reduce( (result, [key, value]) => { const normalizedKey = _normalizeLabelKey(key); diff --git a/system-test/install.ts b/system-test/install.ts index 56c82f7c5..85d721144 100644 --- a/system-test/install.ts +++ b/system-test/install.ts @@ -50,7 +50,7 @@ describe('📦 pack and install', () => { /** * CLEAN UP - remove the staging directory when done. */ - after('cleanup staging', () => { + after('cleanup staging', async () => { if (!keep) { stagingDir.removeCallback(); } diff --git a/system-test/spanner.ts b/system-test/spanner.ts index 219b72eb2..d8125695b 100644 --- a/system-test/spanner.ts +++ b/system-test/spanner.ts @@ -53,6 +53,7 @@ import CreateInstanceConfigMetadata = google.spanner.admin.instance.v1.CreateIns const singer = require('../test/data/singer'); const music = singer.examples.spanner.music; import {util} from 'protobufjs'; +import {MetricsTracerFactory} from '../src/metrics/metrics-tracer-factory'; import Long = util.Long; import { CreateQueryPartitionsResponse, @@ -231,6 +232,7 @@ describe('Spanner', () => { } before(async () => { + await MetricsTracerFactory.resetInstance(); await deleteOldTestInstances(); if (generateInstanceForTest) { await createInstance(instanceId!); @@ -267,6 +269,7 @@ describe('Spanner', () => { }); after(async () => { + await MetricsTracerFactory.resetInstance(); try { if (generateInstanceForTest) { // Sleep for 30 seconds before cleanup, just in case @@ -300,6 +303,7 @@ describe('Spanner', () => { ), ); } + await MetricsTracerFactory.resetInstance(); } catch (err) { console.error('Cleanup failed:', err); } @@ -3021,7 +3025,7 @@ describe('Spanner', () => { CustomerId INT64 NOT NULL, CustomerName STRING(62) NOT NULL, CONSTRAINT FKShoppingCartsCustomerId FOREIGN KEY (CustomerId) - REFERENCES Customers (CustomerId) ON DELETE CASCADE, + REFERENCES Customers (CustomerId) ON DELETE CASCADE, ) PRIMARY KEY (CartId)`, ]; const fkadc_pg_schema = [ @@ -7116,7 +7120,7 @@ describe('Spanner', () => { const postgreSqlCreateTable = await postgreSqlTable.create( ` CREATE TABLE ${TABLE_NAME} ( - "Key" VARCHAR NOT NULL PRIMARY KEY, + "Key" VARCHAR NOT NULL PRIMARY KEY, "StringValue" VARCHAR )`, GAX_OPTIONS, diff --git a/test/index.ts b/test/index.ts index 5b9bddc59..3159fa10c 100644 --- a/test/index.ts +++ b/test/index.ts @@ -38,6 +38,7 @@ import { GetInstancesOptions, } from '../src'; import {CLOUD_RESOURCE_HEADER} from '../src/common'; +import {MetricsTracerFactory} from '../src/metrics/metrics-tracer-factory'; import IsolationLevel = protos.google.spanner.v1.TransactionOptions.IsolationLevel; 
const singer = require('./data/singer'); const music = singer.examples.spanner.music; @@ -48,6 +49,21 @@ assert.strictEqual(CLOUD_RESOURCE_HEADER, 'google-cloud-resource-prefix'); // eslint-disable-next-line @typescript-eslint/no-var-requires const apiConfig = require('../src/spanner_grpc_config.json'); +async function disableMetrics(sandbox: sinon.SinonSandbox) { + if ( + Object.prototype.hasOwnProperty.call( + process.env, + 'SPANNER_DISABLE_BUILTIN_METRICS', + ) + ) { + sandbox.replace(process.env, 'SPANNER_DISABLE_BUILTIN_METRICS', 'true'); + } else { + sandbox.define(process.env, 'SPANNER_DISABLE_BUILTIN_METRICS', 'true'); + } + await MetricsTracerFactory.resetInstance(); + MetricsTracerFactory.enabled = false; +} + function getFake(obj: {}) { return obj as { calledWith_: IArguments; @@ -116,6 +132,7 @@ const fakeV1: any = { function fakeGoogleAuth() { return { calledWith_: arguments, + getProjectId: () => Promise.resolve('project-id'), }; } @@ -183,7 +200,7 @@ describe('Spanner', () => { }).Spanner; }); - beforeEach(() => { + beforeEach(async () => { sandbox = sinon.createSandbox(); fakeGapicClient = util.noop; // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -193,6 +210,7 @@ describe('Spanner', () => { fakeV1.SpannerClient = fakeGapicClient; fakeCodec.SpannerDate = util.noop; fakeCodec.Int = util.noop; + await disableMetrics(sandbox); spanner = new Spanner(OPTIONS); spanner.projectId = OPTIONS.projectId; replaceProjectIdTokenOverride = null; @@ -2175,7 +2193,11 @@ describe('Spanner', () => { assert.strictEqual(this, FAKE_GAPIC_CLIENT); assert.deepStrictEqual(reqOpts, CONFIG.reqOpts); assert.notStrictEqual(reqOpts, CONFIG.reqOpts); - assert.deepStrictEqual(gaxOpts, expectedGaxOpts); + + // Check that gaxOpts has the expected structure + assert.ok(gaxOpts.otherArgs); + assert.deepStrictEqual(gaxOpts.otherArgs.headers, CONFIG.headers); + arg(); // done() }; diff --git a/test/metrics/interceptor.ts b/test/metrics/interceptor.ts new file mode 100644 index 000000000..bbaa5896c --- /dev/null +++ b/test/metrics/interceptor.ts @@ -0,0 +1,170 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
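+
+// These tests drive MetricInterceptor directly with stubbed gRPC call pieces.
+// In the client itself the interceptor is expected to be attached through the
+// per-call gRPC options, roughly like the sketch below (option shape follows
+// the gRPC-Node interceptor API, not necessarily this library's exact wiring):
+//
+//   const callOptions = {interceptors: [MetricInterceptor]};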
+ +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {grpc} from 'google-gax'; +import {status as Status} from '@grpc/grpc-js'; +import {MetricsTracerFactory} from '../../src/metrics/metrics-tracer-factory'; +import {MetricsTracer} from '../../src/metrics/metrics-tracer'; +import {MetricInterceptor} from '../../src/metrics/interceptor'; + +describe('MetricInterceptor', () => { + let sandbox: sinon.SinonSandbox; + let mockMetricsTracer: sinon.SinonStubbedInstance; + let mockFactory: sinon.SinonStubbedInstance; + let mockNextCall: sinon.SinonStub; + let mockInterceptingCall: any; + let mockListener: any; + let gfeMetadata: any; + let emptyMetadata: any; + let mockStatus: any; + let mockOptions: any; + let capturedListener: any; + let testMetadata: grpc.Metadata; + + beforeEach(() => { + sandbox = sinon.createSandbox(); + + // Mock MetricsTracer + mockMetricsTracer = sandbox.createStubInstance(MetricsTracer); + mockMetricsTracer.recordAttemptStart = sandbox.stub<[], void>(); + mockMetricsTracer.recordAttemptCompletion = sandbox.stub< + [status?: number], + void + >(); + mockMetricsTracer.extractGfeLatency = sandbox + .stub() + .callsFake((header: string) => { + if (header === 'gfet4t7; dur=90') { + return 90; + } + return null; + }) as sinon.SinonStub<[string], number | null>; + mockMetricsTracer.recordGfeLatency = sandbox.stub< + [latency: number], + void + >(); + mockMetricsTracer.recordGfeConnectivityErrorCount = sandbox.stub< + [statusCode: number], + void + >(); + + // Mock MetricsTracerFactory + mockFactory = sandbox.createStubInstance(MetricsTracerFactory); + mockFactory.getCurrentTracer = sandbox + .stub() + .returns(mockMetricsTracer) as sinon.SinonStub< + [string], + MetricsTracer | null + >; + sandbox.stub(MetricsTracerFactory, 'getInstance').returns(mockFactory); + + // Mock GRPC call components + mockInterceptingCall = { + start: sinon.spy((metadata: grpc.Metadata, listener: grpc.Listener) => { + capturedListener = listener; + }), + }; + + mockNextCall = sinon.stub().returns(mockInterceptingCall); + + mockListener = { + onReceiveMetadata: sandbox.stub(), + onReceiveMessage: sandbox.stub(), + onReceiveStatus: sandbox.stub(), + }; + + gfeMetadata = new grpc.Metadata(); + gfeMetadata.set('content-type', 'application/grpc'); + gfeMetadata.set('date', 'Thu, 19 Jun 2020 00:01:02 GMT'); + gfeMetadata.set('server-timing', 'gfet4t7; dur=90'); + gfeMetadata.set( + 'alt-svc', + 'h3=":443"; ma=2592000,h3-29=":443"; ma=2592000', + ); + + emptyMetadata = new grpc.Metadata(); + + mockStatus = { + code: Status.OK, + details: 'OK', + metadata: new grpc.Metadata(), + }; + + mockOptions = { + method_definition: { + path: '/google.spanner.v1.Spanner/ExecuteSql', + }, + }; + testMetadata = new grpc.Metadata(); + }); + + afterEach(() => { + sandbox.restore(); + }); + + describe('Metrics recorded from interceptor', () => { + it('AttemptMetrics', () => { + const interceptingCall = MetricInterceptor(mockOptions, mockNextCall); + + // Start recording attempt metrics at the beginning of the gRPC call + interceptingCall.start(testMetadata, mockListener); + assert.equal(mockMetricsTracer.recordAttemptStart.callCount, 1); + assert.equal(mockMetricsTracer.recordAttemptCompletion.callCount, 0); + + capturedListener.onReceiveStatus(mockStatus); + + // Complete attempt recording when status is received back from the call + assert.equal(mockMetricsTracer.recordAttemptStart.callCount, 1); + assert.equal(mockMetricsTracer.recordAttemptCompletion.callCount, 1); + }); + + it('GFE Metrics - 
Latency', () => { + const interceptingCall = MetricInterceptor(mockOptions, mockNextCall); + interceptingCall.start(testMetadata, mockListener); + + // duration value from the header's gfet4t7 value should be recorded as GFE latency + capturedListener.onReceiveMetadata(gfeMetadata); + capturedListener.onReceiveStatus(mockStatus); + assert.equal(mockMetricsTracer.recordGfeLatency.callCount, 1); + assert.equal( + mockMetricsTracer.recordGfeLatency.getCall(0).args, + Status.OK, + ); + assert.equal( + mockMetricsTracer.recordGfeConnectivityErrorCount.callCount, + 0, + ); + }); + + it('GFE Metrics - Connectivity Error Count', () => { + const interceptingCall = MetricInterceptor(mockOptions, mockNextCall); + interceptingCall.start(testMetadata, mockListener); + + // Calls received without latency values should increase connectivity error count + capturedListener.onReceiveMetadata(emptyMetadata); + capturedListener.onReceiveStatus(mockStatus); + assert.equal(mockMetricsTracer.recordGfeLatency.callCount, 0); + assert.equal( + mockMetricsTracer.recordGfeConnectivityErrorCount.callCount, + 1, + ); + assert.equal( + mockMetricsTracer.recordGfeConnectivityErrorCount.getCall(0).args, + Status.OK, + ); + }); + }); +}); diff --git a/test/metrics/metrics-tracer-factory.ts b/test/metrics/metrics-tracer-factory.ts index 911991e3a..ab5dcccf5 100644 --- a/test/metrics/metrics-tracer-factory.ts +++ b/test/metrics/metrics-tracer-factory.ts @@ -68,7 +68,19 @@ describe('MetricsTracerFactory', () => { // metrics provider and related objects mockExporter = sandbox.createStubInstance(CloudMonitoringMetricsExporter); - const provider = MetricsTracerFactory.getMeterProvider(); + }); + + after(async () => { + sandbox.restore(); + await MetricsTracerFactory.resetInstance(); + }); + + beforeEach(async () => { + MetricsTracerFactory.enabled = true; + sandbox.resetHistory(); + await MetricsTracerFactory.resetInstance(); + const provider = + MetricsTracerFactory.getInstance('project-id')!.getMeterProvider(); const reader = new PeriodicExportingMetricReader({ exporter: mockExporter, exportIntervalMillis: 60000, @@ -76,32 +88,31 @@ describe('MetricsTracerFactory', () => { provider.addMetricReader(reader); }); - after(() => { - sandbox.restore(); - MetricsTracerFactory.resetMeterProvider(); - }); - - beforeEach(() => { - sandbox.resetHistory(); + afterEach(async () => { + await MetricsTracerFactory.resetInstance(); }); - it('should use the globally set meter provider', async () => { - const factory = MetricsTracerFactory.getInstance(true); - const tracer = factory.createMetricsTracer(); + it('should use the set meter provider', async () => { + const factory = MetricsTracerFactory.getInstance(); + const tracer = factory!.createMetricsTracer( + 'some-method', + 'projects/project/instances/instance/databases/database', + '1.1a2bc3d4.1.1.1.1', + ); const operations = 3; const attempts = 5; for (let i = 0; i < operations; i++) { - tracer.recordOperationStart(); + tracer!.recordOperationStart(); for (let j = 0; j < attempts; j++) { - tracer.recordAttemptStart(); + tracer!.recordAttemptStart(); // Simulate processing time during attempt await new Promise(resolve => { setTimeout(resolve, 50); }); - tracer.recordAttemptCompletion(); + tracer!.recordAttemptCompletion(); } - tracer.recordOperationCompletion(); + tracer!.recordOperationCompletion(); } assert.ok(recordOperationLatencyStub.calledWith(sinon.match.number)); @@ -115,130 +126,174 @@ describe('MetricsTracerFactory', () => { }); it('should initialize metric instruments when 
enabled', () => { - const factory = MetricsTracerFactory.getInstance(true); + const factory = MetricsTracerFactory.getInstance(); - assert.deepStrictEqual(factory.instrumentAttemptLatency, { + assert.deepStrictEqual(factory!.instrumentAttemptLatency, { record: recordAttemptLatencyStub, }); - assert.deepStrictEqual(factory.instrumentAttemptCounter, { + assert.deepStrictEqual(factory!.instrumentAttemptCounter, { add: addAttemptCounterStub, }); - assert.deepStrictEqual(factory.instrumentOperationLatency, { + assert.deepStrictEqual(factory!.instrumentOperationLatency, { record: recordOperationLatencyStub, }); - assert.deepStrictEqual(factory.instrumentOperationCounter, { + assert.deepStrictEqual(factory!.instrumentOperationCounter, { add: addOperationCounterStub, }); - assert.deepStrictEqual(factory.instrumentGfeLatency, { + assert.deepStrictEqual(factory!.instrumentGfeLatency, { record: recordGfeLatencyStub, }); - assert.deepStrictEqual(factory.instrumentGfeConnectivityErrorCount, { + assert.deepStrictEqual(factory!.instrumentGfeConnectivityErrorCount, { add: addGfeConnectivityErrorCountStub, }); }); it('should create a MetricsTracer instance', () => { - const factory = MetricsTracerFactory.getInstance(true); - const tracer = factory.createMetricsTracer(); + const factory = MetricsTracerFactory.getInstance(); + const tracer = factory!.createMetricsTracer( + 'some-method', + 'method-name', + '1.1a2bc3d4.1.1.1.1', + ); assert.ok(tracer); }); it('should correctly set default attributes', () => { - const factory = MetricsTracerFactory.getInstance(true); - assert.ok(factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME]); - assert.ok(factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_UID]); - }); - - it('should correctly create resource attributes', async () => { - const factory = MetricsTracerFactory.getInstance(true); - const resourceAttributes = - await factory.createResourceAttributes('test-proj-id'); - + const factory = MetricsTracerFactory.getInstance(); + const tracer = factory!.createMetricsTracer( + 'test-method', + 'projects/project/instances/instance/databases/database', + '1.1a2bc3d4.1.1.1.1', + ); assert.strictEqual( - resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT], - 'test-proj-id', + tracer!.clientAttributes[Constants.METRIC_LABEL_KEY_DATABASE], + 'database', ); - assert.ok(resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE]); - assert.ok( - resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH], + assert.strictEqual( + tracer!.clientAttributes[Constants.METRIC_LABEL_KEY_METHOD], + 'test-method', ); - assert.ok( - resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG], + assert.strictEqual( + tracer!.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE], + 'instance', ); - assert.ok(resourceAttributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION]); }); +}); - it('should correctly set project attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.project = 'test-project'; - assert.strictEqual( - factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT], - 'test-project', - ); +describe('getInstanceAttributes', () => { + let factory: MetricsTracerFactory; + beforeEach(() => { + factory = new (MetricsTracerFactory as any)(); }); - it('should correctly set instance attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.instance = 'my-instance'; - assert.strictEqual( - factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE], - 
'my-instance', - ); + afterEach(async () => { + await factory.resetMeterProvider(); + clearInterval(factory['_intervalTracerCleanup']); }); - it('should correctly set instanceConfig attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.instanceConfig = 'my-config'; - assert.strictEqual( - factory.clientAttributes[ - Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG - ], - 'my-config', - ); + it('should extract project, instance, and database from full resource path', () => { + const formattedName = 'projects/proj1/instances/inst1/databases/db1'; + const attrs = factory.getInstanceAttributes(formattedName); + assert.deepStrictEqual(attrs, { + project: 'proj1', + instance: 'inst1', + database: 'db1', + }); }); - it('should correctly set location attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.location = 'us-central1'; - assert.strictEqual( - factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION], - 'us-central1', - ); + it('should extract project and instance, and unknown database if database is missing', () => { + const formattedName = 'projects/proj2/instances/inst2'; + const attrs = factory.getInstanceAttributes(formattedName); + assert.deepStrictEqual(attrs, { + project: 'proj2', + instance: 'inst2', + database: 'unknown', + }); }); - it('should correctly set clientHash attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.clientHash = 'abc123'; - assert.strictEqual( - factory.clientAttributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH], - 'abc123', - ); + it('should return unknown strings for all if input is empty', () => { + const attrs = factory.getInstanceAttributes(''); + assert.deepStrictEqual(attrs, { + project: 'unknown', + instance: 'unknown', + database: 'unknown', + }); }); - it('should correctly set clientUid attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.clientUid = 'uid123'; - assert.strictEqual( - factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_UID], - 'uid123', - ); + it('should return unknown strings for all if input is malformed', () => { + const attrs = factory.getInstanceAttributes('foo/bar/baz'); + assert.deepStrictEqual(attrs, { + project: 'unknown', + instance: 'unknown', + database: 'unknown', + }); }); +}); - it('should correctly set clientName attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.clientName = 'client-app'; - assert.strictEqual( - factory.clientAttributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME], - 'client-app', - ); +describe('MetricsTracerFactory with set clock', () => { + let clock: sinon.SinonFakeTimers; + + beforeEach(async () => { + MetricsTracerFactory.enabled = true; + await MetricsTracerFactory.resetInstance(); + // Use fake timers to control the clock + clock = sinon.useFakeTimers(); }); - it('should correctly set database attribute', () => { - const factory = MetricsTracerFactory.getInstance(true); - factory.database = 'my-database'; - assert.strictEqual( - factory.clientAttributes[Constants.METRIC_LABEL_KEY_DATABASE], - 'my-database', - ); + afterEach(() => { + // Restore the real timers + clock.restore(); + }); + + describe('_cleanMetricTracers', () => { + it('should prune stale tracers', () => { + const factory = MetricsTracerFactory.getInstance('test-project'); + assert(factory); + + factory.createMetricsTracer( + 'method1', + 'projects/p/instances/i/databases/d', + '1.1a2b3c.1.1.1.1', + ); + + // Advance the clock to make 
the tracer stale + clock.tick(Constants.TRACER_CLEANUP_THRESHOLD_MS); + + // Add another tracer to trigger pruning + factory.createMetricsTracer( + 'method2', + 'projects/p/instances/i/databases/d', + '2.1a2b3c.1.1.1.1', + ); + // Only most recent tracer should remain + assert.strictEqual(factory['_currentOperationTracers'].size, 1); + assert.ok(factory['_currentOperationTracers'].has('2.1a2b3c.1.1.1')); + }); + + it('should not prune recent tracers', () => { + const factory = MetricsTracerFactory.getInstance('test-project'); + assert(factory); + + factory.createMetricsTracer( + 'method1', + 'projects/p/instances/i/databases/d', + '1.1a2b3c.1.1.1.1', + ); + + // Advance the clock, but not enough to hit the threshold + clock.tick(Constants.TRACER_CLEANUP_INTERVAL_MS); + + // Add another tracer to trigger pruning + factory.createMetricsTracer( + 'method2', + 'projects/p/instances/i/databases/d', + '2.1a2b3c.1.1.1.1', + ); + + // Both tracers should be available + assert.strictEqual(factory['_currentOperationTracers'].size, 2); + assert.ok(factory['_currentOperationTracers'].has('1.1a2b3c.1.1.1')); + assert.ok(factory['_currentOperationTracers'].has('2.1a2b3c.1.1.1')); + }); }); }); diff --git a/test/metrics/metrics-tracer.ts b/test/metrics/metrics-tracer.ts index 9333d7221..9d93d9197 100644 --- a/test/metrics/metrics-tracer.ts +++ b/test/metrics/metrics-tracer.ts @@ -18,7 +18,12 @@ import * as sinon from 'sinon'; import * as Constants from '../../src/metrics/constants'; import {MetricsTracer} from '../../src/metrics/metrics-tracer'; -const PROJECT_ID = 'test-project'; +import {MetricsTracerFactory} from '../../src/metrics/metrics-tracer-factory'; + +const DATABASE = 'test-db'; +const INSTANCE = 'instance'; +const METHOD = 'test-method'; +const REQUEST = 'test-request'; describe('MetricsTracer', () => { let tracer: MetricsTracer; @@ -28,13 +33,9 @@ describe('MetricsTracer', () => { let fakeOperationLatency: any; let fakeGfeCounter: any; let fakeGfeLatency: any; - let attributes: {[key: string]: string}; - + let sandbox: sinon.SinonSandbox; beforeEach(() => { - attributes = { - [Constants.MONITORED_RES_LABEL_KEY_PROJECT]: PROJECT_ID, - }; - + sandbox = sinon.createSandbox(); fakeAttemptCounter = { add: sinon.spy(), }; @@ -60,36 +61,40 @@ describe('MetricsTracer', () => { }; tracer = new MetricsTracer( - attributes, fakeAttemptCounter, fakeAttemptLatency, fakeOperationCounter, fakeOperationLatency, fakeGfeCounter, fakeGfeLatency, - true, // enabled + true, // enabled, + DATABASE, + INSTANCE, + METHOD, + REQUEST, ); }); + afterEach(() => { + sandbox.restore(); + }); + describe('recordAttemptCompletion', () => { it('should record attempt latency when enabled', () => { + tracer.recordOperationStart(); tracer.recordAttemptStart(); - assert.ok(tracer.currentOperation.currentAttempt); - assert.ok(tracer.currentOperation.currentAttempt.startTime); - assert.strictEqual(tracer.currentOperation.attemptCount, 1); + assert.ok(tracer.currentOperation!.currentAttempt); + assert.ok(tracer.currentOperation!.currentAttempt.startTime); + assert.strictEqual(tracer.currentOperation!.attemptCount, 1); tracer.recordAttemptCompletion(Status.OK); assert.strictEqual(fakeAttemptLatency.record.calledOnce, true); const [[latency, otelAttrs]] = fakeAttemptLatency.record.args; assert.strictEqual(typeof latency, 'number'); - assert.strictEqual( - otelAttrs[Constants.MONITORED_RES_LABEL_KEY_PROJECT], - PROJECT_ID, - ); assert.strictEqual( otelAttrs[Constants.METRIC_LABEL_KEY_STATUS], - Status.OK.toString(), + 
Status[Status.OK], ); }); @@ -103,9 +108,15 @@ describe('MetricsTracer', () => { describe('recordOperationCompletion', () => { it('should record operation and attempt metrics when enabled', () => { + const factory = sandbox + .stub(MetricsTracerFactory, 'getInstance') + .returns({ + clearCurrentTracer: sinon.spy(), + } as any); tracer.recordOperationStart(); - assert.ok(tracer.currentOperation.startTime); + assert.ok(tracer.currentOperation!.startTime); tracer.recordAttemptStart(); + tracer.recordAttemptCompletion(Status.OK); tracer.recordOperationCompletion(); assert.strictEqual(fakeOperationCounter.add.calledOnce, true); @@ -113,7 +124,7 @@ describe('MetricsTracer', () => { assert.strictEqual(fakeOperationLatency.record.calledOnce, true); const [[_, opAttrs]] = fakeOperationLatency.record.args; - assert.strictEqual(opAttrs[Constants.METRIC_LABEL_KEY_STATUS], '-1'); + assert.strictEqual(opAttrs[Constants.METRIC_LABEL_KEY_STATUS], 'OK'); }); it('should do nothing if disabled', () => { @@ -127,76 +138,71 @@ describe('MetricsTracer', () => { describe('recordGfeLatency', () => { it('should record GFE latency if enabled', () => { tracer.enabled = true; - tracer.recordGfeLatency(123); + tracer.gfeLatency = 123; + tracer.recordGfeLatency(Status.OK); assert.strictEqual(fakeGfeLatency.record.calledOnce, true); }); it('should not record if disabled', () => { tracer.enabled = false; - tracer.recordGfeLatency(123); + tracer.gfeLatency = 123; + tracer.recordGfeLatency(Status.OK); assert.strictEqual(fakeGfeLatency.record.called, false); }); }); describe('recordGfeConnectivityErrorCount', () => { it('should increment GFE error counter if enabled', () => { - tracer.recordGfeConnectivityErrorCount(); + tracer.recordGfeConnectivityErrorCount(Status.OK); assert.strictEqual(fakeGfeCounter.add.calledOnce, true); }); it('should not increment if disabled', () => { tracer.enabled = false; - tracer.recordGfeConnectivityErrorCount(); + tracer.recordGfeConnectivityErrorCount(Status.OK); assert.strictEqual(fakeGfeCounter.add.called, false); }); }); - it('should not overwrite project if already set', () => { - tracer.project = 'new-project'; - assert.strictEqual( - attributes[Constants.MONITORED_RES_LABEL_KEY_PROJECT], - PROJECT_ID, - ); - }); + describe('extractGfeLatency', () => { + let tracer: MetricsTracer; + beforeEach(() => { + tracer = new MetricsTracer( + null, + null, + null, + null, + null, + null, + true, + DATABASE, + INSTANCE, + METHOD, + REQUEST, + ); + }); - it('should set all other attribute setters', () => { - tracer.instance = 'test-instance'; - tracer.instanceConfig = 'config'; - tracer.location = 'us-central1'; - tracer.clientHash = 'hash123'; - tracer.clientUid = 'uid123'; - tracer.clientName = 'name123'; - tracer.database = 'db123'; - tracer.methodName = 'method'; - - assert.strictEqual( - attributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE], - 'test-instance', - ); - assert.strictEqual( - attributes[Constants.MONITORED_RES_LABEL_KEY_INSTANCE_CONFIG], - 'config', - ); - assert.strictEqual( - attributes[Constants.MONITORED_RES_LABEL_KEY_LOCATION], - 'us-central1', - ); - assert.strictEqual( - attributes[Constants.MONITORED_RES_LABEL_KEY_CLIENT_HASH], - 'hash123', - ); - assert.strictEqual( - attributes[Constants.METRIC_LABEL_KEY_CLIENT_UID], - 'uid123', - ); - assert.strictEqual( - attributes[Constants.METRIC_LABEL_KEY_CLIENT_NAME], - 'name123', - ); - assert.strictEqual( - attributes[Constants.METRIC_LABEL_KEY_DATABASE], - 'db123', - ); - 
assert.strictEqual(attributes[Constants.METRIC_LABEL_KEY_METHOD], 'method'); + it('should extract latency from a valid server-timing header', () => { + const header = 'gfet4t7; dur=123'; + const latency = tracer.extractGfeLatency(header); + assert.strictEqual(latency, 123); + }); + + it('should return null if header is undefined', () => { + const latency = tracer.extractGfeLatency(undefined as any); + assert.strictEqual(latency, null); + }); + + it('should return null if header does not match expected format', () => { + const header = 'some-other-header'; + const latency = tracer.extractGfeLatency(header); + assert.strictEqual(latency, null); + }); + + it('should extract only the first number if extra data is present', () => { + const header = 'gfet4t7; dur=456; other=value'; + const latency = tracer.extractGfeLatency(header); + assert.strictEqual(latency, 456); + }); }); }); diff --git a/test/metrics/metrics.ts b/test/metrics/metrics.ts new file mode 100644 index 000000000..2cc291e45 --- /dev/null +++ b/test/metrics/metrics.ts @@ -0,0 +1,587 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as sinon from 'sinon'; +import * as assert from 'assert'; +import {grpc} from 'google-gax'; +import * as mock from '../mockserver/mockspanner'; +import {MockError, SimulatedExecutionTime} from '../mockserver/mockspanner'; +import {Database, Instance, Spanner} from '../../src'; +import {MetricsTracerFactory} from '../../src/metrics/metrics-tracer-factory'; +import {MetricsTracer} from '../../src/metrics/metrics-tracer'; +import {MetricReader} from '@opentelemetry/sdk-metrics'; +import {CloudMonitoringMetricsExporter} from '../../src/metrics/spanner-metrics-exporter'; +import { + METRIC_NAME_OPERATION_LATENCIES, + METRIC_NAME_ATTEMPT_LATENCIES, + METRIC_NAME_OPERATION_COUNT, + METRIC_NAME_ATTEMPT_COUNT, + METRIC_NAME_GFE_LATENCIES, + METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, +} from '../../src/metrics/constants'; + +describe('Test metrics with mock server', () => { + let sandbox: sinon.SinonSandbox; + let instance: Instance; + let spanner: Spanner; + let port: number; + let dbCounter = 0; + const selectSql = 'SELECT NUM, NAME FROM NUMBERS'; + const server = new grpc.Server(); + const spannerMock = mock.createMockSpanner(server); + + class InMemoryMetricReader extends MetricReader { + protected async onForceFlush(): Promise {} + protected async onShutdown(): Promise {} + } + + function newTestDatabase(): Database { + return instance.database(`database-${++dbCounter}`, undefined); + } + + function assertApprox(expected: number, actual: number, delta: number) { + assert.ok( + Math.abs(expected - actual) <= delta, + `Expected value of ${expected} and actual value of ${actual} is greater than the approximation delta (${delta})`, + ); + } + + function compareAttributes(expected: object, actual: object): boolean { + // Check that all expected keys match in actual + for (const key of Object.keys(expected)) { + if ((actual as any)[key] !== (expected as any)[key]) 
{ + return false; + } + } + // Check that actual does not contain extra keys + for (const key of Object.keys(actual)) { + // Check if the key in 'actual' is not present in 'expected' + if (!Object.prototype.hasOwnProperty.call(expected, key)) { + return false; + } + } + return true; + } + + function getMetricData(resourceMetrics, metricName: string) { + const filteredMetrics = resourceMetrics.scopeMetrics.flatMap(scopeMetric => + scopeMetric.metrics.filter( + metric => metric.descriptor.name === metricName, + ), + ); + assert.ok( + filteredMetrics.length > 0, + `No metric entry found with name: ${metricName}`, + ); + assert.strictEqual( + filteredMetrics.length, + 1, + `Found multiple metrics with name: ${metricName}`, + ); + return filteredMetrics[0]; + } + + function hasMetricData(resourceMetrics, metricName: string): boolean { + const filteredMetrics = resourceMetrics.scopeMetrics.flatMap(scopeMetric => + scopeMetric.metrics.filter( + metric => metric.descriptor.name === metricName, + ), + ); + return filteredMetrics.length > 0; + } + + function getAggregatedValue(metricsData: any, attributes: any) { + const dataPoint = metricsData.dataPoints.filter(dp => + compareAttributes(dp.attributes, attributes), + ); + assert.strictEqual( + dataPoint.length, + 1, + 'Failed to filter for attribute values.', + ); + switch (metricsData.descriptor.type) { + case 'HISTOGRAM': + return dataPoint[0].value.sum / dataPoint[0].value.count; + case 'COUNTER': + return dataPoint[0].value; + default: + return 0; + } + } + + async function setupMockSpanner() { + sandbox = sinon.createSandbox(); + port = await new Promise((resolve, reject) => { + server.bindAsync( + '0.0.0.0:0', + grpc.ServerCredentials.createInsecure(), + (err, assignedPort) => { + if (err) { + reject(err); + } else { + resolve(assignedPort); + } + }, + ); + }); + spannerMock.putStatementResult( + selectSql, + mock.StatementResult.resultSet(mock.createSimpleResultSet()), + ); + sandbox + .stub(MetricsTracerFactory as any, '_detectClientLocation') + .resolves('test-location'); + await MetricsTracerFactory.resetInstance(); + if ( + Object.prototype.hasOwnProperty.call( + process.env, + 'SPANNER_DISABLE_BUILTIN_METRICS', + ) + ) { + sandbox.replace(process.env, 'SPANNER_DISABLE_BUILTIN_METRICS', 'false'); + } else { + sandbox.define(process.env, 'SPANNER_DISABLE_BUILTIN_METRICS', 'false'); + } + await MetricsTracerFactory.resetInstance(); + MetricsTracerFactory.enabled = true; + spanner = new Spanner({ + projectId: 'test-project', + servicePath: 'localhost', + port, + sslCreds: grpc.credentials.createInsecure(), + }); + instance = spanner.instance('instance'); + } + + before(async () => { + await MetricsTracerFactory.resetInstance(); + await setupMockSpanner(); + }); + + after(async () => { + spanner.close(); + server.tryShutdown(() => {}); + sandbox.restore(); + await MetricsTracerFactory.resetInstance(); + MetricsTracerFactory.enabled = false; + }); + + describe('With InMemMetricReader', async () => { + let reader: InMemoryMetricReader; + let factory: MetricsTracerFactory | null; + let gfeStub; + let exporterStub; + const MIN_LATENCY = 0; + const commonAttributes = { + instance_id: 'instance', + status: 'OK', + }; + + before(() => { + exporterStub = sinon.stub( + CloudMonitoringMetricsExporter.prototype, + 'export', + ); + }); + + after(() => { + exporterStub.restore(); + }); + + beforeEach(async function () { + // Increase the timeout because the MeterProvider shutdown exceed + // the default 10s timeout. 
+ this.timeout(50000); + spannerMock.resetRequests(); + spannerMock.removeExecutionTimes(); + // Reset the MetricsFactoryReader to an in-memory reader for the tests + factory = MetricsTracerFactory.getInstance(); + await factory!.resetMeterProvider(); + reader = new InMemoryMetricReader(); + factory!.getMeterProvider([reader]); + }); + + afterEach(async () => { + gfeStub?.restore(); + await factory?.resetMeterProvider(); + await MetricsTracerFactory.resetInstance(); + }); + + it('should have correct latency values in metrics', async () => { + gfeStub = sandbox + .stub(MetricsTracer.prototype, 'extractGfeLatency') + .callsFake(() => 123); + const database = newTestDatabase(); + const startTime = new Date(); + await database.run(selectSql); + const endTime = new Date(); + + const elapsedTime = endTime.valueOf() - startTime.valueOf(); + + const methods = ['batchCreateSessions', 'executeStreamingSql']; + + const {resourceMetrics} = await reader.collect(); + const operationCountData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_COUNT, + ); + const gfeLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_GFE_LATENCIES, + ); + const attemptCountData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_COUNT, + ); + const operationLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_LATENCIES, + ); + const attemptLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_LATENCIES, + ); + + let totalOperationLatency = 0; + methods.forEach(method => { + const attributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: method, + }; + const operationCount = getAggregatedValue( + operationCountData, + attributes, + ); + assert.strictEqual(operationCount, 1); + + const attemptCount = getAggregatedValue(attemptCountData, attributes); + assert.strictEqual(attemptCount, 1); + + const operationLatency = getAggregatedValue( + operationLatenciesData, + attributes, + ); + totalOperationLatency += operationLatency; + + const attemptLatency = getAggregatedValue( + attemptLatenciesData, + attributes, + ); + // Since we only have one attempt, the attempt latency should be fairly close to the operation latency + assertApprox(MIN_LATENCY, attemptLatency, 30); + + const gfeLatency = getAggregatedValue(gfeLatenciesData, attributes); + assert.strictEqual(gfeLatency, 123); + }); + + // check that the latency matches up with the measured elapsed time within 10ms + assertApprox(elapsedTime, totalOperationLatency, 10); + + // Make sure no GFE connectivity errors ar emitted since we got GFE latencies + const gfeMissingData = hasMetricData( + resourceMetrics, + METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + ); + + assert.ok(!gfeMissingData); + + await database.close(); + }); + + it('should increase attempts on retries', async () => { + gfeStub = sandbox + .stub(MetricsTracer.prototype, 'extractGfeLatency') + .callsFake(() => 123); + const database = newTestDatabase(); + const err = { + message: 'Temporary unavailable', + code: grpc.status.UNAVAILABLE, + } as MockError; + spannerMock.setExecutionTime( + spannerMock.executeStreamingSql, + SimulatedExecutionTime.ofError(err), + ); + + await database.run(selectSql); + const {resourceMetrics} = await reader.collect(); + + const operationCountData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_COUNT, + ); + const attemptCountData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_COUNT, + ); + const operationLatenciesData = getMetricData( + resourceMetrics, + 
METRIC_NAME_OPERATION_LATENCIES, + ); + const attemptLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_LATENCIES, + ); + const gfeLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_GFE_LATENCIES, + ); + + const sessionAttributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: 'batchCreateSessions', + }; + // Verify batchCreateSession metrics are unaffected + assert.strictEqual( + getAggregatedValue(operationCountData, sessionAttributes), + 1, + ); + getAggregatedValue(operationLatenciesData, sessionAttributes); + assert.strictEqual( + getAggregatedValue(attemptCountData, sessionAttributes), + 1, + ); + getAggregatedValue(attemptLatenciesData, sessionAttributes); + assert.strictEqual( + getAggregatedValue(gfeLatenciesData, sessionAttributes), + 123, + ); + + const executeAttributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: 'executeStreamingSql', + }; + const executeUnavailableAttributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: 'executeStreamingSql', + status: 'UNAVAILABLE', + }; + // Verify executeStreamingSql has 2 attempts and 1 operation + assert.strictEqual( + 1, + getAggregatedValue(operationCountData, executeAttributes), + ); + getAggregatedValue(operationLatenciesData, executeAttributes); + assert.strictEqual( + 1, + getAggregatedValue(attemptCountData, executeAttributes), + ); + assert.strictEqual( + 1, + getAggregatedValue(attemptCountData, executeUnavailableAttributes), + ); + getAggregatedValue(attemptLatenciesData, executeAttributes); + assert.strictEqual( + 123, + getAggregatedValue(gfeLatenciesData, executeAttributes), + ); + }); + + it('should create connectivity error count metric if GFE latency is not in header', async () => { + gfeStub = sandbox + .stub(MetricsTracer.prototype, 'extractGfeLatency') + .callsFake(() => null); + const database = newTestDatabase(); + await database.run(selectSql); + const {resourceMetrics} = await reader.collect(); + + const operationCountData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_COUNT, + ); + const attemptCountData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_COUNT, + ); + const operationLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_LATENCIES, + ); + const attemptLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_LATENCIES, + ); + const connectivityErrorCountData = getMetricData( + resourceMetrics, + METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + ); + + // Verify GFE latency doesn't exist + assert.ok(!hasMetricData(resourceMetrics, METRIC_NAME_GFE_LATENCIES)); + const methods = ['batchCreateSessions', 'executeStreamingSql']; + methods.forEach(method => { + const attributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: method, + }; + // Verify attempt and operational metrics are unaffected + assert.strictEqual( + getAggregatedValue(operationCountData, attributes), + 1, + ); + getAggregatedValue(operationLatenciesData, attributes); + assert.strictEqual(getAggregatedValue(attemptCountData, attributes), 1); + getAggregatedValue(attemptLatenciesData, attributes); + + // Verify that GFE connectivity error count increased + assert.strictEqual( + getAggregatedValue(connectivityErrorCountData, attributes), + 1, + ); + }); + }); + + it('should increase attempts on retries for non streaming calls with gax options', async () => { + gfeStub = sandbox + .stub(MetricsTracer.prototype, 'extractGfeLatency') + .callsFake(() => 
123); + const database = newTestDatabase(); + const err = { + message: 'Temporary unavailable', + code: grpc.status.UNAVAILABLE, + } as MockError; + spannerMock.setExecutionTime( + spannerMock.commit, + SimulatedExecutionTime.ofError(err), + ); + + const GAX_OPTIONS = { + retry: { + retryCodes: [4, 8, 14], + backoffSettings: { + initialRetryDelayMillis: 1000, + retryDelayMultiplier: 1.3, + maxRetryDelayMillis: 32000, + initialRpcTimeoutMillis: 60000, + rpcTimeoutMultiplier: 1, + maxRpcTimeoutMillis: 60000, + totalTimeoutMillis: 600000, + }, + }, + }; + await database.runTransactionAsync(async tx => { + await tx.run(selectSql); + // Commit RPC will be retried by GAX + await tx.commit({gaxOptions: GAX_OPTIONS}); + }); + + const {resourceMetrics} = await reader.collect(); + + const operationCountData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_COUNT, + ); + // Attempt count is correct here but status of attempts are not correct + const attemptCountData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_COUNT, + ); + const operationLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_LATENCIES, + ); + const attemptLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_LATENCIES, + ); + const gfeLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_GFE_LATENCIES, + ); + + const sessionAttributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: 'batchCreateSessions', + }; + // Verify batchCreateSession metrics are unaffected + assert.strictEqual( + 1, + getAggregatedValue(operationCountData, sessionAttributes), + ); + assert.ok(getAggregatedValue(operationLatenciesData, sessionAttributes)); + assert.strictEqual( + 1, + getAggregatedValue(attemptCountData, sessionAttributes), + ); + assert.ok(getAggregatedValue(attemptLatenciesData, sessionAttributes)); + assert.strictEqual( + 123, + getAggregatedValue(gfeLatenciesData, sessionAttributes), + ); + + const executeAttributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: 'executeStreamingSql', + }; + + // Verify executeStreamingSql metrics are unaffected + assert.strictEqual( + 1, + getAggregatedValue(operationCountData, executeAttributes), + ); + assert.ok(getAggregatedValue(operationLatenciesData, executeAttributes)); + assert.strictEqual( + 1, + getAggregatedValue(attemptCountData, executeAttributes), + ); + assert.ok(getAggregatedValue(attemptLatenciesData, executeAttributes)); + assert.strictEqual( + 123, + getAggregatedValue(gfeLatenciesData, executeAttributes), + ); + + // Verify that commit metrics have 2 attempts and 1 operation + const commitOkAttributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: 'commit', + }; + const commitUnavailableAttributes = { + ...commitOkAttributes, + status: 'UNAVAILABLE', + }; + + assert.strictEqual( + getAggregatedValue(operationCountData, commitOkAttributes), + 1, + ); + + assert.ok(getAggregatedValue(operationLatenciesData, commitOkAttributes)); + assert.strictEqual( + 1, + getAggregatedValue(attemptCountData, commitOkAttributes), + '1 of 2 attempts for Commit should have status: OK.', + ); + assert.strictEqual( + 1, + getAggregatedValue(attemptCountData, commitUnavailableAttributes), + '1 of 2 attempts for Commit should have status: Unavailable.', + ); + assert.ok(getAggregatedValue(attemptLatenciesData, commitOkAttributes)); + assert.ok( + getAggregatedValue(attemptLatenciesData, commitUnavailableAttributes), + ); + assert.strictEqual( + 123, + 
getAggregatedValue(gfeLatenciesData, commitOkAttributes), + ); + }); + }); +}); diff --git a/test/metrics/spanner-metrics-exporter.ts b/test/metrics/spanner-metrics-exporter.ts index 8e01f915d..8f056669c 100644 --- a/test/metrics/spanner-metrics-exporter.ts +++ b/test/metrics/spanner-metrics-exporter.ts @@ -31,9 +31,12 @@ import { } from '../../src/metrics/constants'; import {Counter, Meter, Histogram} from '@opentelemetry/api'; import {ExportResult, ExportResultCode} from '@opentelemetry/core'; +import {Resource} from '@opentelemetry/resources'; const PROJECT_ID = 'test-project'; const INSTANCE_ID = 'test-instance'; +const CLIENT_HASH = 'test-hash'; +const INSTANCE_CONFIG = 'test-config'; const DATABASE_ID = 'test-db'; const LOCATION = 'test-location'; @@ -96,16 +99,19 @@ describe('Export', () => { beforeEach(() => { exporter = new CloudMonitoringMetricsExporter({auth}); reader = new InMemoryMetricReader(); + const resource = new Resource({ + ['project_id']: PROJECT_ID, + ['client_hash']: CLIENT_HASH, + ['location']: LOCATION, + ['instance_id']: INSTANCE_ID, + ['instance_config']: INSTANCE_CONFIG, + }); meterProvider = new MeterProvider({ + resource: resource, readers: [reader], }); meter = meterProvider.getMeter(SPANNER_METER_NAME); metricAttributes = { - project_id: PROJECT_ID, - instance_id: INSTANCE_ID, - instance_config: 'test_config', - location: LOCATION, - client_hash: 'test_hash', client_uid: 'test_uid', client_name: 'test_name', database: DATABASE_ID, diff --git a/test/metrics/transform.ts b/test/metrics/transform.ts index 223829812..369bf57b4 100644 --- a/test/metrics/transform.ts +++ b/test/metrics/transform.ts @@ -13,6 +13,7 @@ // limitations under the License. import * as assert from 'assert'; +import * as sinon from 'sinon'; import {_TEST_ONLY} from '../../src/metrics/transform'; import { AggregationTemporality, @@ -27,6 +28,7 @@ import { MeterProvider, MetricReader, } from '@opentelemetry/sdk-metrics'; +import {Resource} from '@opentelemetry/resources'; import { Attributes, Counter, @@ -39,6 +41,7 @@ import { METRIC_NAME_ATTEMPT_COUNT, } from '../../src/metrics/constants'; import {MetricKind, ValueType} from '../../src/metrics/external-types'; +import {MetricsTracerFactory} from '../../src/metrics/metrics-tracer-factory'; const { _normalizeLabelKey, @@ -55,6 +58,7 @@ describe('transform', () => { let reader: MetricReader; let meterProvider: MeterProvider; let attributes: Attributes; + let resource: Resource; let metricSum: SumMetricData; let metricGauge: GaugeMetricData; let metricHistogram: HistogramMetricData; @@ -64,6 +68,8 @@ describe('transform', () => { let gaugeDataPoint: DataPoint; let histogramDataPoint: DataPoint; let exponentialHistogramDataPoint: DataPoint; + let sandbox; + let mockFactory; class InMemoryMetricReader extends MetricReader { protected async onShutdown(): Promise {} @@ -71,16 +77,25 @@ describe('transform', () => { } before(() => { + sandbox = sinon.createSandbox(); + mockFactory = sandbox.createStubInstance(MetricsTracerFactory); + sandbox.stub(mockFactory, 'clientUid').get(() => 'test_uid'); + sandbox.stub(mockFactory, 'clientName').get(() => 'test_name'); + sandbox.stub(MetricsTracerFactory, 'getInstance').returns(mockFactory); + reader = new InMemoryMetricReader(); + resource = new Resource({ + ['project_id']: 'project_id', + ['client_hash']: 'test_hash', + ['location']: 'test_location', + ['instance_id']: 'instance_id', + ['instance_config']: 'test_config', + }); meterProvider = new MeterProvider({ + resource: resource, readers: [reader], }); 
attributes = { - project_id: 'project_id', - instance_id: 'instance_id', - instance_config: 'test_config', - location: 'test_location', - client_hash: 'test_hash', client_uid: 'test_uid', client_name: 'test_name', database: 'database_id', @@ -176,6 +191,10 @@ describe('transform', () => { }; }); + after(() => { + sandbox.restore(); + }); + it('normalizes label keys', () => { [ ['valid_key_1', 'valid_key_1'], @@ -224,29 +243,43 @@ describe('transform', () => { }); it('should extract metric and resource labels', () => { - const {metricLabels, monitoredResourceLabels} = - _extractLabels(sumDataPoint); + const dataLabels = _extractLabels(sumDataPoint); + const resourceLabels = _extractLabels(resource); // Metric Labels - assert.strictEqual(metricLabels['client_uid'], 'test_uid'); - assert.strictEqual(metricLabels['client_name'], 'test_name'); - assert.strictEqual(metricLabels['database'], 'database_id'); - assert.strictEqual(metricLabels['method'], 'test_method'); - assert.strictEqual(metricLabels['status'], 'test_status'); + assert.strictEqual(dataLabels.metricLabels['client_uid'], 'test_uid'); + assert.strictEqual(dataLabels.metricLabels['client_name'], 'test_name'); + assert.strictEqual(dataLabels.metricLabels['database'], 'database_id'); + assert.strictEqual(dataLabels.metricLabels['method'], 'test_method'); + assert.strictEqual(dataLabels.metricLabels['status'], 'test_status'); // Resource Labels - assert.strictEqual(monitoredResourceLabels['project_id'], 'project_id'); - assert.strictEqual(monitoredResourceLabels['instance_id'], 'instance_id'); assert.strictEqual( - monitoredResourceLabels['instance_config'], + resourceLabels.monitoredResourceLabels['project_id'], + 'project_id', + ); + assert.strictEqual( + resourceLabels.monitoredResourceLabels['instance_id'], + 'instance_id', + ); + assert.strictEqual( + resourceLabels.monitoredResourceLabels['instance_config'], 'test_config', ); - assert.strictEqual(monitoredResourceLabels['location'], 'test_location'); - assert.strictEqual(monitoredResourceLabels['client_hash'], 'test_hash'); + assert.strictEqual( + resourceLabels.monitoredResourceLabels['location'], + 'test_location', + ); + assert.strictEqual( + resourceLabels.monitoredResourceLabels['client_hash'], + 'test_hash', + ); // Other Labels - assert(!('other' in metricLabels)); - assert(!('other' in monitoredResourceLabels)); + assert(!('other' in dataLabels.metricLabels)); + assert(!('other' in resourceLabels.metricLabels)); + assert(!('other' in dataLabels.monitoredResourceLabels)); + assert(!('other' in resourceLabels.monitoredResourceLabels)); }); it('should transform otel value types to GCM value types', () => { diff --git a/test/spanner.ts b/test/spanner.ts index 2b3d7578f..edb609189 100644 --- a/test/spanner.ts +++ b/test/spanner.ts @@ -80,6 +80,7 @@ import IsolationLevel = google.spanner.v1.TransactionOptions.IsolationLevel; import {SessionFactory} from '../src/session-factory'; import {MultiplexedSession} from '../src/multiplexed-session'; import {WriteAtLeastOnceOptions} from '../src/database'; +import {MetricsTracerFactory} from '../src/metrics/metrics-tracer-factory'; const { AlwaysOnSampler, @@ -102,6 +103,21 @@ function numberToEnglishWord(num: number): string { } } +async function disableMetrics(sandbox: sinon.SinonSandbox) { + if ( + Object.prototype.hasOwnProperty.call( + process.env, + 'SPANNER_DISABLE_BUILTIN_METRICS', + ) + ) { + sandbox.replace(process.env, 'SPANNER_DISABLE_BUILTIN_METRICS', 'true'); + } else { + sandbox.define(process.env, 
'SPANNER_DISABLE_BUILTIN_METRICS', 'true'); + } + await MetricsTracerFactory.resetInstance(); + MetricsTracerFactory.enabled = false; +} + class XGoogRequestHeaderInterceptor { private nStream: number; private nUnary: number; @@ -236,7 +252,7 @@ describe('Spanner with mock server', () => { const invalidSql = 'SELECT * FROM FOO'; const insertSql = "INSERT INTO NUMBER (NUM, NAME) VALUES (4, 'Four')"; const selectAllTypes = 'SELECT * FROM TABLE_WITH_ALL_TYPES'; - const insertSqlForAllTypes = `INSERT INTO TABLE_WITH_ALL_TYPES (COLBOOL, COLINT64, COLFLOAT64, COLNUMERIC, COLSTRING, COLBYTES, COLJSON, COLDATE, COLTIMESTAMP) + const insertSqlForAllTypes = `INSERT INTO TABLE_WITH_ALL_TYPES (COLBOOL, COLINT64, COLFLOAT64, COLNUMERIC, COLSTRING, COLBYTES, COLJSON, COLDATE, COLTIMESTAMP) VALUES (@bool, @int64, @float64, @numeric, @string, @bytes, @json, @date, @timestamp)`; const updateSql = "UPDATE NUMBER SET NAME='Unknown' WHERE NUM IN (5, 6)"; const readPartitionsQuery = { @@ -327,6 +343,7 @@ describe('Spanner with mock server', () => { // Set environment variable for SPANNER_EMULATOR_HOST to the mock server. // process.env.SPANNER_EMULATOR_HOST = `localhost:${port}`; process.env.GOOGLE_CLOUD_PROJECT = 'test-project'; + await disableMetrics(sandbox); spanner = new Spanner({ servicePath: 'localhost', port, From 453e23f5255cc3c583891b31aa49545447bf22f9 Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Thu, 17 Jul 2025 06:47:17 +0000 Subject: [PATCH 24/31] chore: lock order prevention (#2329) --- src/database.ts | 2 + src/session-factory.ts | 18 +++++++ src/transaction-runner.ts | 5 ++ src/transaction.ts | 53 +++++++++++++++++++++ test/database.ts | 10 ++++ test/session-factory.ts | 24 ++++++++++ test/transaction-runner.ts | 97 +++++++++++++++++++++++++++++++++++++- test/transaction.ts | 58 +++++++++++++++++++++++ 8 files changed, 266 insertions(+), 1 deletion(-) diff --git a/src/database.ts b/src/database.ts index 9d3e1e745..57c5eeefb 100644 --- a/src/database.ts +++ b/src/database.ts @@ -353,6 +353,7 @@ class Database extends common.GrpcServiceObject { pool_: SessionPoolInterface; sessionFactory_: SessionFactoryInterface; queryOptions_?: spannerClient.spanner.v1.ExecuteSqlRequest.IQueryOptions; + isMuxEnabledForRW_?: boolean; commonHeaders_: {[k: string]: string}; request: DatabaseRequest; databaseRole?: string | null; @@ -496,6 +497,7 @@ class Database extends common.GrpcServiceObject { this.requestStream = instance.requestStream as any; this.sessionFactory_ = new SessionFactory(this, name, poolOptions); this.pool_ = this.sessionFactory_.getPool(); + this.isMuxEnabledForRW_ = this.sessionFactory_.isMultiplexedEnabledForRW(); const sessionPoolInstance = this.pool_ as SessionPool; if (sessionPoolInstance) { sessionPoolInstance._observabilityOptions = diff --git a/src/session-factory.ts b/src/session-factory.ts index 794e83896..7b97e5703 100644 --- a/src/session-factory.ts +++ b/src/session-factory.ts @@ -93,6 +93,13 @@ export interface SessionFactoryInterface { * @name SessionFactoryInterface#isMultiplexedEnabled */ isMultiplexedEnabled(): boolean; + + /** + * When called returns if the multiplexed is enabled or not for read write transactions. + * + * @name SessionFactoryInterface#isMultiplexedEnabledForRW + */ + isMultiplexedEnabledForRW(): boolean; } /** @@ -243,4 +250,15 @@ export class SessionFactory isMultiplexedEnabled(): boolean { return this.isMultiplexed; } + + /** + * Returns if a multiplexed is enabled or not for read write transaction. 
+ * + * This method returns true if multiplexed session is enabled for read write transactions, otherwise returns false + * + * @returns {boolean} + */ + isMultiplexedEnabledForRW(): boolean { + return this.isMultiplexedRW; + } } diff --git a/src/transaction-runner.ts b/src/transaction-runner.ts index ba37e851c..3d78f36c7 100644 --- a/src/transaction-runner.ts +++ b/src/transaction-runner.ts @@ -116,6 +116,7 @@ export abstract class Runner { session: Session; transaction?: Transaction; options: RunTransactionOptions; + multiplexedSessionPreviousTransactionId?: Uint8Array | string; constructor( session: Session, transaction: Transaction, @@ -210,6 +211,8 @@ export abstract class Runner { transaction!.setReadWriteTransactionOptions( this.options as RunTransactionOptions, ); + transaction.multiplexedSessionPreviousTransactionId = + this.multiplexedSessionPreviousTransactionId; if (this.attempts > 0) { await transaction.begin(); } @@ -239,6 +242,8 @@ export abstract class Runner { } catch (e) { this.session.lastError = e as grpc.ServiceError; lastError = e as grpc.ServiceError; + } finally { + this.multiplexedSessionPreviousTransactionId = transaction.id; } // Note that if the error is a 'Session not found' error, it will be diff --git a/src/transaction.ts b/src/transaction.ts index 90fdeaf80..161c08486 100644 --- a/src/transaction.ts +++ b/src/transaction.ts @@ -297,6 +297,7 @@ export class Snapshot extends EventEmitter { | undefined | null; id?: Uint8Array | string; + multiplexedSessionPreviousTransactionId?: Uint8Array | string; ended: boolean; metadata?: spannerClient.spanner.v1.ITransaction; readTimestamp?: PreciseDate; @@ -394,6 +395,27 @@ export class Snapshot extends EventEmitter { } } + /** + * Modifies transaction selector to include the multiplexed session previous + * transaction id. + * This is essential for operations that use an `inline begin`. + * @protected + * @param transaction The transaction selector object that will be mutated + * to include the multiplexed session previous transaction id. + */ + protected _setPreviousTransactionId( + transaction: spannerClient.spanner.v1.ITransactionSelector, + ): void { + transaction.begin!.readWrite! = Object.assign( + {}, + transaction.begin!.readWrite! || {}, + { + multiplexedSessionPreviousTransactionId: + this.multiplexedSessionPreviousTransactionId, + }, + ); + } + /** * @typedef {object} TransactionResponse * @property {string|Buffer} id The transaction ID. 
@@ -452,6 +474,13 @@ export class Snapshot extends EventEmitter { const session = this.session.formattedName_!; const options = this._options; + if ( + this.multiplexedSessionPreviousTransactionId && + (this.session.parent as Database).isMuxEnabledForRW_ + ) { + options.readWrite!.multiplexedSessionPreviousTransactionId = + this.multiplexedSessionPreviousTransactionId; + } const reqOpts: spannerClient.spanner.v1.IBeginTransactionRequest = { session, options, @@ -702,6 +731,14 @@ export class Snapshot extends EventEmitter { transaction.singleUse = this._options; } + if ( + !this.id && + this._options.readWrite && + (this.session.parent as Database).isMuxEnabledForRW_ + ) { + this._setPreviousTransactionId(transaction); + } + const directedReadOptions = this._getDirectedReadOptions( request.directedReadOptions, ); @@ -1308,6 +1345,14 @@ export class Snapshot extends EventEmitter { } else { transaction.singleUse = this._options; } + + if ( + !this.id && + this._options.readWrite && + (this.session.parent as Database).isMuxEnabledForRW_ + ) { + this._setPreviousTransactionId(transaction); + } delete query.gaxOptions; delete query.json; delete query.jsonOptions; @@ -2015,6 +2060,14 @@ export class Transaction extends Dml { transaction.begin = this._options; } + if ( + !this.id && + this._options.readWrite && + (this.session.parent as Database).isMuxEnabledForRW_ + ) { + this._setPreviousTransactionId(transaction); + } + const requestOptionsWithTag = this.configureTagOptions( false, this.requestOptions?.transactionTag ?? undefined, diff --git a/test/database.ts b/test/database.ts index 2c7f5f9c8..8ef6c6659 100644 --- a/test/database.ts +++ b/test/database.ts @@ -158,6 +158,16 @@ export class FakeSessionFactory extends EventEmitter { return true; } } + isMultiplexedEnabledForRW(): boolean { + if ( + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS === 'true' && + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW === 'true' + ) { + return true; + } else { + return false; + } + } } class FakeTable { diff --git a/test/session-factory.ts b/test/session-factory.ts index 39b4557e5..db74ac634 100644 --- a/test/session-factory.ts +++ b/test/session-factory.ts @@ -409,4 +409,28 @@ describe('SessionFactory', () => { }); }); }); + + describe('isMultiplexedEnabledForRW', () => { + describe('when multiplexed session is enabled for read/write transactions', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; + }); + it('should have enabled the multiplexed', () => { + const sessionFactory = new SessionFactory(DATABASE, NAME, POOL_OPTIONS); + assert.strictEqual(sessionFactory.isMultiplexedEnabledForRW(), true); + }); + }); + + describe('when multiplexed session is disabled for read/write transactions', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'false'; + }); + it('should not have enabled the multiplexed', () => { + const sessionFactory = new SessionFactory(DATABASE, NAME, POOL_OPTIONS); + assert.strictEqual(sessionFactory.isMultiplexedEnabledForRW(), false); + }); + }); + }); }); diff --git a/test/transaction-runner.ts b/test/transaction-runner.ts index 2ed0f23d5..1aba522b2 100644 --- a/test/transaction-runner.ts +++ b/test/transaction-runner.ts @@ -24,11 +24,13 @@ import * as through from 'through2'; import {RunTransactionOptions} from '../src/transaction-runner'; import 
{google} from '../protos/protos'; import IsolationLevel = google.spanner.v1.TransactionOptions.IsolationLevel; +import {randomUUID} from 'crypto'; // eslint-disable-next-line @typescript-eslint/no-var-requires const concat = require('concat-stream'); class FakeTransaction extends EventEmitter { + multiplexedSessionPreviousTransactionId; async begin(): Promise {} request() {} requestStream() {} @@ -229,11 +231,38 @@ describe('TransactionRunner', () => { await runner.getTransaction(); assert.strictEqual(beginStub.callCount, 1); }); + + describe('when multiplexed session is enabled for read/write transaction', () => { + it('should set the multiplexedSessionPreviousTransactionId in the new transaction object', async () => { + const expectedTransaction = new FakeTransaction(); + const fakePreviousTransactionId = 'fake-transaction-id'; + sandbox.stub(expectedTransaction, 'begin').resolves(); + + sandbox.stub(SESSION, 'transaction').returns(expectedTransaction); + delete runner.transaction; + + runner.multiplexedSessionPreviousTransactionId = + fakePreviousTransactionId; + + // multiplexed session + runner.session = Object.assign({multiplexed: true}, SESSION); + + const transaction = await runner.getTransaction(); + + assert.strictEqual( + transaction.multiplexedSessionPreviousTransactionId, + fakePreviousTransactionId, + ); + }); + }); }); describe('run', () => { + let getTransactionStub; beforeEach(() => { - sandbox.stub(runner, 'getTransaction').resolves(fakeTransaction); + getTransactionStub = sandbox + .stub(runner, 'getTransaction') + .resolves(fakeTransaction); }); it('should run a transaction', async () => { @@ -306,6 +335,72 @@ describe('TransactionRunner', () => { done(); }); }); + + describe('when multiplexed session is enabled for read/write', () => { + it('should update the multiplexedSessionPreviousTransactionId before retrying aborted transaction', async () => { + const fakeReturnValue = 12; + const fakeError = new Error('err') as grpc.ServiceError; + fakeError.code = grpc.status.ABORTED; + + const fakeTransaction1 = Object.assign( + {id: randomUUID()}, + new FakeTransaction(), + ); + const fakeTransaction2 = Object.assign( + {id: randomUUID()}, + new FakeTransaction(), + ); + const fakeTransaction3 = Object.assign( + {id: randomUUID()}, + new FakeTransaction(), + ); + + getTransactionStub.onCall(0).resolves(fakeTransaction1); + getTransactionStub.onCall(1).resolves(fakeTransaction2); + getTransactionStub.onCall(2).resolves(fakeTransaction3); + + runFn.onCall(0).callsFake(() => { + // assert on first call the multiplexedSessionPreviousTransactionId is set to undefined + assert.strictEqual( + runner.multiplexedSessionPreviousTransactionId, + undefined, + ); + return Promise.reject(fakeError); + }); + + // first retry + runFn.onCall(1).callsFake(() => { + // assert on second call the multiplexedSessionPreviousTransactionId is set to first transaction id + assert.strictEqual( + runner.multiplexedSessionPreviousTransactionId, + fakeTransaction1.id, + ); + return Promise.reject(fakeError); + }); + + // second retry + runFn.onCall(2).callsFake(() => { + // assert on third call multiplexedSessionPreviousTransactionId is set to second transaction id + assert.strictEqual( + runner.multiplexedSessionPreviousTransactionId, + fakeTransaction2.id, + ); + return Promise.resolve(fakeReturnValue); + }); + + const delayStub = sandbox + .stub(runner, 'getNextDelay') + .withArgs(fakeError) + .returns(0); + + const returnValue = await runner.run(); + + assert.strictEqual(returnValue, fakeReturnValue); 
+ // assert that retry happens twice + assert.strictEqual(runner.attempts, 2); + assert.strictEqual(delayStub.callCount, 2); + }); + }); }); }); diff --git a/test/transaction.ts b/test/transaction.ts index 95c9883d3..b958b3740 100644 --- a/test/transaction.ts +++ b/test/transaction.ts @@ -1702,6 +1702,64 @@ describe('Transaction', () => { ), ); }); + + describe('when multiplexed session is enabled for read/write', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; + }); + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = + 'false'; + }); + it('should pass multiplexedSessionPreviousTransactionId in the BeginTransactionRequest upon retrying an aborted transaction', () => { + const fakePreviousTransactionId = 'fake-previous-transaction-id'; + const database = { + formattedName_: 'formatted-database-name', + isMuxEnabledForRW_: true, + parent: INSTANCE, + }; + const SESSION = { + parent: database, + formattedName_: SESSION_NAME, + request: REQUEST, + requestStream: REQUEST_STREAM, + }; + // multiplexed session + const multiplexedSession = Object.assign( + {multiplexed: true}, + SESSION, + ); + transaction = new Transaction(multiplexedSession); + // transaction option must contain the previous transaction id for multiplexed session + transaction.multiplexedSessionPreviousTransactionId = + fakePreviousTransactionId; + const stub = sandbox.stub(transaction, 'request'); + transaction.begin(); + + const expectedOptions = { + isolationLevel: 0, + readWrite: { + multiplexedSessionPreviousTransactionId: + fakePreviousTransactionId, + }, + }; + const {client, method, reqOpts, headers} = stub.lastCall.args[0]; + + assert.strictEqual(client, 'SpannerClient'); + assert.strictEqual(method, 'beginTransaction'); + // request options should contain the multiplexedSessionPreviousTransactionId + assert.deepStrictEqual(reqOpts.options, expectedOptions); + assert.deepStrictEqual( + headers, + Object.assign( + {[LEADER_AWARE_ROUTING_HEADER]: true}, + transaction.commonHeaders_, + ), + ); + }); + }); }); describe('commit', () => { From 0666f05d589e2f229b44dffae8e9649220bccf8b Mon Sep 17 00:00:00 2001 From: surbhigarg92 Date: Thu, 17 Jul 2025 14:34:23 +0530 Subject: [PATCH 25/31] feat: Add support for AFE latency metrics (#2348) --- src/common.ts | 11 ++ src/index.ts | 25 ++++ src/metrics/README.md | 2 + src/metrics/constants.ts | 13 ++ src/metrics/interceptor.ts | 23 ++-- src/metrics/metrics-tracer-factory.ts | 22 ++++ src/metrics/metrics-tracer.ts | 61 ++++++++- test/database.ts | 2 + test/index.ts | 3 +- test/instance.ts | 3 +- test/metrics/interceptor.ts | 60 +++++++-- test/metrics/metrics-tracer.ts | 112 +++++++++++++++-- test/metrics/metrics.ts | 173 +++++++++++++++++++++++++- test/session.ts | 2 + test/transaction.ts | 2 + 15 files changed, 479 insertions(+), 35 deletions(-) diff --git a/src/common.ts b/src/common.ts index 9688f9810..81d13fdcf 100644 --- a/src/common.ts +++ b/src/common.ts @@ -17,6 +17,7 @@ import {grpc, CallOptions, Operation as GaxOperation} from 'google-gax'; import {google as instanceAdmin} from '../protos/protos'; import {google as databaseAdmin} from '../protos/protos'; +import {Spanner} from '.'; export type IOperation = instanceAdmin.longrunning.IOperation; @@ -86,6 +87,12 @@ export const LEADER_AWARE_ROUTING_HEADER = 'x-goog-spanner-route-to-leader'; */ export const 
END_TO_END_TRACING_HEADER = 'x-goog-spanner-end-to-end-tracing'; +/* + * AFE SERVER TIMING header. + */ +export const AFE_SERVER_TIMING_HEADER = + 'x-goog-spanner-enable-afe-server-timing'; + /** * Add Leader aware routing header to existing header list. * @param headers Existing header list. @@ -111,6 +118,10 @@ export function getCommonHeaders( headers[END_TO_END_TRACING_HEADER] = 'true'; } + if (Spanner.isAFEServerTimingEnabled()) { + headers[AFE_SERVER_TIMING_HEADER] = 'true'; + } + headers[CLOUD_RESOURCE_HEADER] = resourceName; return headers; diff --git a/src/index.ts index 7bc1d14ff..e1361d02a 100644 --- a/src/index.ts +++ b/src/index.ts @@ -316,6 +316,7 @@ class Spanner extends GrpcService { _observabilityOptions: ObservabilityOptions | undefined; private _universeDomain: string; private _isEmulatorEnabled: boolean; + private static _isAFEServerTimingEnabled: boolean | undefined; readonly _nthClientId: number; /** @@ -331,6 +332,30 @@ class Spanner extends GrpcService { static GOOGLE_STANDARD_SQL = google.spanner.admin.database.v1.DatabaseDialect.GOOGLE_STANDARD_SQL; + /** + * Returns whether AFE (Spanner API Frontend) server timing is enabled. + * + * This method checks the value of the environment variable + * `SPANNER_DISABLE_AFE_SERVER_TIMING`. If the variable is explicitly set to the + * string `'true'`, then AFE server timing is considered disabled, and this method + * returns `false`. For all other values (including if the variable is unset), + * the method returns `true`. + * + * @returns {boolean} `true` if AFE server timing is enabled; otherwise, `false`. + */ + public static isAFEServerTimingEnabled = (): boolean => { + if (this._isAFEServerTimingEnabled === undefined) { + this._isAFEServerTimingEnabled = + process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] !== 'true'; + } + return this._isAFEServerTimingEnabled; + }; + + /** Resets the cached value (use in tests if env changes). */ + public static _resetAFEServerTimingForTest(): void { + this._isAFEServerTimingEnabled = undefined; + } + /** * Gets the configured Spanner emulator host from an environment variable. */ diff --git a/src/metrics/README.md index ca78af31a..d0124b8bf 100644 --- a/src/metrics/README.md +++ b/src/metrics/README.md @@ -12,6 +12,8 @@ The exporter filters metrics based on the following conditions, utilizing values * `operation_count` * `gfe_latencies` * `gfe_connectivity_error_count` + * `afe_latencies` + * `afe_connectivity_error_count` ## Service Endpoint The exporter sends metrics to the Google Cloud Monitoring [service endpoint](https://cloud.google.com/python/docs/reference/monitoring/latest/google.cloud.monitoring_v3.services.metric_service.MetricServiceClient#google_cloud_monitoring_v3_services_metric_service_MetricServiceClient_create_service_time_series), distinct from the regular client endpoint. This service endpoint operates under a different quota limit than the user endpoint and features an additional server-side filter that only permits a predefined set of metrics to pass through.
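The allow-list behavior described in the README excerpt above can be pictured with a short TypeScript sketch. This is a hypothetical illustration only, not the exporter's actual API: the constant and function names below are made up, and the point is simply that metrics whose descriptor name is not on the predefined list are dropped client-side before anything is sent to createServiceTimeSeries.

const EXPORTABLE_METRIC_NAMES = new Set([
  'operation_latencies',
  'attempt_latencies',
  'operation_count',
  'attempt_count',
  'gfe_latencies',
  'gfe_connectivity_error_count',
  'afe_latencies',
  'afe_connectivity_error_count',
]);

interface CollectedMetric {
  descriptor: {name: string};
}

// Keep only the metrics the Cloud Monitoring service endpoint will accept;
// everything else is filtered out before export.
function filterExportableMetrics<T extends CollectedMetric>(metrics: T[]): T[] {
  return metrics.filter(m => EXPORTABLE_METRIC_NAMES.has(m.descriptor.name));
}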
diff --git a/src/metrics/constants.ts b/src/metrics/constants.ts index cd2d22fdc..959eeb3d8 100644 --- a/src/metrics/constants.ts +++ b/src/metrics/constants.ts @@ -65,6 +65,9 @@ export const METRIC_NAME_ATTEMPT_COUNT = 'attempt_count'; export const METRIC_NAME_GFE_LATENCIES = 'gfe_latencies'; export const METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT = 'gfe_connectivity_error_count'; +export const METRIC_NAME_AFE_LATENCIES = 'afe_latencies'; +export const METRIC_NAME_AFE_CONNECTIVITY_ERROR_COUNT = + 'afe_connectivity_error_count'; export const METRIC_NAMES = new Set([ METRIC_NAME_OPERATION_LATENCIES, METRIC_NAME_ATTEMPT_LATENCIES, @@ -72,6 +75,8 @@ export const METRIC_NAMES = new Set([ METRIC_NAME_OPERATION_COUNT, METRIC_NAME_ATTEMPT_COUNT, METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + METRIC_NAME_AFE_LATENCIES, + METRIC_NAME_AFE_CONNECTIVITY_ERROR_COUNT, ]); export const UNKNOWN_ATTRIBUTE = 'unknown'; @@ -107,8 +112,16 @@ export const GFE_LATENCY_VIEW = new View({ ), }); +export const AFE_LATENCY_VIEW = new View({ + instrumentName: METRIC_NAME_AFE_LATENCIES, + aggregation: new ExplicitBucketHistogramAggregation( + HISTOGRAM_BUCKET_BOUNDARIES, + ), +}); + export const METRIC_VIEWS = [ OPERATION_LATENCY_VIEW, ATTEMPT_LATENCY_VIEW, GFE_LATENCY_VIEW, + AFE_LATENCY_VIEW, ]; diff --git a/src/metrics/interceptor.ts b/src/metrics/interceptor.ts index 21ec30883..31dae03a9 100644 --- a/src/metrics/interceptor.ts +++ b/src/metrics/interceptor.ts @@ -19,7 +19,7 @@ import {MetricsTracerFactory} from './metrics-tracer-factory'; * Interceptor for recording metrics on gRPC calls. * * This interceptor records attempt metrics at the start and completion of each gRPC call, - * and also records Google Front End (GFE) metrics such as latency and connectivity errors + * and also records Google Front End (GFE), AFE metrics such as latency and connectivity errors * based on the presence and value of the 'server-timing' header in the response metadata. * * @param {object} options - The gRPC call options, including method definition. @@ -36,13 +36,17 @@ export const MetricInterceptor = (options, nextCall) => { metricsTracer?.recordAttemptStart(); const newListener = { onReceiveMetadata: function (metadata, next) { - // Record GFE Metrics - // GFE latency if available, - // or else increase the GFE connectivity error count + // Record GFE/AFE Metrics + // GFE/AFE latency if available, + // or else increase the GFE/AFE connectivity error count if (metricsTracer) { - const gfeHeader = metadata.getMap()['server-timing']; - const timing = metricsTracer?.extractGfeLatency(gfeHeader); - metricsTracer.gfeLatency = timing ?? null; + const serverTimingHeader = metadata.getMap()['server-timing']; + const gfeTiming = + metricsTracer?.extractGfeLatency(serverTimingHeader); + metricsTracer.gfeLatency = gfeTiming ?? null; + const afeTiming = + metricsTracer?.extractAfeLatency(serverTimingHeader); + metricsTracer.afeLatency = afeTiming ?? 
null; } next(metadata); @@ -60,6 +64,11 @@ export const MetricInterceptor = (options, nextCall) => { } else { metricsTracer?.recordGfeConnectivityErrorCount(status.code); } + if (metricsTracer?.afeLatency) { + metricsTracer?.recordAfeLatency(status.code); + } else { + metricsTracer?.recordAfeConnectivityErrorCount(status.code); + } }, }; next(metadata, newListener); diff --git a/src/metrics/metrics-tracer-factory.ts b/src/metrics/metrics-tracer-factory.ts index 2372a26be..5b416ac60 100644 --- a/src/metrics/metrics-tracer-factory.ts +++ b/src/metrics/metrics-tracer-factory.ts @@ -46,6 +46,8 @@ export class MetricsTracerFactory { private _instrumentOperationLatency!: Histogram; private _instrumentGfeConnectivityErrorCount!: Counter; private _instrumentGfeLatency!: Histogram; + private _instrumentAfeConnectivityErrorCount!: Counter; + private _instrumentAfeLatency!: Histogram; private _clientHash: string; private _clientName: string; private _clientUid: string; @@ -243,6 +245,8 @@ export class MetricsTracerFactory { this._instrumentOperationLatency, this._instrumentGfeConnectivityErrorCount, this._instrumentGfeLatency, + this._instrumentAfeConnectivityErrorCount, + this._instrumentAfeLatency, MetricsTracerFactory.enabled, database, instance, @@ -380,6 +384,24 @@ export class MetricsTracerFactory { 'Number of requests that failed to reach the Google network.', }, ); + + this._instrumentAfeLatency = meter.createHistogram( + Constants.METRIC_NAME_AFE_LATENCIES, + { + unit: 'ms', + description: + 'Latency between Spanner API Frontend receiving an RPC and starting to write back the response', + }, + ); + + this._instrumentAfeConnectivityErrorCount = meter.createCounter( + Constants.METRIC_NAME_AFE_CONNECTIVITY_ERROR_COUNT, + { + unit: '1', + description: + 'Number of requests that failed to reach the Spanner API Frontend.', + }, + ); } /** diff --git a/src/metrics/metrics-tracer.ts b/src/metrics/metrics-tracer.ts index 61749e59c..ede4c7f7e 100644 --- a/src/metrics/metrics-tracer.ts +++ b/src/metrics/metrics-tracer.ts @@ -21,6 +21,7 @@ import { METRIC_LABEL_KEY_STATUS, MONITORED_RES_LABEL_KEY_INSTANCE, } from './constants'; +import {Spanner} from '..'; /** * MetricAttemptTracer tracks the start time and status of a single gRPC attempt. @@ -109,10 +110,10 @@ class MetricOperationTracer { /** * MetricsTracer is responsible for recording and managing metrics related to * gRPC Spanner operations and attempts counters, and latencies, - * as well as Google Front End (GFE) metrics such as latency and connectivity errors. + * as well as Google Front End (GFE)/AFE metrics such as latency and connectivity errors. * * This class provides methods to record the start and completion of operations - * and attempts, extract GFE latency from response headers. + * and attempts, extract GFE/AFE latency from response headers. * It also handles setting of required Spanner metric attributes to * be later consumed by the SpannerMetricsExporter. */ @@ -131,6 +132,12 @@ export class MetricsTracer { * The current GFE latency associated with this tracer. */ public gfeLatency: number | null = null; + + /* + * The current AFE latency associated with this tracer. + */ + public afeLatency: number | null = null; + /** * Constructs a new MetricsTracer. * @@ -140,6 +147,8 @@ export class MetricsTracer { * @param _instrumentOperationLatency Histogram for operation latency metrics. * @param _instrumentGfeConnectivityErrorCount Counter for GFE connectivity errors. * @param _instrumentGfeLatency Histogram for GFE latency metrics. 
+ * @param _instrumentAfeConnectivityErrorCount Counter for AFE connectivity errors. + * @param _instrumentAfeLatency Histogram for AFE latency metrics. * @param enabled Whether metrics recording is enabled. */ constructor( @@ -149,6 +158,8 @@ export class MetricsTracer { private _instrumentOperationLatency: Histogram | null, private _instrumentGfeConnectivityErrorCount: Counter | null, private _instrumentGfeLatency: Histogram | null, + private _instrumentAfeConnectivityErrorCount: Counter | null, + private _instrumentAfeLatency: Histogram | null, public enabled: boolean, private _database: string, private _instance: string, @@ -283,6 +294,22 @@ export class MetricsTracer { return Number(match[1]); } + /** + * Extracts the AFE latency value (in milliseconds) from a 'server-timing' header string. + * Returns null if the header is missing or does not contain a valid latency value. + * + * @param header The 'server-timing' header string. + * @returns The extracted AFE latency in milliseconds, or null if not found. + */ + public extractAfeLatency(header: string): number | null { + if (!Spanner.isAFEServerTimingEnabled()) return null; + const regex = /afe; dur=([0-9]+).*/; + if (header === undefined) return null; + const match = header.match(regex); + if (!match) return null; + return Number(match[1]); + } + /** * Records the provided GFE latency. * @param latency The GFE latency in milliseconds. @@ -313,6 +340,36 @@ export class MetricsTracer { this._instrumentGfeConnectivityErrorCount?.add(1, attributes); } + /** + * Increments the AFE connectivity error count metric. + */ + public recordAfeConnectivityErrorCount(statusCode: Status) { + if (!this.enabled || !Spanner.isAFEServerTimingEnabled()) return; + const attributes = {...this._clientAttributes}; + attributes[METRIC_LABEL_KEY_STATUS] = Status[statusCode]; + this._instrumentAfeConnectivityErrorCount?.add(1, attributes); + } + + /** + * Records the provided AFE latency. + * @param latency The AFE latency in milliseconds. + */ + public recordAfeLatency(statusCode: Status) { + if (!this.enabled || !Spanner.isAFEServerTimingEnabled()) return; + if (!this.afeLatency) { + console.error( + 'ERROR: Attempted to record AFE metric with no latency value.', + ); + return; + } + + const attributes = {...this._clientAttributes}; + attributes[METRIC_LABEL_KEY_STATUS] = Status[statusCode]; + + this._instrumentAfeLatency?.record(this.afeLatency, attributes); + this.afeLatency = null; // Reset latency value + } + /** * Creates and returns a set of OTEL attributes for operation-level metrics. * @returns The operation attributes object. 
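As a rough, self-contained illustration of the server-timing format that the extract methods above target, the sketch below (a hypothetical helper, not part of the library, and ignoring the SPANNER_DISABLE_AFE_SERVER_TIMING gate that the real methods honor) pulls both durations out of a combined header value such as 'gfet4t7; dur=123, afe; dur=30':

// Minimal sketch; the regexes mirror the ones used by the extract methods above.
function parseServerTimingDurations(header?: string): {
  gfe: number | null;
  afe: number | null;
} {
  if (!header) {
    return {gfe: null, afe: null};
  }
  const gfeMatch = header.match(/gfet4t7; dur=([0-9]+)/);
  const afeMatch = header.match(/afe; dur=([0-9]+)/);
  return {
    gfe: gfeMatch ? Number(gfeMatch[1]) : null,
    afe: afeMatch ? Number(afeMatch[1]) : null,
  };
}

// Example: parseServerTimingDurations('gfet4t7; dur=123, afe; dur=30')
// returns {gfe: 123, afe: 30}; a header with neither entry returns two nulls.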
diff --git a/test/database.ts b/test/database.ts index 8ef6c6659..da6edfcee 100644 --- a/test/database.ts +++ b/test/database.ts @@ -34,6 +34,7 @@ import {IOperation} from '../src/instance'; import { CLOUD_RESOURCE_HEADER, LEADER_AWARE_ROUTING_HEADER, + AFE_SERVER_TIMING_HEADER, } from '../src/common'; import {google} from '../protos/protos'; import {protos} from '../src'; @@ -411,6 +412,7 @@ describe('Database', () => { it('should set the commonHeaders_', () => { assert.deepStrictEqual(database.commonHeaders_, { [CLOUD_RESOURCE_HEADER]: database.formattedName_, + [AFE_SERVER_TIMING_HEADER]: 'true', }); }); }); diff --git a/test/index.ts b/test/index.ts index 3159fa10c..c41e6e147 100644 --- a/test/index.ts +++ b/test/index.ts @@ -37,7 +37,7 @@ import { GetInstanceConfigsOptions, GetInstancesOptions, } from '../src'; -import {CLOUD_RESOURCE_HEADER} from '../src/common'; +import {CLOUD_RESOURCE_HEADER, AFE_SERVER_TIMING_HEADER} from '../src/common'; import {MetricsTracerFactory} from '../src/metrics/metrics-tracer-factory'; import IsolationLevel = protos.google.spanner.v1.TransactionOptions.IsolationLevel; const singer = require('./data/singer'); @@ -371,6 +371,7 @@ describe('Spanner', () => { it('should set the commonHeaders_', () => { assert.deepStrictEqual(spanner.commonHeaders_, { [CLOUD_RESOURCE_HEADER]: spanner.projectFormattedName_, + [AFE_SERVER_TIMING_HEADER]: 'true', }); }); diff --git a/test/instance.ts b/test/instance.ts index bf287b5bf..7f9be090c 100644 --- a/test/instance.ts +++ b/test/instance.ts @@ -33,7 +33,7 @@ import {toArray} from '../src/helper'; import {SessionPoolOptions} from '../src/session-pool'; import {Backup} from '../src/backup'; import {PreciseDate} from '@google-cloud/precise-date'; -import {CLOUD_RESOURCE_HEADER} from '../src/common'; +import {CLOUD_RESOURCE_HEADER, AFE_SERVER_TIMING_HEADER} from '../src/common'; let promisified = false; const fakePfy = extend({}, pfy, { @@ -177,6 +177,7 @@ describe('Instance', () => { it('should set the commonHeaders_', () => { assert.deepStrictEqual(instance.commonHeaders_, { [CLOUD_RESOURCE_HEADER]: instance.formattedName_, + [AFE_SERVER_TIMING_HEADER]: 'true', }); }); }); diff --git a/test/metrics/interceptor.ts b/test/metrics/interceptor.ts index bbaa5896c..dd651a937 100644 --- a/test/metrics/interceptor.ts +++ b/test/metrics/interceptor.ts @@ -27,7 +27,7 @@ describe('MetricInterceptor', () => { let mockNextCall: sinon.SinonStub; let mockInterceptingCall: any; let mockListener: any; - let gfeMetadata: any; + let serverTimingMetadata: any; let emptyMetadata: any; let mockStatus: any; let mockOptions: any; @@ -47,11 +47,19 @@ describe('MetricInterceptor', () => { mockMetricsTracer.extractGfeLatency = sandbox .stub() .callsFake((header: string) => { - if (header === 'gfet4t7; dur=90') { + if (header === 'gfet4t7; dur=90, afe; dur=30') { return 90; } return null; }) as sinon.SinonStub<[string], number | null>; + mockMetricsTracer.extractAfeLatency = sandbox + .stub() + .callsFake((header: string) => { + if (header === 'gfet4t7; dur=90, afe; dur=30') { + return 30; + } + return null; + }) as sinon.SinonStub<[string], number | null>; mockMetricsTracer.recordGfeLatency = sandbox.stub< [latency: number], void @@ -86,11 +94,11 @@ describe('MetricInterceptor', () => { onReceiveStatus: sandbox.stub(), }; - gfeMetadata = new grpc.Metadata(); - gfeMetadata.set('content-type', 'application/grpc'); - gfeMetadata.set('date', 'Thu, 19 Jun 2020 00:01:02 GMT'); - gfeMetadata.set('server-timing', 'gfet4t7; dur=90'); - gfeMetadata.set( + 
serverTimingMetadata = new grpc.Metadata(); + serverTimingMetadata.set('content-type', 'application/grpc'); + serverTimingMetadata.set('date', 'Thu, 19 Jun 2020 00:01:02 GMT'); + serverTimingMetadata.set('server-timing', 'gfet4t7; dur=90, afe; dur=30'); + serverTimingMetadata.set( 'alt-svc', 'h3=":443"; ma=2592000,h3-29=":443"; ma=2592000', ); @@ -136,7 +144,7 @@ describe('MetricInterceptor', () => { interceptingCall.start(testMetadata, mockListener); // duration value from the header's gfet4t7 value should be recorded as GFE latency - capturedListener.onReceiveMetadata(gfeMetadata); + capturedListener.onReceiveMetadata(serverTimingMetadata); capturedListener.onReceiveStatus(mockStatus); assert.equal(mockMetricsTracer.recordGfeLatency.callCount, 1); assert.equal( @@ -149,6 +157,24 @@ describe('MetricInterceptor', () => { ); }); + it('AFE Metrics - Latency', () => { + const interceptingCall = MetricInterceptor(mockOptions, mockNextCall); + interceptingCall.start(testMetadata, mockListener); + + // duration value from the header's afe value should be recorded as AFE latency + capturedListener.onReceiveMetadata(serverTimingMetadata); + capturedListener.onReceiveStatus(mockStatus); + assert.equal(mockMetricsTracer.recordAfeLatency.callCount, 1); + assert.equal( + mockMetricsTracer.recordAfeLatency.getCall(0).args, + Status.OK, + ); + assert.equal( + mockMetricsTracer.recordAfeConnectivityErrorCount.callCount, + 0, + ); + }); + it('GFE Metrics - Connectivity Error Count', () => { const interceptingCall = MetricInterceptor(mockOptions, mockNextCall); interceptingCall.start(testMetadata, mockListener); @@ -166,5 +192,23 @@ describe('MetricInterceptor', () => { Status.OK, ); }); + + it('AFE Metrics - Connectivity Error Count', () => { + const interceptingCall = MetricInterceptor(mockOptions, mockNextCall); + interceptingCall.start(testMetadata, mockListener); + + // Calls received without latency values should increase connectivity error count + capturedListener.onReceiveMetadata(emptyMetadata); + capturedListener.onReceiveStatus(mockStatus); + assert.equal(mockMetricsTracer.recordAfeLatency.callCount, 0); + assert.equal( + mockMetricsTracer.recordAfeConnectivityErrorCount.callCount, + 1, + ); + assert.equal( + mockMetricsTracer.recordAfeConnectivityErrorCount.getCall(0).args, + Status.OK, + ); + }); }); }); diff --git a/test/metrics/metrics-tracer.ts b/test/metrics/metrics-tracer.ts index 9d93d9197..87bd90a2e 100644 --- a/test/metrics/metrics-tracer.ts +++ b/test/metrics/metrics-tracer.ts @@ -19,6 +19,7 @@ import * as Constants from '../../src/metrics/constants'; import {MetricsTracer} from '../../src/metrics/metrics-tracer'; import {MetricsTracerFactory} from '../../src/metrics/metrics-tracer-factory'; +import {Spanner} from '../../src'; const DATABASE = 'test-db'; const INSTANCE = 'instance'; @@ -33,6 +34,8 @@ describe('MetricsTracer', () => { let fakeOperationLatency: any; let fakeGfeCounter: any; let fakeGfeLatency: any; + let fakeAfeCounter: any; + let fakeAfeLatency: any; let sandbox: sinon.SinonSandbox; beforeEach(() => { sandbox = sinon.createSandbox(); @@ -60,6 +63,14 @@ describe('MetricsTracer', () => { record: sinon.spy(), }; + fakeAfeCounter = { + add: sinon.spy(), + }; + + fakeAfeLatency = { + record: sinon.spy(), + }; + tracer = new MetricsTracer( fakeAttemptCounter, fakeAttemptLatency, @@ -67,6 +78,8 @@ describe('MetricsTracer', () => { fakeOperationLatency, fakeGfeCounter, fakeGfeLatency, + fakeAfeCounter, + fakeAfeLatency, true, // enabled, DATABASE, INSTANCE, @@ -164,7 +177,64 
@@ describe('MetricsTracer', () => { }); }); - describe('extractGfeLatency', () => { + describe('recordAfeLatency', () => { + afterEach(() => { + Spanner._resetAFEServerTimingForTest(); + process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] = 'false'; + }); + + it('should record AFE latency if enabled', () => { + tracer.enabled = true; + tracer.afeLatency = 123; + tracer.recordAfeLatency(Status.OK); + assert.strictEqual(fakeAfeLatency.record.calledOnce, true); + }); + + it('should not record if AFE server timing is disabled', () => { + tracer.enabled = true; + Spanner._resetAFEServerTimingForTest(); + process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] = 'true'; + tracer.afeLatency = 123; + tracer.recordAfeLatency(Status.OK); + assert.strictEqual(fakeAfeLatency.record.called, false); + }); + + it('should not record if metrics are disabled', () => { + tracer.enabled = false; + tracer.afeLatency = 123; + tracer.recordAfeLatency(Status.OK); + assert.strictEqual(fakeAfeLatency.record.called, false); + }); + }); + + describe('recordAfeConnectivityErrorCount', () => { + afterEach(() => { + Spanner._resetAFEServerTimingForTest(); + process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] = 'false'; + }); + + it('should increment AFE error counter if enabled', () => { + tracer.enabled = true; + tracer.recordAfeConnectivityErrorCount(Status.OK); + assert.strictEqual(fakeAfeCounter.add.calledOnce, true); + }); + + it('should not increment if metrics are disabled', () => { + tracer.enabled = false; + tracer.recordAfeConnectivityErrorCount(Status.OK); + assert.strictEqual(fakeAfeCounter.add.called, false); + }); + + it('should not increment if AFE server timing is disabled', () => { + tracer.enabled = true; + Spanner._resetAFEServerTimingForTest(); + process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] = 'true'; + tracer.recordAfeConnectivityErrorCount(Status.OK); + assert.strictEqual(fakeAfeCounter.add.called, false); + }); + }); + + describe('extractGfeLatency & extractAfeLatency', () => { let tracer: MetricsTracer; beforeEach(() => { tracer = new MetricsTracer( @@ -174,6 +244,8 @@ describe('MetricsTracer', () => { null, null, null, + null, + null, true, DATABASE, INSTANCE, @@ -182,27 +254,43 @@ describe('MetricsTracer', () => { ); }); - it('should extract latency from a valid server-timing header', () => { - const header = 'gfet4t7; dur=123'; - const latency = tracer.extractGfeLatency(header); - assert.strictEqual(latency, 123); + it('should extract afe and gfe latency from a valid server-timing header', () => { + const header = 'gfet4t7; dur=123, afe; dur=30, other=value'; + const gfeLatency = tracer.extractGfeLatency(header); + assert.strictEqual(gfeLatency, 123); + const afeLatency = tracer.extractAfeLatency(header); + assert.strictEqual(afeLatency, 30); }); it('should return null if header is undefined', () => { - const latency = tracer.extractGfeLatency(undefined as any); - assert.strictEqual(latency, null); + const gfeLatency = tracer.extractGfeLatency(undefined as any); + assert.strictEqual(gfeLatency, null); + const afeLatency = tracer.extractAfeLatency(undefined as any); + assert.strictEqual(afeLatency, null); }); it('should return null if header does not match expected format', () => { const header = 'some-other-header'; - const latency = tracer.extractGfeLatency(header); - assert.strictEqual(latency, null); + const gfeLatency = tracer.extractGfeLatency(header); + assert.strictEqual(gfeLatency, null); + const afeLatency = tracer.extractAfeLatency(header); + assert.strictEqual(afeLatency, null); }); - 
it('should extract only the first number if extra data is present', () => { + it('should extract only the gfe latency if extra data is present', () => { const header = 'gfet4t7; dur=456; other=value'; - const latency = tracer.extractGfeLatency(header); - assert.strictEqual(latency, 456); + const gfeLatency = tracer.extractGfeLatency(header); + assert.strictEqual(gfeLatency, 456); + const afeLatency = tracer.extractAfeLatency(header); + assert.strictEqual(afeLatency, null); + }); + + it('should extract only the afe latency if extra data is present', () => { + const header = 'other=value, afe; dur=30; '; + const gfeLatency = tracer.extractGfeLatency(header); + assert.strictEqual(gfeLatency, null); + const afeLatency = tracer.extractAfeLatency(header); + assert.strictEqual(afeLatency, 30); }); }); }); diff --git a/test/metrics/metrics.ts b/test/metrics/metrics.ts index 2cc291e45..041c24b68 100644 --- a/test/metrics/metrics.ts +++ b/test/metrics/metrics.ts @@ -29,6 +29,8 @@ import { METRIC_NAME_ATTEMPT_COUNT, METRIC_NAME_GFE_LATENCIES, METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + METRIC_NAME_AFE_LATENCIES, + METRIC_NAME_AFE_CONNECTIVITY_ERROR_COUNT, } from '../../src/metrics/constants'; describe('Test metrics with mock server', () => { @@ -181,6 +183,7 @@ describe('Test metrics with mock server', () => { let reader: InMemoryMetricReader; let factory: MetricsTracerFactory | null; let gfeStub; + let afeStub; let exporterStub; const MIN_LATENCY = 0; const commonAttributes = { @@ -214,6 +217,7 @@ describe('Test metrics with mock server', () => { afterEach(async () => { gfeStub?.restore(); + afeStub?.restore(); await factory?.resetMeterProvider(); await MetricsTracerFactory.resetInstance(); }); @@ -222,6 +226,9 @@ describe('Test metrics with mock server', () => { gfeStub = sandbox .stub(MetricsTracer.prototype, 'extractGfeLatency') .callsFake(() => 123); + afeStub = sandbox + .stub(MetricsTracer.prototype, 'extractAfeLatency') + .callsFake(() => 30); const database = newTestDatabase(); const startTime = new Date(); await database.run(selectSql); @@ -240,6 +247,10 @@ describe('Test metrics with mock server', () => { resourceMetrics, METRIC_NAME_GFE_LATENCIES, ); + const afeLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_AFE_LATENCIES, + ); const attemptCountData = getMetricData( resourceMetrics, METRIC_NAME_ATTEMPT_COUNT, @@ -284,18 +295,26 @@ describe('Test metrics with mock server', () => { const gfeLatency = getAggregatedValue(gfeLatenciesData, attributes); assert.strictEqual(gfeLatency, 123); + + const afeLatency = getAggregatedValue(afeLatenciesData, attributes); + assert.strictEqual(afeLatency, 30); }); // check that the latency matches up with the measured elapsed time within 10ms assertApprox(elapsedTime, totalOperationLatency, 10); - // Make sure no GFE connectivity errors ar emitted since we got GFE latencies + // Make sure no GFE/AFE connectivity errors ar emitted since we got GFE latencies const gfeMissingData = hasMetricData( resourceMetrics, METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, ); + const afeMissingData = hasMetricData( + resourceMetrics, + METRIC_NAME_AFE_CONNECTIVITY_ERROR_COUNT, + ); assert.ok(!gfeMissingData); + assert.ok(!afeMissingData); await database.close(); }); @@ -304,6 +323,9 @@ describe('Test metrics with mock server', () => { gfeStub = sandbox .stub(MetricsTracer.prototype, 'extractGfeLatency') .callsFake(() => 123); + afeStub = sandbox + .stub(MetricsTracer.prototype, 'extractAfeLatency') + .callsFake(() => 30); const database = newTestDatabase(); 
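The extraction cases above pin down the `server-timing` format the interceptor parses: a `gfet4t7; dur=<ms>` entry for GFE latency and an `afe; dur=<ms>` entry for AFE latency, with any other entries ignored. A minimal standalone sketch of that parsing, matching what the tests assert; the helper name is hypothetical and the patched `extractGfeLatency`/`extractAfeLatency` may be implemented differently:

// Sketch only: return the dur value for one server-timing key, or null.
function extractDuration(header: string | undefined, key: string): number | null {
  if (!header) {
    return null;
  }
  // e.g. 'gfet4t7; dur=123, afe; dur=30, other=value'
  const match = header.match(new RegExp(`${key};\\s*dur=(\\d+)`));
  return match ? Number(match[1]) : null;
}

extractDuration('gfet4t7; dur=123, afe; dur=30', 'gfet4t7'); // 123
extractDuration('other=value, afe; dur=30; ', 'afe');        // 30
extractDuration('some-other-header', 'gfet4t7');             // null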
const err = { message: 'Temporary unavailable', @@ -337,6 +359,10 @@ describe('Test metrics with mock server', () => { resourceMetrics, METRIC_NAME_GFE_LATENCIES, ); + const afeLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_AFE_LATENCIES, + ); const sessionAttributes = { ...commonAttributes, @@ -358,6 +384,10 @@ describe('Test metrics with mock server', () => { getAggregatedValue(gfeLatenciesData, sessionAttributes), 123, ); + assert.strictEqual( + getAggregatedValue(afeLatenciesData, sessionAttributes), + 30, + ); const executeAttributes = { ...commonAttributes, @@ -389,12 +419,19 @@ describe('Test metrics with mock server', () => { 123, getAggregatedValue(gfeLatenciesData, executeAttributes), ); + assert.strictEqual( + 30, + getAggregatedValue(afeLatenciesData, executeAttributes), + ); }); - it('should create connectivity error count metric if GFE latency is not in header', async () => { + it('should create connectivity error count metric if GFE/AFE latency is not in header', async () => { gfeStub = sandbox .stub(MetricsTracer.prototype, 'extractGfeLatency') .callsFake(() => null); + afeStub = sandbox + .stub(MetricsTracer.prototype, 'extractAfeLatency') + .callsFake(() => null); const database = newTestDatabase(); await database.run(selectSql); const {resourceMetrics} = await reader.collect(); @@ -419,9 +456,14 @@ describe('Test metrics with mock server', () => { resourceMetrics, METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, ); + const afeConnectivityErrorCountData = getMetricData( + resourceMetrics, + METRIC_NAME_AFE_CONNECTIVITY_ERROR_COUNT, + ); - // Verify GFE latency doesn't exist + // Verify GFE AFE latency doesn't exist assert.ok(!hasMetricData(resourceMetrics, METRIC_NAME_GFE_LATENCIES)); + assert.ok(!hasMetricData(resourceMetrics, METRIC_NAME_AFE_LATENCIES)); const methods = ['batchCreateSessions', 'executeStreamingSql']; methods.forEach(method => { const attributes = { @@ -438,11 +480,15 @@ describe('Test metrics with mock server', () => { assert.strictEqual(getAggregatedValue(attemptCountData, attributes), 1); getAggregatedValue(attemptLatenciesData, attributes); - // Verify that GFE connectivity error count increased + // Verify that GFE AFE connectivity error count increased assert.strictEqual( getAggregatedValue(connectivityErrorCountData, attributes), 1, ); + assert.strictEqual( + getAggregatedValue(afeConnectivityErrorCountData, attributes), + 1, + ); }); }); @@ -450,6 +496,9 @@ describe('Test metrics with mock server', () => { gfeStub = sandbox .stub(MetricsTracer.prototype, 'extractGfeLatency') .callsFake(() => 123); + afeStub = sandbox + .stub(MetricsTracer.prototype, 'extractAfeLatency') + .callsFake(() => 30); const database = newTestDatabase(); const err = { message: 'Temporary unavailable', @@ -503,6 +552,10 @@ describe('Test metrics with mock server', () => { resourceMetrics, METRIC_NAME_GFE_LATENCIES, ); + const afeLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_AFE_LATENCIES, + ); const sessionAttributes = { ...commonAttributes, @@ -524,6 +577,10 @@ describe('Test metrics with mock server', () => { 123, getAggregatedValue(gfeLatenciesData, sessionAttributes), ); + assert.strictEqual( + 30, + getAggregatedValue(afeLatenciesData, sessionAttributes), + ); const executeAttributes = { ...commonAttributes, @@ -546,6 +603,10 @@ describe('Test metrics with mock server', () => { 123, getAggregatedValue(gfeLatenciesData, executeAttributes), ); + assert.strictEqual( + 30, + getAggregatedValue(afeLatenciesData, executeAttributes), + ); // 
Verify that commit metrics have 2 attempts and 1 operation const commitOkAttributes = { @@ -582,6 +643,110 @@ describe('Test metrics with mock server', () => { 123, getAggregatedValue(gfeLatenciesData, commitOkAttributes), ); + assert.strictEqual( + 30, + getAggregatedValue(afeLatenciesData, commitOkAttributes), + ); + }); + + it('should have correct latency values in metrics except AFE when AFE Server timing is disabled', async () => { + Spanner._resetAFEServerTimingForTest(); + process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] = 'true'; + gfeStub = sandbox + .stub(MetricsTracer.prototype, 'extractGfeLatency') + .callsFake(() => 123); + afeStub = sandbox + .stub(MetricsTracer.prototype, 'extractAfeLatency') + .callsFake(() => 30); + const database = newTestDatabase(); + const startTime = new Date(); + await database.run(selectSql); + const endTime = new Date(); + + const elapsedTime = endTime.valueOf() - startTime.valueOf(); + + const methods = ['batchCreateSessions', 'executeStreamingSql']; + + const {resourceMetrics} = await reader.collect(); + const operationCountData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_COUNT, + ); + const gfeLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_GFE_LATENCIES, + ); + const attemptCountData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_COUNT, + ); + const operationLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_OPERATION_LATENCIES, + ); + const attemptLatenciesData = getMetricData( + resourceMetrics, + METRIC_NAME_ATTEMPT_LATENCIES, + ); + + let totalOperationLatency = 0; + methods.forEach(method => { + const attributes = { + ...commonAttributes, + database: `database-${dbCounter}`, + method: method, + }; + const operationCount = getAggregatedValue( + operationCountData, + attributes, + ); + assert.strictEqual(operationCount, 1); + + const attemptCount = getAggregatedValue(attemptCountData, attributes); + assert.strictEqual(attemptCount, 1); + + const operationLatency = getAggregatedValue( + operationLatenciesData, + attributes, + ); + totalOperationLatency += operationLatency; + + const attemptLatency = getAggregatedValue( + attemptLatenciesData, + attributes, + ); + // Since we only have one attempt, the attempt latency should be fairly close to the operation latency + assertApprox(MIN_LATENCY, attemptLatency, 30); + + const gfeLatency = getAggregatedValue(gfeLatenciesData, attributes); + assert.strictEqual(gfeLatency, 123); + }); + + // check that the latency matches up with the measured elapsed time within 10ms + assertApprox(elapsedTime, totalOperationLatency, 10); + + // Make sure no GFE connectivity errors are not emitted since we got GFE latencies + const gfeMissingData = hasMetricData( + resourceMetrics, + METRIC_NAME_GFE_CONNECTIVITY_ERROR_COUNT, + ); + assert.ok(!gfeMissingData); + + // Make sure no AFE metrics are not emitted since AFE is disabled. 
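The disabled-AFE assertions in this test hinge on two switches used throughout the new tests: the `SPANNER_DISABLE_AFE_SERVER_TIMING` environment variable and the `Spanner._resetAFEServerTimingForTest()` helper, whose existence suggests the flag is read once and cached. A minimal sketch of the guard the AFE recorders need for these assertions to hold; the names and structure are assumptions, and the flag is re-read on every call here for simplicity:

function afeServerTimingEnabled(): boolean {
  return process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] !== 'true';
}

// Record AFE latency only when metrics are enabled, AFE server timing is on,
// and a latency value was actually extracted from the header.
function recordAfeLatency(
  histogram: {record: (value: number) => void},
  enabled: boolean,
  afeLatency: number | null,
): void {
  if (!enabled || !afeServerTimingEnabled() || afeLatency === null) {
    return;
  }
  histogram.record(afeLatency);
}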
+ const afeMissingData = hasMetricData( + resourceMetrics, + METRIC_NAME_AFE_CONNECTIVITY_ERROR_COUNT, + ); + const afeLatencyMissingData = hasMetricData( + resourceMetrics, + METRIC_NAME_AFE_LATENCIES, + ); + assert.ok(!afeMissingData); + assert.ok(!afeLatencyMissingData); + + await database.close(); + Spanner._resetAFEServerTimingForTest(); + process.env['SPANNER_DISABLE_AFE_SERVER_TIMING'] = 'false'; }); }); }); diff --git a/test/session.ts b/test/session.ts index fdfdff17c..ef7d78c82 100644 --- a/test/session.ts +++ b/test/session.ts @@ -24,6 +24,7 @@ import * as proxyquire from 'proxyquire'; import { CLOUD_RESOURCE_HEADER, LEADER_AWARE_ROUTING_HEADER, + AFE_SERVER_TIMING_HEADER, } from '../src/common'; import {Database, Instance, Spanner} from '../src'; import { @@ -154,6 +155,7 @@ describe('Session', () => { it('should set the commonHeaders_', () => { assert.deepStrictEqual(session.commonHeaders_, { [CLOUD_RESOURCE_HEADER]: session.parent.formattedName_, + [AFE_SERVER_TIMING_HEADER]: 'true', }); }); diff --git a/test/transaction.ts b/test/transaction.ts index b958b3740..37f68a376 100644 --- a/test/transaction.ts +++ b/test/transaction.ts @@ -28,6 +28,7 @@ import {google} from '../protos/protos'; import { CLOUD_RESOURCE_HEADER, LEADER_AWARE_ROUTING_HEADER, + AFE_SERVER_TIMING_HEADER, } from '../src/common'; import { X_GOOG_SPANNER_REQUEST_ID_HEADER, @@ -160,6 +161,7 @@ describe('Transaction', () => { it('should set the commonHeaders_', () => { assert.deepStrictEqual(snapshot.commonHeaders_, { [CLOUD_RESOURCE_HEADER]: snapshot.session.parent.formattedName_, + [AFE_SERVER_TIMING_HEADER]: 'true', }); }); }); From 2e48fde5d9dc2dcb5a05c53a47c49ed44a57ee7a Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Fri, 18 Jul 2025 07:40:31 +0000 Subject: [PATCH 26/31] chore: add mutation key heuristic for mutation only transaction (#2345) --- src/transaction.ts | 105 +++++++++++ test/mockserver/mockspanner.ts | 9 + test/spanner.ts | 181 +++++++++++++++++++ test/transaction.ts | 313 ++++++++++++++++++++++++++++++++- 4 files changed, 607 insertions(+), 1 deletion(-) diff --git a/src/transaction.ts b/src/transaction.ts index 161c08486..5bc3a21ee 100644 --- a/src/transaction.ts +++ b/src/transaction.ts @@ -311,6 +311,7 @@ export class Snapshot extends EventEmitter { _observabilityOptions?: ObservabilityOptions; _traceConfig: traceConfig; protected _dbName?: string; + protected _mutationKey: spannerClient.spanner.v1.Mutation | null; /** * The transaction ID. @@ -383,6 +384,7 @@ export class Snapshot extends EventEmitter { dbName: this._dbName, }; this._latestPreCommitToken = null; + this._mutationKey = null; } protected _updatePrecommitToken(resp: PrecommitTokenProvider): void { @@ -395,6 +397,102 @@ export class Snapshot extends EventEmitter { } } + /** + * Selects a single representative mutation from a list to be used as the + * transaction's `mutationKey`. + * + * This key is required by Spanner and is sent in the `BeginTransactionRequest` + * for read-write transactions that only contain mutations. The selection follows + * a two-tiered heuristic to choose the most significant mutation. + * + * The selection heuristic is as follows: + * + * 1. Priority of Operation Type: High-priority mutations (`delete`, `update`, + * `replace`, `insertOrUpdate`) are always chosen over low-priority + * (`insert`) mutations. + * + * 2. Selection Strategy: + * - If any high-priority mutations exist, one is chosen randomly from + * that group, ignoring the number of rows. 
+ * - If only `insert` mutations exist, the one(s) with the largest number + * of rows are identified, and one is chosen randomly from that subset. + * + * @protected + * @param mutations The list of mutations from which to select the key. + */ + protected _setMutationKey(mutations: spannerClient.spanner.v1.Mutation[]) { + // return if the list is empty + if (mutations.length === 0) { + return; + } + + // maintain a set of high priority keys + const HIGH_PRIORITY_KEYS = new Set([ + 'delete', + 'update', + 'replace', + 'insertOrUpdate', + ]); + + // maintain a variable for low priority key + const LOW_PRIORITY_KEY = 'insert'; + + // Partition mutations into high and low priority groups. + const [highPriority, lowPriority] = mutations.reduce( + (acc, mutation) => { + const key = Object.keys(mutation)[0] as keyof typeof mutation; + if (HIGH_PRIORITY_KEYS.has(key)) { + acc[0].push(mutation); + } else if (key === LOW_PRIORITY_KEY) { + acc[1].push(mutation); + } + // return accumulated mutations list + return acc; + }, + [[], []] as [ + spannerClient.spanner.v1.Mutation[], + spannerClient.spanner.v1.Mutation[], + ], + ); + + // Apply the selection logic based on the rules. + if (highPriority.length > 0) { + // RULE 1: If high-priority keys exist, pick one randomly. + const randomIndex = Math.floor(Math.random() * highPriority.length); + this._mutationKey = highPriority[randomIndex]; + } else if (lowPriority.length > 0) { + // RULE 2: If only 'insert' key(s) exist, find the one with + // highest number of values + const {bestCandidates} = lowPriority.reduce( + (acc, mutation) => { + const size = mutation.insert?.values?.length || 0; + + if (size > acc.maxSize) { + // New largest size found, start a new list + return {maxSize: size, bestCandidates: [mutation]}; + } + if (size === acc.maxSize) { + // Same size as current max, add to list + acc.bestCandidates.push(mutation); + } + // return accumulated mutations list + return acc; + }, + { + maxSize: -1, + bestCandidates: [] as spannerClient.spanner.v1.Mutation[], + }, + ); + + // Pick randomly from the largest 'insert' mutation(s). + const randomIndex = Math.floor(Math.random() * bestCandidates.length); + this._mutationKey = bestCandidates[randomIndex]; + } else { + // No mutations to select from. + this._mutationKey = null; + } + } + /** * Modifies transaction selector to include the multiplexed session previous * transaction id. @@ -486,6 +584,10 @@ export class Snapshot extends EventEmitter { options, }; + if (this._mutationKey) { + reqOpts.mutationKey = this._mutationKey; + } + // Only hand crafted read-write transactions will be able to set a // transaction tag for the BeginTransaction RPC. 
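The same two-tier selection, restated as a small standalone function with a worked example; this only illustrates the heuristic documented above and is not the patched `_setMutationKey` itself:

interface MutationWrite {
  values?: unknown[];
  [key: string]: unknown;
}
type Mutation = {[kind: string]: MutationWrite};

const HIGH_PRIORITY = new Set(['delete', 'update', 'replace', 'insertOrUpdate']);

// Prefer any high-priority mutation; otherwise pick among the inserts with
// the most rows. Ties are broken randomly in both tiers.
function pickMutationKey(mutations: Mutation[]): Mutation | null {
  const high = mutations.filter(m => HIGH_PRIORITY.has(Object.keys(m)[0]));
  if (high.length > 0) {
    return high[Math.floor(Math.random() * high.length)];
  }
  const inserts = mutations.filter(m => Object.keys(m)[0] === 'insert');
  if (inserts.length === 0) {
    return null;
  }
  const rows = (m: Mutation) => m.insert?.values?.length ?? 0;
  const maxRows = Math.max(...inserts.map(rows));
  const largest = inserts.filter(m => rows(m) === maxRows);
  return largest[Math.floor(Math.random() * largest.length)];
}

// With a 3-row insert, a 1-row insert and a 1-row insertOrUpdate in the list,
// the insertOrUpdate always wins; with only the two inserts, the 3-row one wins.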
Also, this.requestOptions // is only set in the constructor of Transaction, which is the constructor @@ -2286,6 +2388,9 @@ export class Transaction extends Dml { } else if (!this._useInRunner) { reqOpts.singleUseTransaction = this._options; } else { + if ((this.session.parent as Database).isMuxEnabledForRW_) { + this._setMutationKey(mutations); + } this.begin().then( () => { this.commit(options, (err, resp) => { diff --git a/test/mockserver/mockspanner.ts b/test/mockserver/mockspanner.ts index d27e2e60c..69cf2a54c 100644 --- a/test/mockserver/mockspanner.ts +++ b/test/mockserver/mockspanner.ts @@ -31,6 +31,7 @@ import Any = google.protobuf.Any; import QueryMode = google.spanner.v1.ExecuteSqlRequest.QueryMode; import NullValue = google.protobuf.NullValue; import {ExecuteSqlRequest, ReadRequest} from '../../src/transaction'; +import {randomInt} from 'crypto'; const PROTO_PATH = 'spanner.proto'; const IMPORT_PATH = __dirname + '/../../../protos'; @@ -1232,9 +1233,17 @@ export class MockSpanner { const transactionId = id.toString().padStart(12, '0'); const fullTransactionId = session.name + '/transactions/' + transactionId; const readTimestamp = options && options.readOnly ? now() : undefined; + const precommitToken = + session.multiplexed && options?.readWrite + ? { + precommitToken: Buffer.from('mock-precommit-token'), + seqNum: randomInt(1, 1000), + } + : null; const transaction = protobuf.Transaction.create({ id: Buffer.from(transactionId), readTimestamp, + precommitToken, }); this.transactions.set(fullTransactionId, transaction); this.transactionOptions.set(fullTransactionId, options); diff --git a/test/spanner.ts b/test/spanner.ts index edb609189..4df0c06c4 100644 --- a/test/spanner.ts +++ b/test/spanner.ts @@ -81,6 +81,7 @@ import {SessionFactory} from '../src/session-factory'; import {MultiplexedSession} from '../src/multiplexed-session'; import {WriteAtLeastOnceOptions} from '../src/database'; import {MetricsTracerFactory} from '../src/metrics/metrics-tracer-factory'; +import {randomUUID} from 'crypto'; const { AlwaysOnSampler, @@ -3917,6 +3918,124 @@ describe('Spanner with mock server', () => { await database.close(); }); }); + + // TODO: enable when mux session support is available in public methods + describe.skip('when multiplexed session is enabled for R/W', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; + }); + + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'false'; + }); + + it('should select the insertOrUpdate(upsert)/delete(deleteRows) mutation key over insert', async () => { + const database = newTestDatabase(); + await database.runTransactionAsync(async tx => { + tx.upsert('foo', [ + {id: 1, name: 'One'}, + {id: 2, name: 'Two'}, + ]); + tx.insert('foo', [{id: 3, name: 'Three'}]); + tx.insert('foo', [{id: 4, name: 'Four'}]); + tx.deleteRows('foo', ['3', '4']); + await tx.commit(); + }); + + const beginTransactionRequest = spannerMock + .getRequests() + .filter(val => { + return (val as v1.BeginTransactionRequest).mutationKey; + }) as v1.BeginTransactionRequest[]; + + // assert on begin transaction request + assert.strictEqual(beginTransactionRequest.length, 1); + + // selected mutation key + const selectedMutationKey = beginTransactionRequest[0]!.mutationKey; + + // assert that mutation key have been selected + assert.ok( + selectedMutationKey, + 'A mutation 
key should have been selected', + ); + + // get the type of mutation key + const mutationType = Object.keys(selectedMutationKey!)[0]; + + // assert that mutation key is not insert + assert.notStrictEqual( + mutationType, + 'insert', + 'The selected mutation key should not be "insert"', + ); + + // assert that mutation key is either insertOrUpdate or delete + assert.ok( + ['insertOrUpdate', 'delete'].includes(mutationType), + "Expected either 'insertOrUpdate' or 'delete' key.", + ); + + const commitRequest = spannerMock.getRequests().filter(val => { + return (val as v1.CommitRequest).precommitToken; + }) as v1.CommitRequest[]; + + // assert on commit request + assert.strictEqual(commitRequest.length, 1); + await database.close(); + }); + + it('should select the mutation key with highest number of values when insert key(s) are present', async () => { + const database = newTestDatabase(); + await database.runTransactionAsync(async tx => { + tx.insert('foo', [ + {id: randomUUID(), name: 'One'}, + {id: randomUUID(), name: 'Two'}, + {id: randomUUID(), name: 'Three'}, + ]); + tx.insert('foo', {id: randomUUID(), name: 'Four'}); + await tx.commit(); + }); + + const beginTransactionRequest = spannerMock + .getRequests() + .filter(val => { + return (val as v1.BeginTransactionRequest).mutationKey; + }) as v1.BeginTransactionRequest[]; + + // assert on begin transaction request + assert.strictEqual(beginTransactionRequest.length, 1); + + // selected mutation key + const selectedMutationKey = beginTransactionRequest[0]!.mutationKey; + + // assert that mutation key have been selected + assert.ok( + selectedMutationKey, + 'A mutation key should have been selected', + ); + + // assert that mutation key is insert + const mutationType = Object.keys(selectedMutationKey!)[0]; + assert.ok( + ['insert'].includes(mutationType), + 'insert key must have been selected', + ); + + // assert that insert mutation key with highest number of rows has been selected + assert.strictEqual(selectedMutationKey.insert?.values?.length, 3); + + const commitRequest = spannerMock.getRequests().filter(val => { + return (val as v1.CommitRequest).precommitToken; + }) as v1.CommitRequest[]; + + // assert on commit request + assert.strictEqual(commitRequest.length, 1); + await database.close(); + }); + }); }); describe('hand-crafted transaction', () => { @@ -5013,6 +5132,68 @@ describe('Spanner with mock server', () => { await database.close(); }); + + // TODO: enable when mux session support is available in public methods + describe.skip('when multiplexed session is enabled for R/W', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; + }); + + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'false'; + }); + + it('should pass the mutation key in begin transaction request in case of mutations only transactions', async () => { + const database = newTestDatabase(); + await database.table('foo').upsert({id: 1, name: randomUUID()}); + await database.table('foo').insert({id: 2, name: randomUUID()}); + await database.table('foo').deleteRows(['2']); + + const beginTransactionRequest = spannerMock + .getRequests() + .filter(val => { + return (val as v1.BeginTransactionRequest).mutationKey; + }) as v1.BeginTransactionRequest[]; + + // assert on begin transaction request + assert.strictEqual(beginTransactionRequest.length, 3); + + // assert 
that on first begin transaction request insertOrUpdate is being selected as mutation key + assert.ok( + ['insertOrUpdate'].includes( + Object.keys(beginTransactionRequest[0]!.mutationKey!)[0], + ), + 'insertOrUpdate key must have been selected', + ); + + // assert that on second begin transaction request insert is being selected as mutation key + assert.ok( + ['insert'].includes( + Object.keys(beginTransactionRequest[1]!.mutationKey!)[0], + ), + 'insert key must have been selected', + ); + + // assert that on third begin transaction request delete is being selected as mutation key + assert.ok( + ['delete'].includes( + Object.keys(beginTransactionRequest[2]!.mutationKey!)[0], + ), + 'delete key must have been selected', + ); + + const commitRequest = spannerMock.getRequests().filter(val => { + return (val as v1.CommitRequest).precommitToken; + }) as v1.CommitRequest[]; + + // assert on commit request + assert.strictEqual(commitRequest.length, 3); + + await database.close(); + }); + }); }); describe('chunking', () => { diff --git a/test/transaction.ts b/test/transaction.ts index 37f68a376..3e8a8f137 100644 --- a/test/transaction.ts +++ b/test/transaction.ts @@ -1733,7 +1733,7 @@ describe('Transaction', () => { {multiplexed: true}, SESSION, ); - transaction = new Transaction(multiplexedSession); + const transaction = new Transaction(multiplexedSession); // transaction option must contain the previous transaction id for multiplexed session transaction.multiplexedSessionPreviousTransactionId = fakePreviousTransactionId; @@ -1761,6 +1761,56 @@ describe('Transaction', () => { ), ); }); + + it('should send the correct options if _mutationKey is set in the transaction object', () => { + // session with multiplexed enabled + const multiplexedSession = Object.assign( + {multiplexed: true}, + SESSION, + ); + + // fake mutation key + const fakeMutationKey = { + insertOrUpdate: { + table: 'my-table-123', + columns: ['Id', 'Name'], + values: [ + { + values: [{stringValue: 'Id3'}, {stringValue: 'Name3'}], + }, + ], + }, + } as google.spanner.v1.Mutation; + + const transaction = new Transaction(multiplexedSession); + + // stub the transaction request + const stub = sandbox.stub(transaction, 'request'); + + // set the _mutationKey in the transaction object + transaction._mutationKey = fakeMutationKey; + + // make a call to begin + transaction.begin(); + + const expectedOptions = {isolationLevel: 0, readWrite: {}}; + const {client, method, reqOpts, headers} = stub.lastCall.args[0]; + + // assert on the begin transaction call + assert.strictEqual(client, 'SpannerClient'); + assert.strictEqual(method, 'beginTransaction'); + assert.deepStrictEqual(reqOpts.options, expectedOptions); + // assert that if the _mutationKey is set in the transaction object + // it is getting pass in the request as well along with request options + assert.deepStrictEqual(reqOpts.mutationKey, fakeMutationKey); + assert.deepStrictEqual( + headers, + Object.assign( + {[LEADER_AWARE_ROUTING_HEADER]: true}, + transaction.commonHeaders_, + ), + ); + }); }); }); @@ -1912,6 +1962,98 @@ describe('Transaction', () => { assert.deepStrictEqual(reqOpts.singleUseTransaction, expectedOptions); }); + describe('when multiplexed session is enabled for read write', () => { + before(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'true'; + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = 'true'; + }); + + after(() => { + process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS = 'false'; + 
process.env.GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS_FOR_RW = + 'false'; + }); + + it('should call _setMutationKey when neither `id` is set nor `singleUseTransaction` is used', async () => { + // fake mutation key + const fakeMutations = [ + { + insertOrUpdate: { + table: 'my-table-123', + columns: ['Id', 'Name'], + values: [ + { + values: [{stringValue: 'Id1'}, {stringValue: 'Name1'}], + }, + ], + }, + } as google.spanner.v1.Mutation, + ]; + + // fake transaction id + const fakeTransactionId = 'fake-tx-id-12345'; + + const database = { + formattedName_: 'formatted-database-name', + isMuxEnabledForRW_: true, + parent: INSTANCE, + }; + const SESSION = { + parent: database, + formattedName_: SESSION_NAME, + request: REQUEST, + requestStream: REQUEST_STREAM, + }; + // multiplexed session + const multiplexedSession = Object.assign( + {multiplexed: true}, + SESSION, + ); + + // transaction object + const transaction = new Transaction(multiplexedSession); + + // ensure transaction is not single use transaction + transaction._useInRunner = true; + + // ensure transaction ID is not set + transaction.id = undefined; + + // set the _queuedMutations with the fakeMutations list + transaction._queuedMutations = fakeMutations; + + // spy on _setMutationKey + const setMutationKeySpy = sandbox.spy(transaction, '_setMutationKey'); + + // stub the begin method + const beginStub = sandbox.stub(transaction, 'begin').callsFake(() => { + transaction.id = fakeTransactionId; + return Promise.resolve(); + }); + + // stub transaction request + sandbox.stub(transaction, 'request'); + + // make a call to commit + transaction.commit(); + + // ensure that _setMutationKey was got called once + sinon.assert.calledOnce(setMutationKeySpy); + + // ensure that _setMutationKey got called with correct arguments + sinon.assert.calledWith(setMutationKeySpy, fakeMutations); + + // ensure begin was called + sinon.assert.calledOnce(beginStub); + + // ensure begin set the transaction id + assert.strictEqual(transaction.id, fakeTransactionId); + + // ensure _mutationKey is set + assert.strictEqual(transaction._mutationKey, fakeMutations[0]); + }); + }); + it('should call `end` once complete', () => { const endStub = sandbox.stub(transaction, 'end'); const requestStub = sandbox.stub(transaction, 'request'); @@ -2376,6 +2518,175 @@ describe('Transaction', () => { }); }); + describe('_setMutationKey', () => { + let transaction; + before(() => { + transaction = new Transaction(SESSION); + }); + + it('should have _mutationKey set to null, if mutations list is empty', () => { + // empty mutations list + const mutations: google.spanner.v1.Mutation[] = []; + // make a call to _setMutationKey + transaction._setMutationKey(mutations); + // ensure that the transaction's _mutationKey is null + assert.strictEqual(transaction._mutationKey, null); + }); + + it('should select a high-priority mutation when both types are present', () => { + // expected mutation objects + const insertMutation = { + insert: { + table: 'my-table-123', + columns: ['Id', 'Name'], + values: [ + { + values: [ + { + stringValue: 'Id1', + }, + { + stringValue: 'Name1', + }, + ], + }, + ], + }, + } as google.spanner.v1.Mutation; + + const updateMutation = { + update: { + table: 'my-table-123', + columns: ['Id', 'Name'], + values: [ + { + values: [ + { + stringValue: 'Id2', + }, + { + stringValue: 'Name2', + }, + ], + }, + ], + }, + } as google.spanner.v1.Mutation; + + const deleteMutation = { + delete: { + table: 'my-table-123', + keySet: { + keys: [ + { + values: [ + { + 
stringValue: 'Id1', + }, + ], + }, + ], + }, + }, + } as google.spanner.v1.Mutation; + + const mutations = [insertMutation, updateMutation, deleteMutation]; + + transaction._setMutationKey(mutations); + + // assert that _mutationKeys is not null + assert.notEqual(transaction._mutationKey, null); + + // get the selected mutation key + const selectedKey = Object.keys(transaction._mutationKey!)[0]; + + // assert that chosen key is not insert + assert.notStrictEqual( + selectedKey, + 'insert', + 'The selected mutation should not be an insert', + ); + + // assert that chosen key is either insertOrUpdate or delete + assert.ok( + ['update', 'delete'].includes(selectedKey), + 'The selected mutation should be a high-priority type', + ); + }); + + it('should select a mutation with maximum number of rows when only insert keys are present', () => { + // insert mutation objects + const insertMutation1 = { + insert: { + table: 'my-table-123', + columns: ['Id', 'Name'], + values: [ + // Row 1 + { + values: [{stringValue: 'Id1'}, {stringValue: 'Name1'}], + }, + // Row 2 + { + values: [{stringValue: 'Id2'}, {stringValue: 'Name2'}], + }, + // Row 3 + { + values: [{stringValue: 'Id3'}, {stringValue: 'Name3'}], + }, + ], + }, + } as google.spanner.v1.Mutation; + + const insertMutation2 = { + insert: { + table: 'my-table-123', + columns: ['Id', 'Name'], + values: [ + // Row 1 + { + values: [{stringValue: 'Id1'}, {stringValue: 'Name1'}], + }, + // Row 2 + { + values: [{stringValue: 'Id2'}, {stringValue: 'Name2'}], + }, + // Row 3 + { + values: [{stringValue: 'Id3'}, {stringValue: 'Name3'}], + }, + // Row 4 + { + values: [{stringValue: 'Id4'}, {stringValue: 'Name4'}], + }, + ], + }, + } as google.spanner.v1.Mutation; + + const mutations = [insertMutation1, insertMutation2]; + + transaction._setMutationKey(mutations); + + // assert that _mutationKeys is not null + assert.notEqual(transaction._mutationKey, null); + + // get the selected mutation key + const selectedKey = Object.keys(transaction._mutationKey!)[0]; + + // assert that chosen key is insert + assert.strictEqual( + selectedKey, + 'insert', + 'The selected mutation should be an insert', + ); + // assert that key with maximum of rows is selected + assert.strictEqual( + transaction._mutationKey, + insertMutation2, + 'The mutation with the most rows should have been selected', + ); + }); + }); + describe('getUniqueKeys', () => { it('should create a list of unique keys', () => { const rows = [ From 0875cd82e99fa6c95ab38807e09c5921303775f8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 21 Jul 2025 10:44:27 +0530 Subject: [PATCH 27/31] feat(spanner): A new field `snapshot_timestamp` is added to message `.google.spanner.v1.CommitResponse` (#2350) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(spanner): A new field `snapshot_timestamp` is added to message `.google.spanner.v1.CommitResponse` docs(spanner): A comment for field `commit_stats` in message `.google.spanner.v1.CommitResponse` is changed docs(spanner): A comment for field `precommit_token` in message `.google.spanner.v1.CommitResponse` is changed docs(spanner): A comment for message `.google.spanner.v1.TransactionOptions` is changed docs(spanner): A comment for enum value `READ_LOCK_MODE_UNSPECIFIED` in enum `ReadLockMode` is changed docs(spanner): A comment for enum value `PESSIMISTIC` in enum `ReadLockMode` is changed docs(spanner): A comment for enum value `OPTIMISTIC` in enum 
`ReadLockMode` is changed docs(spanner): A comment for field `multiplexed_session_previous_transaction_id` in message `.google.spanner.v1.TransactionOptions` is changed docs(spanner): A comment for field `exclude_txn_from_change_streams` in message `.google.spanner.v1.TransactionOptions` is changed docs(spanner): A comment for message `.google.spanner.v1.MultiplexedSessionPrecommitToken` is changed PiperOrigin-RevId: 784456709 Source-Link: https://github.com/googleapis/googleapis/commit/62babf26d128656ac94fcc73c415b5f9f85cfc45 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c1d5fdd6222217236d7375534126b4f6ccdae9c9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzFkNWZkZDYyMjIyMTcyMzZkNzM3NTUzNDEyNmI0ZjZjY2RhZTljOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/spanner/v1/commit_response.proto | 13 +- protos/google/spanner/v1/transaction.proto | 396 ++---------------- protos/protos.d.ts | 6 + protos/protos.js | 28 ++ protos/protos.json | 4 + 5 files changed, 87 insertions(+), 360 deletions(-) diff --git a/protos/google/spanner/v1/commit_response.proto b/protos/google/spanner/v1/commit_response.proto index beeb3123e..b29663501 100644 --- a/protos/google/spanner/v1/commit_response.proto +++ b/protos/google/spanner/v1/commit_response.proto @@ -44,16 +44,21 @@ message CommitResponse { // The Cloud Spanner timestamp at which the transaction committed. google.protobuf.Timestamp commit_timestamp = 1; - // The statistics about this Commit. Not returned by default. + // The statistics about this `Commit`. Not returned by default. // For more information, see // [CommitRequest.return_commit_stats][google.spanner.v1.CommitRequest.return_commit_stats]. CommitStats commit_stats = 2; - // Clients should examine and retry the commit if any of the following - // reasons are populated. + // You must examine and retry the commit if the following is populated. oneof MultiplexedSessionRetry { // If specified, transaction has not committed yet. - // Clients must retry the commit with the new precommit token. + // You must retry the commit with the new precommit token. MultiplexedSessionPrecommitToken precommit_token = 4; } + + // If `TransactionOptions.isolation_level` is set to + // `IsolationLevel.REPEATABLE_READ`, then the snapshot timestamp is the + // timestamp at which all reads in the transaction ran. This timestamp is + // never returned. + google.protobuf.Timestamp snapshot_timestamp = 5; } diff --git a/protos/google/spanner/v1/transaction.proto b/protos/google/spanner/v1/transaction.proto index 612e491a3..81e7649f4 100644 --- a/protos/google/spanner/v1/transaction.proto +++ b/protos/google/spanner/v1/transaction.proto @@ -28,330 +28,7 @@ option java_package = "com.google.spanner.v1"; option php_namespace = "Google\\Cloud\\Spanner\\V1"; option ruby_package = "Google::Cloud::Spanner::V1"; -// Transactions: -// -// Each session can have at most one active transaction at a time (note that -// standalone reads and queries use a transaction internally and do count -// towards the one transaction limit). After the active transaction is -// completed, the session can immediately be re-used for the next transaction. -// It is not necessary to create a new session for each transaction. -// -// Transaction modes: -// -// Cloud Spanner supports three transaction modes: -// -// 1. Locking read-write. 
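For the `snapshot_timestamp` field added to `CommitResponse` above, the generated protobuf.js bindings treat it like any other `google.protobuf.Timestamp` sub-message. A small sketch using the repository's generated module; the import path and timestamp value are placeholders:

import {google} from '../protos/protos';

const response = google.spanner.v1.CommitResponse.fromObject({
  commitTimestamp: {seconds: 1753200000, nanos: 0},
  snapshotTimestamp: {seconds: 1753200000, nanos: 0},
});
// response.snapshotTimestamp is a google.protobuf.Timestamp message, or null
// when the server does not return one.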
This type of transaction is the only way -// to write data into Cloud Spanner. These transactions rely on -// pessimistic locking and, if necessary, two-phase commit. -// Locking read-write transactions may abort, requiring the -// application to retry. -// -// 2. Snapshot read-only. Snapshot read-only transactions provide guaranteed -// consistency across several reads, but do not allow -// writes. Snapshot read-only transactions can be configured to read at -// timestamps in the past, or configured to perform a strong read -// (where Spanner will select a timestamp such that the read is -// guaranteed to see the effects of all transactions that have committed -// before the start of the read). Snapshot read-only transactions do not -// need to be committed. -// -// Queries on change streams must be performed with the snapshot read-only -// transaction mode, specifying a strong read. Please see -// [TransactionOptions.ReadOnly.strong][google.spanner.v1.TransactionOptions.ReadOnly.strong] -// for more details. -// -// 3. Partitioned DML. This type of transaction is used to execute -// a single Partitioned DML statement. Partitioned DML partitions -// the key space and runs the DML statement over each partition -// in parallel using separate, internal transactions that commit -// independently. Partitioned DML transactions do not need to be -// committed. -// -// For transactions that only read, snapshot read-only transactions -// provide simpler semantics and are almost always faster. In -// particular, read-only transactions do not take locks, so they do -// not conflict with read-write transactions. As a consequence of not -// taking locks, they also do not abort, so retry loops are not needed. -// -// Transactions may only read-write data in a single database. They -// may, however, read-write data in different tables within that -// database. -// -// Locking read-write transactions: -// -// Locking transactions may be used to atomically read-modify-write -// data anywhere in a database. This type of transaction is externally -// consistent. -// -// Clients should attempt to minimize the amount of time a transaction -// is active. Faster transactions commit with higher probability -// and cause less contention. Cloud Spanner attempts to keep read locks -// active as long as the transaction continues to do reads, and the -// transaction has not been terminated by -// [Commit][google.spanner.v1.Spanner.Commit] or -// [Rollback][google.spanner.v1.Spanner.Rollback]. Long periods of -// inactivity at the client may cause Cloud Spanner to release a -// transaction's locks and abort it. -// -// Conceptually, a read-write transaction consists of zero or more -// reads or SQL statements followed by -// [Commit][google.spanner.v1.Spanner.Commit]. At any time before -// [Commit][google.spanner.v1.Spanner.Commit], the client can send a -// [Rollback][google.spanner.v1.Spanner.Rollback] request to abort the -// transaction. -// -// Semantics: -// -// Cloud Spanner can commit the transaction if all read locks it acquired -// are still valid at commit time, and it is able to acquire write -// locks for all writes. Cloud Spanner can abort the transaction for any -// reason. If a commit attempt returns `ABORTED`, Cloud Spanner guarantees -// that the transaction has not modified any user data in Cloud Spanner. -// -// Unless the transaction commits, Cloud Spanner makes no guarantees about -// how long the transaction's locks were held for. 
It is an error to -// use Cloud Spanner locks for any sort of mutual exclusion other than -// between Cloud Spanner transactions themselves. -// -// Retrying aborted transactions: -// -// When a transaction aborts, the application can choose to retry the -// whole transaction again. To maximize the chances of successfully -// committing the retry, the client should execute the retry in the -// same session as the original attempt. The original session's lock -// priority increases with each consecutive abort, meaning that each -// attempt has a slightly better chance of success than the previous. -// -// Under some circumstances (for example, many transactions attempting to -// modify the same row(s)), a transaction can abort many times in a -// short period before successfully committing. Thus, it is not a good -// idea to cap the number of retries a transaction can attempt; -// instead, it is better to limit the total amount of time spent -// retrying. -// -// Idle transactions: -// -// A transaction is considered idle if it has no outstanding reads or -// SQL queries and has not started a read or SQL query within the last 10 -// seconds. Idle transactions can be aborted by Cloud Spanner so that they -// don't hold on to locks indefinitely. If an idle transaction is aborted, the -// commit will fail with error `ABORTED`. -// -// If this behavior is undesirable, periodically executing a simple -// SQL query in the transaction (for example, `SELECT 1`) prevents the -// transaction from becoming idle. -// -// Snapshot read-only transactions: -// -// Snapshot read-only transactions provides a simpler method than -// locking read-write transactions for doing several consistent -// reads. However, this type of transaction does not support writes. -// -// Snapshot transactions do not take locks. Instead, they work by -// choosing a Cloud Spanner timestamp, then executing all reads at that -// timestamp. Since they do not acquire locks, they do not block -// concurrent read-write transactions. -// -// Unlike locking read-write transactions, snapshot read-only -// transactions never abort. They can fail if the chosen read -// timestamp is garbage collected; however, the default garbage -// collection policy is generous enough that most applications do not -// need to worry about this in practice. -// -// Snapshot read-only transactions do not need to call -// [Commit][google.spanner.v1.Spanner.Commit] or -// [Rollback][google.spanner.v1.Spanner.Rollback] (and in fact are not -// permitted to do so). -// -// To execute a snapshot transaction, the client specifies a timestamp -// bound, which tells Cloud Spanner how to choose a read timestamp. -// -// The types of timestamp bound are: -// -// - Strong (the default). -// - Bounded staleness. -// - Exact staleness. -// -// If the Cloud Spanner database to be read is geographically distributed, -// stale read-only transactions can execute more quickly than strong -// or read-write transactions, because they are able to execute far -// from the leader replica. -// -// Each type of timestamp bound is discussed in detail below. -// -// Strong: Strong reads are guaranteed to see the effects of all transactions -// that have committed before the start of the read. Furthermore, all -// rows yielded by a single read are consistent with each other -- if -// any part of the read observes a transaction, all parts of the read -// see the transaction. 
-// -// Strong reads are not repeatable: two consecutive strong read-only -// transactions might return inconsistent results if there are -// concurrent writes. If consistency across reads is required, the -// reads should be executed within a transaction or at an exact read -// timestamp. -// -// Queries on change streams (see below for more details) must also specify -// the strong read timestamp bound. -// -// See -// [TransactionOptions.ReadOnly.strong][google.spanner.v1.TransactionOptions.ReadOnly.strong]. -// -// Exact staleness: -// -// These timestamp bounds execute reads at a user-specified -// timestamp. Reads at a timestamp are guaranteed to see a consistent -// prefix of the global transaction history: they observe -// modifications done by all transactions with a commit timestamp less than or -// equal to the read timestamp, and observe none of the modifications done by -// transactions with a larger commit timestamp. They will block until -// all conflicting transactions that may be assigned commit timestamps -// <= the read timestamp have finished. -// -// The timestamp can either be expressed as an absolute Cloud Spanner commit -// timestamp or a staleness relative to the current time. -// -// These modes do not require a "negotiation phase" to pick a -// timestamp. As a result, they execute slightly faster than the -// equivalent boundedly stale concurrency modes. On the other hand, -// boundedly stale reads usually return fresher results. -// -// See -// [TransactionOptions.ReadOnly.read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.read_timestamp] -// and -// [TransactionOptions.ReadOnly.exact_staleness][google.spanner.v1.TransactionOptions.ReadOnly.exact_staleness]. -// -// Bounded staleness: -// -// Bounded staleness modes allow Cloud Spanner to pick the read timestamp, -// subject to a user-provided staleness bound. Cloud Spanner chooses the -// newest timestamp within the staleness bound that allows execution -// of the reads at the closest available replica without blocking. -// -// All rows yielded are consistent with each other -- if any part of -// the read observes a transaction, all parts of the read see the -// transaction. Boundedly stale reads are not repeatable: two stale -// reads, even if they use the same staleness bound, can execute at -// different timestamps and thus return inconsistent results. -// -// Boundedly stale reads execute in two phases: the first phase -// negotiates a timestamp among all replicas needed to serve the -// read. In the second phase, reads are executed at the negotiated -// timestamp. -// -// As a result of the two phase execution, bounded staleness reads are -// usually a little slower than comparable exact staleness -// reads. However, they are typically able to return fresher -// results, and are more likely to execute at the closest replica. -// -// Because the timestamp negotiation requires up-front knowledge of -// which rows will be read, it can only be used with single-use -// read-only transactions. -// -// See -// [TransactionOptions.ReadOnly.max_staleness][google.spanner.v1.TransactionOptions.ReadOnly.max_staleness] -// and -// [TransactionOptions.ReadOnly.min_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.min_read_timestamp]. -// -// Old read timestamps and garbage collection: -// -// Cloud Spanner continuously garbage collects deleted and overwritten data -// in the background to reclaim storage space. This process is known -// as "version GC". 
By default, version GC reclaims versions after they -// are one hour old. Because of this, Cloud Spanner cannot perform reads -// at read timestamps more than one hour in the past. This -// restriction also applies to in-progress reads and/or SQL queries whose -// timestamp become too old while executing. Reads and SQL queries with -// too-old read timestamps fail with the error `FAILED_PRECONDITION`. -// -// You can configure and extend the `VERSION_RETENTION_PERIOD` of a -// database up to a period as long as one week, which allows Cloud Spanner -// to perform reads up to one week in the past. -// -// Querying change Streams: -// -// A Change Stream is a schema object that can be configured to watch data -// changes on the entire database, a set of tables, or a set of columns -// in a database. -// -// When a change stream is created, Spanner automatically defines a -// corresponding SQL Table-Valued Function (TVF) that can be used to query -// the change records in the associated change stream using the -// ExecuteStreamingSql API. The name of the TVF for a change stream is -// generated from the name of the change stream: READ_. -// -// All queries on change stream TVFs must be executed using the -// ExecuteStreamingSql API with a single-use read-only transaction with a -// strong read-only timestamp_bound. The change stream TVF allows users to -// specify the start_timestamp and end_timestamp for the time range of -// interest. All change records within the retention period is accessible -// using the strong read-only timestamp_bound. All other TransactionOptions -// are invalid for change stream queries. -// -// In addition, if TransactionOptions.read_only.return_read_timestamp is set -// to true, a special value of 2^63 - 2 will be returned in the -// [Transaction][google.spanner.v1.Transaction] message that describes the -// transaction, instead of a valid read timestamp. This special value should be -// discarded and not used for any subsequent queries. -// -// Please see https://cloud.google.com/spanner/docs/change-streams -// for more details on how to query the change stream TVFs. -// -// Partitioned DML transactions: -// -// Partitioned DML transactions are used to execute DML statements with a -// different execution strategy that provides different, and often better, -// scalability properties for large, table-wide operations than DML in a -// ReadWrite transaction. Smaller scoped statements, such as an OLTP workload, -// should prefer using ReadWrite transactions. -// -// Partitioned DML partitions the keyspace and runs the DML statement on each -// partition in separate, internal transactions. These transactions commit -// automatically when complete, and run independently from one another. -// -// To reduce lock contention, this execution strategy only acquires read locks -// on rows that match the WHERE clause of the statement. Additionally, the -// smaller per-partition transactions hold locks for less time. -// -// That said, Partitioned DML is not a drop-in replacement for standard DML used -// in ReadWrite transactions. -// -// - The DML statement must be fully-partitionable. Specifically, the statement -// must be expressible as the union of many statements which each access only -// a single row of the table. -// -// - The statement is not applied atomically to all rows of the table. Rather, -// the statement is applied atomically to partitions of the table, in -// independent transactions. Secondary index rows are updated atomically -// with the base table rows. 
-// -// - Partitioned DML does not guarantee exactly-once execution semantics -// against a partition. The statement will be applied at least once to each -// partition. It is strongly recommended that the DML statement should be -// idempotent to avoid unexpected results. For instance, it is potentially -// dangerous to run a statement such as -// `UPDATE table SET column = column + 1` as it could be run multiple times -// against some rows. -// -// - The partitions are committed automatically - there is no support for -// Commit or Rollback. If the call returns an error, or if the client issuing -// the ExecuteSql call dies, it is possible that some rows had the statement -// executed on them successfully. It is also possible that statement was -// never executed against other rows. -// -// - Partitioned DML transactions may only contain the execution of a single -// DML statement via ExecuteSql or ExecuteStreamingSql. -// -// - If any error is encountered during the execution of the partitioned DML -// operation (for instance, a UNIQUE INDEX violation, division by zero, or a -// value that cannot be stored due to schema constraints), then the -// operation is stopped at that point and an error is returned. It is -// possible that at this point, some partitions have been committed (or even -// committed multiple times), and other partitions have not been run at all. -// -// Given the above, Partitioned DML is good fit for large, database-wide, -// operations that are idempotent, such as deleting old rows from a very large -// table. +// Options to use for transactions. message TransactionOptions { // Message type to initiate a read-write transaction. Currently this // transaction type has no options. @@ -361,23 +38,26 @@ message TransactionOptions { enum ReadLockMode { // Default value. // - // * If isolation level is `REPEATABLE_READ`, then it is an error to - // specify `read_lock_mode`. Locking semantics default to `OPTIMISTIC`. - // No validation checks are done for reads, except for: + // * If isolation level is + // [REPEATABLE_READ][google.spanner.v1.TransactionOptions.IsolationLevel.REPEATABLE_READ], + // then it is an error to specify `read_lock_mode`. Locking semantics + // default to `OPTIMISTIC`. No validation checks are done for reads, + // except to validate that the data that was served at the snapshot time + // is unchanged at commit time in the following cases: // 1. reads done as part of queries that use `SELECT FOR UPDATE` // 2. reads done as part of statements with a `LOCK_SCANNED_RANGES` // hint // 3. reads done as part of DML statements - // to validate that the data that was served at the snapshot time is - // unchanged at commit time. // * At all other isolation levels, if `read_lock_mode` is the default - // value, then pessimistic read lock is used. + // value, then pessimistic read locks are used. READ_LOCK_MODE_UNSPECIFIED = 0; // Pessimistic lock mode. // // Read locks are acquired immediately on read. - // Semantics described only applies to `SERIALIZABLE` isolation. + // Semantics described only applies to + // [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE] + // isolation. PESSIMISTIC = 1; // Optimistic lock mode. @@ -385,7 +65,9 @@ message TransactionOptions { // Locks for reads within the transaction are not acquired on read. // Instead the locks are acquired on a commit to validate that // read/queried data has not changed since the transaction started. - // Semantics described only applies to `SERIALIZABLE` isolation. 
+ // Semantics described only applies to + // [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE] + // isolation. OPTIMISTIC = 2; } @@ -395,8 +77,6 @@ message TransactionOptions { // Optional. Clients should pass the transaction ID of the previous // transaction attempt that was aborted if this transaction is being // executed on a multiplexed session. - // This feature is not yet supported and will result in an UNIMPLEMENTED - // error. bytes multiplexed_session_previous_transaction_id = 2 [(google.api.field_behavior) = OPTIONAL]; } @@ -442,7 +122,7 @@ message TransactionOptions { // Executes all reads at the given timestamp. Unlike other modes, // reads at a specific timestamp are repeatable; the same read at // the same timestamp always returns the same data. If the - // timestamp is in the future, the read will block until the + // timestamp is in the future, the read is blocked until the // specified timestamp, modulo the read's deadline. // // Useful for large scale consistent reads such as mapreduces, or @@ -491,9 +171,9 @@ message TransactionOptions { SERIALIZABLE = 1; // All reads performed during the transaction observe a consistent snapshot - // of the database, and the transaction will only successfully commit in the - // absence of conflicts between its updates and any concurrent updates that - // have occurred since that snapshot. Consequently, in contrast to + // of the database, and the transaction is only successfully committed in + // the absence of conflicts between its updates and any concurrent updates + // that have occurred since that snapshot. Consequently, in contrast to // `SERIALIZABLE` transactions, only write-write conflicts are detected in // snapshot transactions. // @@ -521,7 +201,7 @@ message TransactionOptions { // on the `session` resource. PartitionedDml partitioned_dml = 3; - // Transaction will not write. + // Transaction does not write. // // Authorization to begin a read-only transaction requires // `spanner.databases.beginReadOnlyTransaction` permission @@ -529,20 +209,24 @@ message TransactionOptions { ReadOnly read_only = 2; } - // When `exclude_txn_from_change_streams` is set to `true`: - // * Mutations from this transaction will not be recorded in change streams - // with DDL option `allow_txn_exclusion=true` that are tracking columns - // modified by these transactions. - // * Mutations from this transaction will be recorded in change streams with - // DDL option `allow_txn_exclusion=false or not set` that are tracking - // columns modified by these transactions. + // When `exclude_txn_from_change_streams` is set to `true`, it prevents read + // or write transactions from being tracked in change streams. + // + // * If the DDL option `allow_txn_exclusion` is set to `true`, then the + // updates + // made within this transaction aren't recorded in the change stream. + // + // * If you don't set the DDL option `allow_txn_exclusion` or if it's + // set to `false`, then the updates made within this transaction are + // recorded in the change stream. // // When `exclude_txn_from_change_streams` is set to `false` or not set, - // mutations from this transaction will be recorded in all change streams that - // are tracking columns modified by these transactions. - // `exclude_txn_from_change_streams` may only be specified for read-write or - // partitioned-dml transactions, otherwise the API will return an - // `INVALID_ARGUMENT` error. 
+ // modifications from this transaction are recorded in all change streams + // that are tracking columns modified by these transactions. + // + // The `exclude_txn_from_change_streams` option can only be specified + // for read-write or partitioned DML transactions, otherwise the API returns + // an `INVALID_ARGUMENT` error. bool exclude_txn_from_change_streams = 5; // Isolation level for the transaction. @@ -569,15 +253,13 @@ message Transaction { // Example: `"2014-10-02T15:01:23.045123456Z"`. google.protobuf.Timestamp read_timestamp = 2; - // A precommit token will be included in the response of a BeginTransaction + // A precommit token is included in the response of a BeginTransaction // request if the read-write transaction is on a multiplexed session and // a mutation_key was specified in the // [BeginTransaction][google.spanner.v1.BeginTransactionRequest]. // The precommit token with the highest sequence number from this transaction // attempt should be passed to the [Commit][google.spanner.v1.Spanner.Commit] // request for this transaction. - // This feature is not yet supported and will result in an UNIMPLEMENTED - // error. MultiplexedSessionPrecommitToken precommit_token = 3; } @@ -609,8 +291,10 @@ message TransactionSelector { // When a read-write transaction is executed on a multiplexed session, // this precommit token is sent back to the client -// as a part of the [Transaction] message in the BeginTransaction response and -// also as a part of the [ResultSet] and [PartialResultSet] responses. +// as a part of the [Transaction][google.spanner.v1.Transaction] message in the +// [BeginTransaction][google.spanner.v1.BeginTransactionRequest] response and +// also as a part of the [ResultSet][google.spanner.v1.ResultSet] and +// [PartialResultSet][google.spanner.v1.PartialResultSet] responses. message MultiplexedSessionPrecommitToken { // Opaque precommit token. bytes precommit_token = 1; diff --git a/protos/protos.d.ts b/protos/protos.d.ts index 5dbcac381..d779f994b 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -32609,6 +32609,9 @@ export namespace google { /** CommitResponse precommitToken */ precommitToken?: (google.spanner.v1.IMultiplexedSessionPrecommitToken|null); + + /** CommitResponse snapshotTimestamp */ + snapshotTimestamp?: (google.protobuf.ITimestamp|null); } /** Represents a CommitResponse. */ @@ -32629,6 +32632,9 @@ export namespace google { /** CommitResponse precommitToken. */ public precommitToken?: (google.spanner.v1.IMultiplexedSessionPrecommitToken|null); + /** CommitResponse snapshotTimestamp. */ + public snapshotTimestamp?: (google.protobuf.ITimestamp|null); + /** CommitResponse MultiplexedSessionRetry. */ public MultiplexedSessionRetry?: "precommitToken"; diff --git a/protos/protos.js b/protos/protos.js index 31694e130..5f06a5fa4 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -84467,6 +84467,7 @@ * @property {google.protobuf.ITimestamp|null} [commitTimestamp] CommitResponse commitTimestamp * @property {google.spanner.v1.CommitResponse.ICommitStats|null} [commitStats] CommitResponse commitStats * @property {google.spanner.v1.IMultiplexedSessionPrecommitToken|null} [precommitToken] CommitResponse precommitToken + * @property {google.protobuf.ITimestamp|null} [snapshotTimestamp] CommitResponse snapshotTimestamp */ /** @@ -84508,6 +84509,14 @@ */ CommitResponse.prototype.precommitToken = null; + /** + * CommitResponse snapshotTimestamp. 
+ * @member {google.protobuf.ITimestamp|null|undefined} snapshotTimestamp + * @memberof google.spanner.v1.CommitResponse + * @instance + */ + CommitResponse.prototype.snapshotTimestamp = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -84552,6 +84561,8 @@ $root.google.spanner.v1.CommitResponse.CommitStats.encode(message.commitStats, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.precommitToken != null && Object.hasOwnProperty.call(message, "precommitToken")) $root.google.spanner.v1.MultiplexedSessionPrecommitToken.encode(message.precommitToken, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.snapshotTimestamp != null && Object.hasOwnProperty.call(message, "snapshotTimestamp")) + $root.google.protobuf.Timestamp.encode(message.snapshotTimestamp, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); return writer; }; @@ -84600,6 +84611,10 @@ message.precommitToken = $root.google.spanner.v1.MultiplexedSessionPrecommitToken.decode(reader, reader.uint32()); break; } + case 5: { + message.snapshotTimestamp = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -84654,6 +84669,11 @@ return "precommitToken." + error; } } + if (message.snapshotTimestamp != null && message.hasOwnProperty("snapshotTimestamp")) { + var error = $root.google.protobuf.Timestamp.verify(message.snapshotTimestamp); + if (error) + return "snapshotTimestamp." + error; + } return null; }; @@ -84684,6 +84704,11 @@ throw TypeError(".google.spanner.v1.CommitResponse.precommitToken: object expected"); message.precommitToken = $root.google.spanner.v1.MultiplexedSessionPrecommitToken.fromObject(object.precommitToken); } + if (object.snapshotTimestamp != null) { + if (typeof object.snapshotTimestamp !== "object") + throw TypeError(".google.spanner.v1.CommitResponse.snapshotTimestamp: object expected"); + message.snapshotTimestamp = $root.google.protobuf.Timestamp.fromObject(object.snapshotTimestamp); + } return message; }; @@ -84703,6 +84728,7 @@ if (options.defaults) { object.commitTimestamp = null; object.commitStats = null; + object.snapshotTimestamp = null; } if (message.commitTimestamp != null && message.hasOwnProperty("commitTimestamp")) object.commitTimestamp = $root.google.protobuf.Timestamp.toObject(message.commitTimestamp, options); @@ -84713,6 +84739,8 @@ if (options.oneofs) object.MultiplexedSessionRetry = "precommitToken"; } + if (message.snapshotTimestamp != null && message.hasOwnProperty("snapshotTimestamp")) + object.snapshotTimestamp = $root.google.protobuf.Timestamp.toObject(message.snapshotTimestamp, options); return object; }; diff --git a/protos/protos.json b/protos/protos.json index dfcf55244..484f08bf5 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -8977,6 +8977,10 @@ "precommitToken": { "type": "MultiplexedSessionPrecommitToken", "id": 4 + }, + "snapshotTimestamp": { + "type": "google.protobuf.Timestamp", + "id": 5 } }, "nested": { From 380e7705a23a692168db386ba5426c91bf1587b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 23 Jul 2025 10:17:10 +0530 Subject: [PATCH 28/31] feat: proto changes for an internal api (#2356) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: proto changes for an internal api PiperOrigin-RevId: 786045230 Source-Link: 
https://github.com/googleapis/googleapis/commit/2a2ea87fcad7327e0afcfeaa84ec4d4b014f11a3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a0a01cf19f9ee6d2c8c4f8cb11fd3790c81f2e73 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTBhMDFjZjE5ZjllZTZkMmM4YzRmOGNiMTFmZDM3OTBjODFmMmU3MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../database/v1/spanner_database_admin.proto | 30 + protos/protos.d.ts | 233 ++++++++ protos/protos.js | 513 ++++++++++++++++++ protos/protos.json | 55 ++ src/v1/database_admin_client.ts | 147 +++++ src/v1/database_admin_client_config.json | 4 + test/gapic_database_admin_v1.ts | 92 ++++ 7 files changed, 1074 insertions(+) diff --git a/protos/google/spanner/admin/database/v1/spanner_database_admin.proto b/protos/google/spanner/admin/database/v1/spanner_database_admin.proto index 36e06f1e1..d41a4114c 100644 --- a/protos/google/spanner/admin/database/v1/spanner_database_admin.proto +++ b/protos/google/spanner/admin/database/v1/spanner_database_admin.proto @@ -27,6 +27,7 @@ import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; import "google/spanner/admin/database/v1/backup.proto"; import "google/spanner/admin/database/v1/backup_schedule.proto"; import "google/spanner/admin/database/v1/common.proto"; @@ -485,6 +486,13 @@ service DatabaseAdmin { }; option (google.api.method_signature) = "parent"; } + + // This is an internal API called by Spanner Graph jobs. You should never need + // to call this API directly. + rpc InternalUpdateGraphOperation(InternalUpdateGraphOperationRequest) + returns (InternalUpdateGraphOperationResponse) { + option (google.api.method_signature) = "database,operation_id"; + } } // Information about the database restore. @@ -1282,3 +1290,25 @@ message SplitPoints { google.protobuf.Timestamp expire_time = 5 [(google.api.field_behavior) = OPTIONAL]; } + +// Internal request proto, do not use directly. +message InternalUpdateGraphOperationRequest { + // Internal field, do not use directly. + string database = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "spanner.googleapis.com/Database" + } + ]; + // Internal field, do not use directly. + string operation_id = 2 [(google.api.field_behavior) = REQUIRED]; + // Internal field, do not use directly. + string vm_identity_token = 5 [(google.api.field_behavior) = REQUIRED]; + // Internal field, do not use directly. + double progress = 3 [(google.api.field_behavior) = OPTIONAL]; + // Internal field, do not use directly. + google.rpc.Status status = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// Internal response proto, do not use directly. +message InternalUpdateGraphOperationResponse {} diff --git a/protos/protos.d.ts b/protos/protos.d.ts index d779f994b..f5acc8627 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -10589,6 +10589,20 @@ export namespace google { * @returns Promise */ public listBackupSchedules(request: google.spanner.admin.database.v1.IListBackupSchedulesRequest): Promise; + + /** + * Calls InternalUpdateGraphOperation. 
+ * @param request InternalUpdateGraphOperationRequest message or plain object + * @param callback Node-style callback called with the error, if any, and InternalUpdateGraphOperationResponse + */ + public internalUpdateGraphOperation(request: google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest, callback: google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperationCallback): void; + + /** + * Calls InternalUpdateGraphOperation. + * @param request InternalUpdateGraphOperationRequest message or plain object + * @returns Promise + */ + public internalUpdateGraphOperation(request: google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest): Promise; } namespace DatabaseAdmin { @@ -10774,6 +10788,13 @@ export namespace google { * @param [response] ListBackupSchedulesResponse */ type ListBackupSchedulesCallback = (error: (Error|null), response?: google.spanner.admin.database.v1.ListBackupSchedulesResponse) => void; + + /** + * Callback as used by {@link google.spanner.admin.database.v1.DatabaseAdmin|internalUpdateGraphOperation}. + * @param error Error, if any + * @param [response] InternalUpdateGraphOperationResponse + */ + type InternalUpdateGraphOperationCallback = (error: (Error|null), response?: google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse) => void; } /** Properties of a RestoreInfo. */ @@ -13873,6 +13894,218 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } } + + /** Properties of an InternalUpdateGraphOperationRequest. */ + interface IInternalUpdateGraphOperationRequest { + + /** InternalUpdateGraphOperationRequest database */ + database?: (string|null); + + /** InternalUpdateGraphOperationRequest operationId */ + operationId?: (string|null); + + /** InternalUpdateGraphOperationRequest vmIdentityToken */ + vmIdentityToken?: (string|null); + + /** InternalUpdateGraphOperationRequest progress */ + progress?: (number|null); + + /** InternalUpdateGraphOperationRequest status */ + status?: (google.rpc.IStatus|null); + } + + /** Represents an InternalUpdateGraphOperationRequest. */ + class InternalUpdateGraphOperationRequest implements IInternalUpdateGraphOperationRequest { + + /** + * Constructs a new InternalUpdateGraphOperationRequest. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest); + + /** InternalUpdateGraphOperationRequest database. */ + public database: string; + + /** InternalUpdateGraphOperationRequest operationId. */ + public operationId: string; + + /** InternalUpdateGraphOperationRequest vmIdentityToken. */ + public vmIdentityToken: string; + + /** InternalUpdateGraphOperationRequest progress. */ + public progress: number; + + /** InternalUpdateGraphOperationRequest status. */ + public status?: (google.rpc.IStatus|null); + + /** + * Creates a new InternalUpdateGraphOperationRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns InternalUpdateGraphOperationRequest instance + */ + public static create(properties?: google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest): google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest; + + /** + * Encodes the specified InternalUpdateGraphOperationRequest message. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest.verify|verify} messages. 
+ * @param message InternalUpdateGraphOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified InternalUpdateGraphOperationRequest message, length delimited. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest.verify|verify} messages. + * @param message InternalUpdateGraphOperationRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an InternalUpdateGraphOperationRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns InternalUpdateGraphOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest; + + /** + * Decodes an InternalUpdateGraphOperationRequest message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns InternalUpdateGraphOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest; + + /** + * Verifies an InternalUpdateGraphOperationRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an InternalUpdateGraphOperationRequest message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns InternalUpdateGraphOperationRequest + */ + public static fromObject(object: { [k: string]: any }): google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest; + + /** + * Creates a plain object from an InternalUpdateGraphOperationRequest message. Also converts values to other types if specified. + * @param message InternalUpdateGraphOperationRequest + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this InternalUpdateGraphOperationRequest to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for InternalUpdateGraphOperationRequest + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + + /** Properties of an InternalUpdateGraphOperationResponse. */ + interface IInternalUpdateGraphOperationResponse { + } + + /** Represents an InternalUpdateGraphOperationResponse. 
*/ + class InternalUpdateGraphOperationResponse implements IInternalUpdateGraphOperationResponse { + + /** + * Constructs a new InternalUpdateGraphOperationResponse. + * @param [properties] Properties to set + */ + constructor(properties?: google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse); + + /** + * Creates a new InternalUpdateGraphOperationResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns InternalUpdateGraphOperationResponse instance + */ + public static create(properties?: google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse): google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse; + + /** + * Encodes the specified InternalUpdateGraphOperationResponse message. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse.verify|verify} messages. + * @param message InternalUpdateGraphOperationResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified InternalUpdateGraphOperationResponse message, length delimited. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse.verify|verify} messages. + * @param message InternalUpdateGraphOperationResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an InternalUpdateGraphOperationResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns InternalUpdateGraphOperationResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse; + + /** + * Decodes an InternalUpdateGraphOperationResponse message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns InternalUpdateGraphOperationResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse; + + /** + * Verifies an InternalUpdateGraphOperationResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an InternalUpdateGraphOperationResponse message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns InternalUpdateGraphOperationResponse + */ + public static fromObject(object: { [k: string]: any }): google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse; + + /** + * Creates a plain object from an InternalUpdateGraphOperationResponse message. 
Also converts values to other types if specified. + * @param message InternalUpdateGraphOperationResponse + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this InternalUpdateGraphOperationResponse to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for InternalUpdateGraphOperationResponse + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } } } diff --git a/protos/protos.js b/protos/protos.js index 5f06a5fa4..b0e8a8db3 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -28306,6 +28306,39 @@ * @variation 2 */ + /** + * Callback as used by {@link google.spanner.admin.database.v1.DatabaseAdmin|internalUpdateGraphOperation}. + * @memberof google.spanner.admin.database.v1.DatabaseAdmin + * @typedef InternalUpdateGraphOperationCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse} [response] InternalUpdateGraphOperationResponse + */ + + /** + * Calls InternalUpdateGraphOperation. + * @function internalUpdateGraphOperation + * @memberof google.spanner.admin.database.v1.DatabaseAdmin + * @instance + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest} request InternalUpdateGraphOperationRequest message or plain object + * @param {google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperationCallback} callback Node-style callback called with the error, if any, and InternalUpdateGraphOperationResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(DatabaseAdmin.prototype.internalUpdateGraphOperation = function internalUpdateGraphOperation(request, callback) { + return this.rpcCall(internalUpdateGraphOperation, $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest, $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse, request, callback); + }, "name", { value: "InternalUpdateGraphOperation" }); + + /** + * Calls InternalUpdateGraphOperation. + * @function internalUpdateGraphOperation + * @memberof google.spanner.admin.database.v1.DatabaseAdmin + * @instance + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest} request InternalUpdateGraphOperationRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + return DatabaseAdmin; })(); @@ -36039,6 +36072,486 @@ return SplitPoints; })(); + v1.InternalUpdateGraphOperationRequest = (function() { + + /** + * Properties of an InternalUpdateGraphOperationRequest. + * @memberof google.spanner.admin.database.v1 + * @interface IInternalUpdateGraphOperationRequest + * @property {string|null} [database] InternalUpdateGraphOperationRequest database + * @property {string|null} [operationId] InternalUpdateGraphOperationRequest operationId + * @property {string|null} [vmIdentityToken] InternalUpdateGraphOperationRequest vmIdentityToken + * @property {number|null} [progress] InternalUpdateGraphOperationRequest progress + * @property {google.rpc.IStatus|null} [status] InternalUpdateGraphOperationRequest status + */ + + /** + * Constructs a new InternalUpdateGraphOperationRequest. 
+ * @memberof google.spanner.admin.database.v1 + * @classdesc Represents an InternalUpdateGraphOperationRequest. + * @implements IInternalUpdateGraphOperationRequest + * @constructor + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest=} [properties] Properties to set + */ + function InternalUpdateGraphOperationRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * InternalUpdateGraphOperationRequest database. + * @member {string} database + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @instance + */ + InternalUpdateGraphOperationRequest.prototype.database = ""; + + /** + * InternalUpdateGraphOperationRequest operationId. + * @member {string} operationId + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @instance + */ + InternalUpdateGraphOperationRequest.prototype.operationId = ""; + + /** + * InternalUpdateGraphOperationRequest vmIdentityToken. + * @member {string} vmIdentityToken + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @instance + */ + InternalUpdateGraphOperationRequest.prototype.vmIdentityToken = ""; + + /** + * InternalUpdateGraphOperationRequest progress. + * @member {number} progress + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @instance + */ + InternalUpdateGraphOperationRequest.prototype.progress = 0; + + /** + * InternalUpdateGraphOperationRequest status. + * @member {google.rpc.IStatus|null|undefined} status + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @instance + */ + InternalUpdateGraphOperationRequest.prototype.status = null; + + /** + * Creates a new InternalUpdateGraphOperationRequest instance using the specified properties. + * @function create + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest=} [properties] Properties to set + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest} InternalUpdateGraphOperationRequest instance + */ + InternalUpdateGraphOperationRequest.create = function create(properties) { + return new InternalUpdateGraphOperationRequest(properties); + }; + + /** + * Encodes the specified InternalUpdateGraphOperationRequest message. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest.verify|verify} messages. 
+ * @function encode + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest} message InternalUpdateGraphOperationRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + InternalUpdateGraphOperationRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.database != null && Object.hasOwnProperty.call(message, "database")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.database); + if (message.operationId != null && Object.hasOwnProperty.call(message, "operationId")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.operationId); + if (message.progress != null && Object.hasOwnProperty.call(message, "progress")) + writer.uint32(/* id 3, wireType 1 =*/25).double(message.progress); + if (message.vmIdentityToken != null && Object.hasOwnProperty.call(message, "vmIdentityToken")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.vmIdentityToken); + if (message.status != null && Object.hasOwnProperty.call(message, "status")) + $root.google.rpc.Status.encode(message.status, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + return writer; + }; + + /** + * Encodes the specified InternalUpdateGraphOperationRequest message, length delimited. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest.verify|verify} messages. + * @function encodeDelimited + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest} message InternalUpdateGraphOperationRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + InternalUpdateGraphOperationRequest.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an InternalUpdateGraphOperationRequest message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest} InternalUpdateGraphOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + InternalUpdateGraphOperationRequest.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + case 1: { + message.database = reader.string(); + break; + } + case 2: { + message.operationId = reader.string(); + break; + } + case 5: { + message.vmIdentityToken = reader.string(); + break; + } + case 3: { + message.progress = reader.double(); + break; + } + case 6: { + message.status = $root.google.rpc.Status.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an InternalUpdateGraphOperationRequest message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest} InternalUpdateGraphOperationRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + InternalUpdateGraphOperationRequest.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an InternalUpdateGraphOperationRequest message. + * @function verify + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + InternalUpdateGraphOperationRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.database != null && message.hasOwnProperty("database")) + if (!$util.isString(message.database)) + return "database: string expected"; + if (message.operationId != null && message.hasOwnProperty("operationId")) + if (!$util.isString(message.operationId)) + return "operationId: string expected"; + if (message.vmIdentityToken != null && message.hasOwnProperty("vmIdentityToken")) + if (!$util.isString(message.vmIdentityToken)) + return "vmIdentityToken: string expected"; + if (message.progress != null && message.hasOwnProperty("progress")) + if (typeof message.progress !== "number") + return "progress: number expected"; + if (message.status != null && message.hasOwnProperty("status")) { + var error = $root.google.rpc.Status.verify(message.status); + if (error) + return "status." + error; + } + return null; + }; + + /** + * Creates an InternalUpdateGraphOperationRequest message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest} InternalUpdateGraphOperationRequest + */ + InternalUpdateGraphOperationRequest.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest) + return object; + var message = new $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest(); + if (object.database != null) + message.database = String(object.database); + if (object.operationId != null) + message.operationId = String(object.operationId); + if (object.vmIdentityToken != null) + message.vmIdentityToken = String(object.vmIdentityToken); + if (object.progress != null) + message.progress = Number(object.progress); + if (object.status != null) { + if (typeof object.status !== "object") + throw TypeError(".google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest.status: object expected"); + message.status = $root.google.rpc.Status.fromObject(object.status); + } + return message; + }; + + /** + * Creates a plain object from an InternalUpdateGraphOperationRequest message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest} message InternalUpdateGraphOperationRequest + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + InternalUpdateGraphOperationRequest.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) { + object.database = ""; + object.operationId = ""; + object.progress = 0; + object.vmIdentityToken = ""; + object.status = null; + } + if (message.database != null && message.hasOwnProperty("database")) + object.database = message.database; + if (message.operationId != null && message.hasOwnProperty("operationId")) + object.operationId = message.operationId; + if (message.progress != null && message.hasOwnProperty("progress")) + object.progress = options.json && !isFinite(message.progress) ? String(message.progress) : message.progress; + if (message.vmIdentityToken != null && message.hasOwnProperty("vmIdentityToken")) + object.vmIdentityToken = message.vmIdentityToken; + if (message.status != null && message.hasOwnProperty("status")) + object.status = $root.google.rpc.Status.toObject(message.status, options); + return object; + }; + + /** + * Converts this InternalUpdateGraphOperationRequest to JSON. 
+ * @function toJSON + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @instance + * @returns {Object.} JSON object + */ + InternalUpdateGraphOperationRequest.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for InternalUpdateGraphOperationRequest + * @function getTypeUrl + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + InternalUpdateGraphOperationRequest.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest"; + }; + + return InternalUpdateGraphOperationRequest; + })(); + + v1.InternalUpdateGraphOperationResponse = (function() { + + /** + * Properties of an InternalUpdateGraphOperationResponse. + * @memberof google.spanner.admin.database.v1 + * @interface IInternalUpdateGraphOperationResponse + */ + + /** + * Constructs a new InternalUpdateGraphOperationResponse. + * @memberof google.spanner.admin.database.v1 + * @classdesc Represents an InternalUpdateGraphOperationResponse. + * @implements IInternalUpdateGraphOperationResponse + * @constructor + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse=} [properties] Properties to set + */ + function InternalUpdateGraphOperationResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new InternalUpdateGraphOperationResponse instance using the specified properties. + * @function create + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse=} [properties] Properties to set + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse} InternalUpdateGraphOperationResponse instance + */ + InternalUpdateGraphOperationResponse.create = function create(properties) { + return new InternalUpdateGraphOperationResponse(properties); + }; + + /** + * Encodes the specified InternalUpdateGraphOperationResponse message. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse.verify|verify} messages. + * @function encode + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse} message InternalUpdateGraphOperationResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + InternalUpdateGraphOperationResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified InternalUpdateGraphOperationResponse message, length delimited. Does not implicitly {@link google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse.verify|verify} messages. 
+ * @function encodeDelimited + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse} message InternalUpdateGraphOperationResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + InternalUpdateGraphOperationResponse.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an InternalUpdateGraphOperationResponse message from the specified reader or buffer. + * @function decode + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse} InternalUpdateGraphOperationResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + InternalUpdateGraphOperationResponse.decode = function decode(reader, length, error) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + if (tag === error) + break; + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an InternalUpdateGraphOperationResponse message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse} InternalUpdateGraphOperationResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + InternalUpdateGraphOperationResponse.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an InternalUpdateGraphOperationResponse message. + * @function verify + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + InternalUpdateGraphOperationResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates an InternalUpdateGraphOperationResponse message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {Object.} object Plain object + * @returns {google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse} InternalUpdateGraphOperationResponse + */ + InternalUpdateGraphOperationResponse.fromObject = function fromObject(object) { + if (object instanceof $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse) + return object; + return new $root.google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse(); + }; + + /** + * Creates a plain object from an InternalUpdateGraphOperationResponse message. Also converts values to other types if specified. + * @function toObject + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse} message InternalUpdateGraphOperationResponse + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + InternalUpdateGraphOperationResponse.toObject = function toObject() { + return {}; + }; + + /** + * Converts this InternalUpdateGraphOperationResponse to JSON. + * @function toJSON + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @instance + * @returns {Object.} JSON object + */ + InternalUpdateGraphOperationResponse.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for InternalUpdateGraphOperationResponse + * @function getTypeUrl + * @memberof google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + InternalUpdateGraphOperationResponse.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse"; + }; + + return InternalUpdateGraphOperationResponse; + })(); + return v1; })(); diff --git a/protos/protos.json b/protos/protos.json index 484f08bf5..1b54c45df 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -3205,6 +3205,18 @@ "(google.api.method_signature)": "parent" } ] + }, + "InternalUpdateGraphOperation": { + "requestType": "InternalUpdateGraphOperationRequest", + "responseType": "InternalUpdateGraphOperationResponse", + "options": { + "(google.api.method_signature)": "database,operation_id" + }, + "parsedOptions": [ + { + "(google.api.method_signature)": "database,operation_id" + } + ] } } }, @@ -3886,6 +3898,49 @@ } } } + }, + "InternalUpdateGraphOperationRequest": { + "fields": { + "database": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "REQUIRED", + "(google.api.resource_reference).type": "spanner.googleapis.com/Database" + } + }, + "operationId": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "vmIdentityToken": { + "type": "string", + "id": 5, + "options": { + "(google.api.field_behavior)": "REQUIRED" + } + }, + "progress": { + "type": "double", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "status": { + "type": "google.rpc.Status", + "id": 6, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + 
} + }, + "InternalUpdateGraphOperationResponse": { + "fields": {} } } } diff --git a/src/v1/database_admin_client.ts b/src/v1/database_admin_client.ts index 6e18d1482..dd5d214e5 100644 --- a/src/v1/database_admin_client.ts +++ b/src/v1/database_admin_client.ts @@ -472,6 +472,7 @@ export class DatabaseAdminClient { 'updateBackupSchedule', 'deleteBackupSchedule', 'listBackupSchedules', + 'internalUpdateGraphOperation', ]; for (const methodName of databaseAdminStubMethods) { const callPromise = this.databaseAdminStub.then( @@ -2573,6 +2574,152 @@ export class DatabaseAdminClient { throw error; }); } + /** + * This is an internal API called by Spanner Graph jobs. You should never need + * to call this API directly. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.database + * Internal field, do not use directly. + * @param {string} request.operationId + * Internal field, do not use directly. + * @param {string} request.vmIdentityToken + * Internal field, do not use directly. + * @param {number} [request.progress] + * Internal field, do not use directly. + * @param {google.rpc.Status} [request.status] + * Internal field, do not use directly. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing {@link protos.google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse|InternalUpdateGraphOperationResponse}. + * Please see the {@link https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods | documentation } + * for more details and examples. 
+ */ + internalUpdateGraphOperation( + request?: protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest, + options?: CallOptions, + ): Promise< + [ + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + ( + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | undefined + ), + {} | undefined, + ] + >; + internalUpdateGraphOperation( + request: protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest, + options: CallOptions, + callback: Callback< + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + internalUpdateGraphOperation( + request: protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest, + callback: Callback< + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | null + | undefined, + {} | null | undefined + >, + ): void; + internalUpdateGraphOperation( + request?: protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest, + optionsOrCallback?: + | CallOptions + | Callback< + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | null + | undefined, + {} | null | undefined + >, + ): Promise< + [ + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + ( + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | undefined + ), + {} | undefined, + ] + > | void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + this.initialize().catch(err => { + throw err; + }); + this._log.info('internalUpdateGraphOperation request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('internalUpdateGraphOperation response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .internalUpdateGraphOperation(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse, + ( + | protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('internalUpdateGraphOperation response %j', response); + return [response, options, rawResponse]; + }, + ) + .catch((error: any) => { + if ( + error && + 'statusDetails' in error && + error.statusDetails instanceof Array + ) { + const protos = this._gaxModule.protobuf.Root.fromJSON( + jsonProtos, + ) as unknown as gax.protobuf.Type; + error.statusDetails = decodeAnyProtosInArray( + error.statusDetails, + protos, + ); + } + throw error; + }); + } /** * Creates a new Cloud Spanner database and starts to prepare it for serving. diff --git a/src/v1/database_admin_client_config.json b/src/v1/database_admin_client_config.json index 24246ae3c..5b58878bc 100644 --- a/src/v1/database_admin_client_config.json +++ b/src/v1/database_admin_client_config.json @@ -158,6 +158,10 @@ "timeout_millis": 3600000, "retry_codes_name": "idempotent", "retry_params_name": "e9fafda332ce8a1702dc1575de3ca81c4feb4799" + }, + "InternalUpdateGraphOperation": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" } } } diff --git a/test/gapic_database_admin_v1.ts b/test/gapic_database_admin_v1.ts index 7f42bef4b..dd4ae9e5c 100644 --- a/test/gapic_database_admin_v1.ts +++ b/test/gapic_database_admin_v1.ts @@ -2206,6 +2206,98 @@ describe('v1.DatabaseAdminClient', () => { }); }); + describe('internalUpdateGraphOperation', () => { + it('invokes internalUpdateGraphOperation without error', async () => { + const client = new databaseadminModule.v1.DatabaseAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new protos.google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse(), + ); + client.innerApiCalls.internalUpdateGraphOperation = + stubSimpleCall(expectedResponse); + const [response] = await client.internalUpdateGraphOperation(request); + assert.deepStrictEqual(response, expectedResponse); + }); + + it('invokes internalUpdateGraphOperation without error using callback', async () => { + const client = new databaseadminModule.v1.DatabaseAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest(), + ); + const expectedResponse = generateSampleMessage( + new protos.google.spanner.admin.database.v1.InternalUpdateGraphOperationResponse(), + ); + client.innerApiCalls.internalUpdateGraphOperation = + stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.internalUpdateGraphOperation( + request, + ( + err?: Error | null, + result?: protos.google.spanner.admin.database.v1.IInternalUpdateGraphOperationResponse | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + }); + + it('invokes 
internalUpdateGraphOperation with error', async () => { + const client = new databaseadminModule.v1.DatabaseAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest(), + ); + const expectedError = new Error('expected'); + client.innerApiCalls.internalUpdateGraphOperation = stubSimpleCall( + undefined, + expectedError, + ); + await assert.rejects( + client.internalUpdateGraphOperation(request), + expectedError, + ); + }); + + it('invokes internalUpdateGraphOperation with closed client', async () => { + const client = new databaseadminModule.v1.DatabaseAdminClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + await client.initialize(); + const request = generateSampleMessage( + new protos.google.spanner.admin.database.v1.InternalUpdateGraphOperationRequest(), + ); + const expectedError = new Error('The client has already been closed.'); + client.close().catch(err => { + throw err; + }); + await assert.rejects( + client.internalUpdateGraphOperation(request), + expectedError, + ); + }); + }); + describe('createDatabase', () => { it('invokes createDatabase without error', async () => { const client = new databaseadminModule.v1.DatabaseAdminClient({ From 2753dd1118fceb6ab825b6688b2f233aa8122532 Mon Sep 17 00:00:00 2001 From: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> Date: Thu, 24 Jul 2025 05:18:26 +0000 Subject: [PATCH 29/31] chore: remove is dependency (#2358) --- package.json | 1 - src/batch-transaction.ts | 4 +- src/codec.ts | 56 +++++++----- src/common-grpc/service.ts | 29 ++++--- src/helper.ts | 160 +++++++++++++++++++++++++++++++++++ src/partial-result-stream.ts | 10 +-- src/session-pool.ts | 4 +- src/transaction.ts | 9 +- system-test/spanner.ts | 14 +-- test/codec.ts | 4 +- test/common/service.ts | 4 +- 11 files changed, 237 insertions(+), 58 deletions(-) diff --git a/package.json b/package.json index 8c1cd01f5..72259f809 100644 --- a/package.json +++ b/package.json @@ -78,7 +78,6 @@ "google-auth-library": "^10.0.0-rc.1", "google-gax": "^5.0.1-rc.0", "grpc-gcp": "^1.0.1", - "is": "^3.3.0", "lodash.snakecase": "^4.1.1", "merge-stream": "^2.0.0", "p-queue": "^6.0.2", diff --git a/src/batch-transaction.ts b/src/batch-transaction.ts index f52e8c9b7..4f88d9a65 100644 --- a/src/batch-transaction.ts +++ b/src/batch-transaction.ts @@ -17,7 +17,6 @@ import {PreciseDate} from '@google-cloud/precise-date'; import {promisifyAll} from '@google-cloud/promisify'; import * as extend from 'extend'; -import * as is from 'is'; import { ExecuteSqlRequest, ReadCallback, @@ -36,6 +35,7 @@ import { } from '../src/common'; import {startTrace, setSpanError, traceConfig} from './instrument'; import {injectRequestIDIntoHeaders} from './request_id_header'; +import {isString} from './helper'; export interface TransactionIdentifier { session: string | Session; @@ -450,7 +450,7 @@ class BatchTransaction extends Snapshot { */ executeStream(partition) { // TODO: Instrument the streams with Otel. 
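    // The check below dispatches on the partition shape: partitions created by
    // createReadPartitions() carry a string `table` and are served through
    // createReadStream(), while partitions from createQueryPartitions() carry
    // `sql` and fall through to runStream(). A minimal, illustrative usage
    // sketch (assumes the existing BatchTransaction partitioning API; names
    // such as `batchTxn` are placeholders):
    //
    //   const [partitions] = await batchTxn.createQueryPartitions({sql: 'SELECT 1'});
    //   for (const partition of partitions) {
    //     batchTxn.executeStream(partition).on('data', row => console.log(row));
    //   }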
- if (is.string(partition.table)) { + if (isString(partition.table)) { return this.createReadStream(partition.table, partition); } return this.runStream(partition); diff --git a/src/codec.ts b/src/codec.ts index 7ef850c3d..702fdbb61 100644 --- a/src/codec.ts +++ b/src/codec.ts @@ -15,9 +15,21 @@ */ import {GrpcService} from './common-grpc/service'; import {PreciseDate} from '@google-cloud/precise-date'; -import {toArray} from './helper'; +import { + isArray, + isBoolean, + isDate, + isDecimal, + isInfinite, + isInteger, + isNull, + isNumber, + isObject, + isString, + isUndefined, + toArray, +} from './helper'; import {Big} from 'big.js'; -import * as is from 'is'; import {common as p} from 'protobufjs'; import {google as spannerClient} from '../protos/protos'; import {GoogleError} from 'google-gax'; @@ -440,19 +452,19 @@ export class Interval { * @param nanoseconds nanoseconds part of the `Interval` */ constructor(months: number, days: number, nanoseconds: bigint) { - if (!is.integer(months)) { + if (!isInteger(months)) { throw new GoogleError( `Invalid months: ${months}, months should be an integral value`, ); } - if (!is.integer(days)) { + if (!isInteger(days)) { throw new GoogleError( `Invalid days: ${days}, days should be an integral value`, ); } - if (is.null(nanoseconds) || is.undefined(nanoseconds)) { + if (isNull(nanoseconds) || isUndefined(nanoseconds)) { throw new GoogleError( `Invalid nanoseconds: ${nanoseconds}, nanoseconds should be a valid bigint value`, ); @@ -502,7 +514,7 @@ export class Interval { * Constructs an `Interval` with specified seconds. */ static fromSeconds(seconds: number): Interval { - if (!is.integer(seconds)) { + if (!isInteger(seconds)) { throw new GoogleError( `Invalid seconds: ${seconds}, seconds should be an integral value`, ); @@ -518,7 +530,7 @@ export class Interval { * Constructs an `Interval` with specified milliseconds. */ static fromMilliseconds(milliseconds: number): Interval { - if (!is.integer(milliseconds)) { + if (!isInteger(milliseconds)) { throw new GoogleError( `Invalid milliseconds: ${milliseconds}, milliseconds should be an integral value`, ); @@ -534,7 +546,7 @@ export class Interval { * Constructs an `Interval` with specified microseconds. 
*/ static fromMicroseconds(microseconds: number): Interval { - if (!is.integer(microseconds)) { + if (!isInteger(microseconds)) { throw new GoogleError( `Invalid microseconds: ${microseconds}, microseconds should be an integral value`, ); @@ -817,7 +829,7 @@ function decode( type: spannerClient.spanner.v1.Type, columnMetadata?: object, ): Value { - if (is.null(value)) { + if (isNull(value)) { return null; } @@ -954,11 +966,11 @@ function encode(value: Value): p.IValue { * @returns {*} */ function encodeValue(value: Value): Value { - if (is.number(value) && !is.decimal(value)) { + if (isNumber(value) && !isDecimal(value)) { return value.toString(); } - if (is.date(value)) { + if (isDate(value)) { return value.toJSON(); } @@ -990,7 +1002,7 @@ function encodeValue(value: Value): Value { return Array.from(value).map(field => encodeValue(field.value)); } - if (is.array(value)) { + if (isArray(value)) { return value.map(encodeValue); } @@ -1002,7 +1014,7 @@ function encodeValue(value: Value): Value { return value.toISO8601(); } - if (is.object(value)) { + if (isObject(value)) { return JSON.stringify(value); } @@ -1099,17 +1111,17 @@ interface FieldType extends Type { */ function getType(value: Value): Type { const isSpecialNumber = - is.infinite(value) || (is.number(value) && isNaN(value)); + isInfinite(value) || (isNumber(value) && isNaN(value)); if (value instanceof Float32) { return {type: 'float32'}; } - if (is.decimal(value) || isSpecialNumber || value instanceof Float) { + if (isDecimal(value) || isSpecialNumber || value instanceof Float) { return {type: 'float64'}; } - if (is.number(value) || value instanceof Int) { + if (isNumber(value) || value instanceof Int) { return {type: 'int64'}; } @@ -1141,11 +1153,11 @@ function getType(value: Value): Type { return {type: 'enum', fullName: value.fullName}; } - if (is.boolean(value)) { + if (isBoolean(value)) { return {type: 'bool'}; } - if (is.string(value)) { + if (isString(value)) { return {type: 'string'}; } @@ -1157,7 +1169,7 @@ function getType(value: Value): Type { return {type: 'date'}; } - if (is.date(value)) { + if (isDate(value)) { return {type: 'timestamp'}; } @@ -1170,13 +1182,13 @@ function getType(value: Value): Type { }; } - if (is.array(value)) { + if (isArray(value)) { let child; for (let i = 0; i < value.length; i++) { child = value[i]; - if (!is.null(child)) { + if (!isNull(child)) { break; } } @@ -1187,7 +1199,7 @@ function getType(value: Value): Type { }; } - if (is.object(value)) { + if (isObject(value)) { return {type: 'json'}; } diff --git a/src/common-grpc/service.ts b/src/common-grpc/service.ts index ca4bda373..ba7691ce5 100644 --- a/src/common-grpc/service.ts +++ b/src/common-grpc/service.ts @@ -37,10 +37,19 @@ import * as duplexify from 'duplexify'; import {EventEmitter} from 'events'; import * as extend from 'extend'; import {grpc, GrpcClient} from 'google-gax'; -import * as is from 'is'; import {Request, Response} from 'teeny-request'; import * as retryRequest from 'retry-request'; import {Duplex, PassThrough} from 'stream'; +import { + isArray, + isBoolean, + isError, + isNull, + isNumber, + isObject, + isString, + isUndefined, +} from '../helper'; const gaxProtoPath = path.join( path.dirname(require.resolve('google-gax')), @@ -265,7 +274,7 @@ export class ObjectToStructConverter { for (const prop in obj) { if (Object.prototype.hasOwnProperty.call(obj, prop)) { const value = obj[prop]; - if (is.undefined(value)) { + if (isUndefined(value)) { continue; } convertedObject.fields[prop] = this.encodeValue_(value); @@ 
-292,19 +301,19 @@ export class ObjectToStructConverter { encodeValue_(value: {}) { let convertedValue; - if (is.null(value)) { + if (isNull(value)) { convertedValue = { nullValue: 0, }; - } else if (is.number(value)) { + } else if (isNumber(value)) { convertedValue = { numberValue: value, }; - } else if (is.string(value)) { + } else if (isString(value)) { convertedValue = { stringValue: value, }; - } else if (is.boolean(value)) { + } else if (isBoolean(value)) { convertedValue = { boolValue: value, }; @@ -312,7 +321,7 @@ export class ObjectToStructConverter { convertedValue = { blobValue: value, }; - } else if (is.object(value)) { + } else if (isObject(value)) { if (this.seenObjects.has(value)) { // Circular reference. if (!this.removeCircular) { @@ -331,7 +340,7 @@ export class ObjectToStructConverter { structValue: this.convert(value), }; } - } else if (is.array(value)) { + } else if (isArray(value)) { convertedValue = { listValue: { values: (value as Array<{}>).map(this.encodeValue_.bind(this)), @@ -723,7 +732,7 @@ export class GrpcService extends Service { const grpcMetadata = this.grpcMetadata; const grpcOpts: GrpcOptions = {}; - if (is.number(protoOpts.timeout)) { + if (isNumber(protoOpts.timeout)) { grpcOpts.deadline = GrpcService.createDeadline_(protoOpts.timeout); } @@ -821,7 +830,7 @@ export class GrpcService extends Service { * @return {error|null} */ static decorateError_(err: Error): Error | null { - const errorObj = is.error(err) ? err : {}; + const errorObj = isError(err) ? err : {}; return GrpcService.decorateGrpcResponse_(errorObj, err); } diff --git a/src/helper.ts b/src/helper.ts index fbd16c4a9..30692c13c 100644 --- a/src/helper.ts +++ b/src/helper.ts @@ -116,3 +116,163 @@ export function toArray(value: any) { return [value]; } + +/** + * Checks if a value is defined. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is NOT `undefined`, otherwise `false`. + */ +export function isDefined(value: any): boolean { + return typeof value !== 'undefined'; +} + +/** + * Checks if a value is null. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is null, otherwise `false`. + */ +export function isNull(value: any): boolean { + return value === null; +} + +/** + * Checks if a value is undefined. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is `undefined`, otherwise `false`. + */ +export function isUndefined(value: any): boolean { + return typeof value === 'undefined'; +} + +/** + * Checks if a value is empty. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is empty, otherwise `false`. + */ +export function isEmpty(value: any): boolean { + const type = Object.prototype.toString.call(value); + if ( + type === '[object Array]' || + type === '[object Arguments]' || + type === '[object String]' + ) { + return value.length === 0; + } + if (type === '[object Object]') { + for (const key in value) { + if (Object.prototype.hasOwnProperty.call(value, key)) { + return false; + } + } + return true; + } + return !value; +} + +/** + * Checks if a value is a plain javascript object. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is an object, otherwise `false`. + */ +export function isObject(value: any): boolean { + return Object.prototype.toString.call(value) === '[object Object]'; +} + +/** + * Checks if a value is a string. + * @param {*} value The value to check. 
+ * @returns {Boolean} `true` if the value is string, otherwise `false`. + */ +export function isString(value: any): boolean { + return Object.prototype.toString.call(value) === '[object String]'; +} + +/** + * Checks if a value is an array. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is an array, otherwise `false`. + */ +export function isArray(value: any): boolean { + return Array.isArray(value); +} + +/** + * Checks if a value is a Date object. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is a `Date` object, otherwise `false`. + */ +export function isDate(value: any): boolean { + return Object.prototype.toString.call(value) === '[object Date]'; +} + +/** + * Checks if a value is a boolean. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is boolean, otherwise `false`. + */ +export function isBoolean(value: any): boolean { + return Object.prototype.toString.call(value) === '[object Boolean]'; +} + +/** + * Checks if a value is a number. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is a number, otherwise `false`. + */ +export function isNumber(value: any): boolean { + return Object.prototype.toString.call(value) === '[object Number]'; +} + +/** + * Checks if a value is an integer. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is an integer, otherwise `false`. + */ +export function isInteger(value: any): boolean { + return Number.isInteger(value); +} + +/** + * Checks if a value is `NaN`. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is `NaN`, otherwise `false`. + */ +export function isActualNaN(value: any): boolean { + return value !== value; +} + +/** + * Checks if a value is a non-integer (decimal) number. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is a decimal number, otherwise `false`. + */ +export function isDecimal(value: any): boolean { + // A number is a decimal if it's a number but not an integer. + return ( + isNumber(value) && + !isInfinite(value) && + !isActualNaN(value) && + value % 1 !== 0 + ); +} + +/** + * Checks if a value is `Infinity` or `-Infinity`. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is infinite, otherwise `false`. + */ +export function isInfinite(value: any): boolean { + return value === Infinity || value === -Infinity; +} + +/** + * Checks if a value is an `Error` object. + * @param {*} value The value to check. + * @returns {Boolean} `true` if the value is an `Error` object, otherwise `false`. 
+ */ +export function isError(value: any): boolean { + return ( + value instanceof Error || + Object.prototype.toString.call(value) === '[object Error]' + ); +} diff --git a/src/partial-result-stream.ts b/src/partial-result-stream.ts index c79c366ad..e72e5c494 100644 --- a/src/partial-result-stream.ts +++ b/src/partial-result-stream.ts @@ -17,7 +17,6 @@ import {GrpcService} from './common-grpc/service'; import * as checkpointStream from 'checkpoint-stream'; import * as eventsIntercept from 'events-intercept'; -import * as is from 'is'; import mergeStream = require('merge-stream'); import {common as p} from 'protobufjs'; import {Readable, Transform} from 'stream'; @@ -28,6 +27,7 @@ import {DeadlineError, isRetryableInternalError} from './transaction-runner'; import {codec, JSONOptions, Json, Field, Value} from './codec'; import {google} from '../protos/protos'; import * as stream from 'stream'; +import {isDefined, isEmpty, isString} from './helper'; export type ResumeToken = string | Uint8Array; @@ -241,7 +241,7 @@ export class PartialResultStream extends Transform implements ResultEvents { } let res = true; - if (!is.empty(chunk.values)) { + if (!isEmpty(chunk.values)) { res = this._addChunk(chunk); } @@ -429,7 +429,7 @@ export class PartialResultStream extends Transform implements ResultEvents { return [PartialResultStream.mergeLists(type, head, tail)]; } - if (is.string(head) && is.string(tail)) { + if (isString(head) && isString(tail)) { return [head + tail]; } @@ -540,7 +540,7 @@ export function partialResultStream( }); }; const makeRequest = (): void => { - if (is.defined(lastResumeToken) && lastResumeToken.length > 0) { + if (isDefined(lastResumeToken) && lastResumeToken.length > 0) { partialRSStream._resetPendingValues(); } lastRequestStream = requestFn(lastResumeToken); @@ -620,5 +620,5 @@ export function partialResultStream( } function _hasResumeToken(chunk: google.spanner.v1.PartialResultSet): boolean { - return is.defined(chunk.resumeToken) && chunk.resumeToken.length > 0; + return isDefined(chunk.resumeToken) && chunk.resumeToken.length > 0; } diff --git a/src/session-pool.ts b/src/session-pool.ts index c64a20b49..78c0290f1 100644 --- a/src/session-pool.ts +++ b/src/session-pool.ts @@ -15,7 +15,6 @@ */ import {EventEmitter} from 'events'; -import * as is from 'is'; import PQueue from 'p-queue'; import {Database} from './database'; @@ -37,6 +36,7 @@ import { isDefaultCredentialsNotSetError, isProjectIdNotSetInEnvironmentError, isCreateSessionPermissionError, + isInfinite, } from './helper'; /** @@ -870,7 +870,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { const timeout = this.options.acquireTimeout; let removeTimeoutListener = () => {}; - if (!is.infinite(timeout!)) { + if (!isInfinite(timeout!)) { const elapsed = Date.now() - startTime!; const remaining = timeout! 
- elapsed; diff --git a/src/transaction.ts b/src/transaction.ts index 5bc3a21ee..456af01a9 100644 --- a/src/transaction.ts +++ b/src/transaction.ts @@ -16,11 +16,10 @@ import {DateStruct, PreciseDate} from '@google-cloud/precise-date'; import {promisifyAll} from '@google-cloud/promisify'; -import {toArray} from './helper'; +import {isEmpty, toArray} from './helper'; import Long = require('long'); import {EventEmitter} from 'events'; import {grpc, CallOptions, ServiceError, Status, GoogleError} from 'google-gax'; -import * as is from 'is'; import {common as p} from 'protobufjs'; import {finished, Readable, PassThrough, Stream} from 'stream'; @@ -1638,7 +1637,7 @@ export class Snapshot extends EventEmitter { }); } - if (is.empty(keySet)) { + if (isEmpty(keySet)) { keySet.all = true; } @@ -1686,7 +1685,7 @@ export class Snapshot extends EventEmitter { // If we didn't detect a convenience format, we'll just assume that // they passed in a protobuf timestamp. - if (is.empty(readOnly)) { + if (isEmpty(readOnly)) { Object.assign(readOnly, options); } @@ -1725,7 +1724,7 @@ export class Snapshot extends EventEmitter { params.fields = fields; } - if (!is.empty(typeMap)) { + if (!isEmpty(typeMap)) { Object.keys(typeMap).forEach(param => { const type = typeMap[param]; paramTypes[param] = codec.createTypeObject(type); diff --git a/system-test/spanner.ts b/system-test/spanner.ts index d8125695b..961550b7e 100644 --- a/system-test/spanner.ts +++ b/system-test/spanner.ts @@ -21,7 +21,6 @@ import pLimit = require('p-limit'); import concat = require('concat-stream'); import * as crypto from 'crypto'; import * as extend from 'extend'; -import * as is from 'is'; import * as uuid from 'uuid'; import { Backup, @@ -59,6 +58,7 @@ import { CreateQueryPartitionsResponse, CreateReadPartitionsResponse, } from '../src/batch-transaction'; +import {isNull, isNumber} from '../src/helper'; const fs = require('fs'); const SKIP_BACKUPS = process.env.SKIP_BACKUPS; @@ -5069,7 +5069,7 @@ describe('Spanner', () => { assert.ifError(err); const expected = values.map(val => { - return is.number(val) ? {value: String(val)} : val; + return isNumber(val) ? {value: String(val)} : val; }); assert.strictEqual( @@ -5290,7 +5290,7 @@ describe('Spanner', () => { assert.ifError(err); const expected = values.map(val => { - return is.number(val) ? Spanner.float32(val) : val; + return isNumber(val) ? Spanner.float32(val) : val; }); for (let i = 0; i < rows[0][0].value.length; i++) { @@ -5448,7 +5448,7 @@ describe('Spanner', () => { assert.ifError(err); const expected = values.map(val => { - return is.number(val) ? {value: val + ''} : val; + return isNumber(val) ? {value: val + ''} : val; }); assert.strictEqual( @@ -5533,7 +5533,7 @@ describe('Spanner', () => { assert.ifError(err); const expected = values.map(val => { - return is.number(val) ? {value: val} : val; + return isNumber(val) ? {value: val} : val; }); assert.strictEqual( @@ -5662,7 +5662,7 @@ describe('Spanner', () => { assert.ifError(err); const expected = values.map(val => { - return is.number(val) ? {value: val + ''} : val; + return isNumber(val) ? {value: val + ''} : val; }); assert.strictEqual( @@ -6107,7 +6107,7 @@ describe('Spanner', () => { assert.ifError(err); const returnedValues = rows[0][0].value.map(val => { - return is.null(val) ? val : Spanner.date(val); + return isNull(val) ? 
val : Spanner.date(val); }); assert.deepStrictEqual(returnedValues, values); diff --git a/test/codec.ts b/test/codec.ts index 9d364d493..43ea6a17c 100644 --- a/test/codec.ts +++ b/test/codec.ts @@ -25,9 +25,9 @@ import {google} from '../protos/protos'; import {GoogleError} from 'google-gax'; import {util} from 'protobufjs'; import Long = util.Long; +import {isString} from '../src/helper'; const singer = require('./data/singer'); const music = singer.examples.spanner.music; -const is = require('is'); describe('codec', () => { let codec; @@ -946,7 +946,7 @@ describe('codec', () => { it('should store value as string', () => { const protoEnum = new codec.ProtoEnum(enumParams); - assert(is.string(protoEnum.value)); + assert(isString(protoEnum.value)); }); it('should throw an error when value is non numeric string and enumObject is not passed', () => { diff --git a/test/common/service.ts b/test/common/service.ts index 1707c9f9c..45c05354b 100644 --- a/test/common/service.ts +++ b/test/common/service.ts @@ -21,11 +21,11 @@ import {before, beforeEach, after, afterEach, describe, it} from 'mocha'; import * as duplexify from 'duplexify'; import * as extend from 'extend'; import {grpc, GrpcClient} from 'google-gax'; -import * as is from 'is'; import * as proxyquire from 'proxyquire'; import * as retryRequest from 'retry-request'; import * as sn from 'sinon'; import {PassThrough} from 'stream'; +import {isDate} from '../../src/helper'; const sinon = sn.createSandbox(); const glob = global as {} as {GCLOUD_SANDBOX_ENV?: boolean | {}}; @@ -864,7 +864,7 @@ describe('GrpcService', () => { grpcService.getService_ = () => { return { method(reqOpts, metadata, grpcOpts) { - assert(is.date(grpcOpts.deadline)); + assert(isDate(grpcOpts.deadline)); assert(grpcOpts.deadline.getTime() > expectedDeadlineRange[0]); assert(grpcOpts.deadline.getTime() < expectedDeadlineRange[1]); From fce8a71f13cc8955940186c8d4f9097393800aff Mon Sep 17 00:00:00 2001 From: surbhigarg92 Date: Mon, 28 Jul 2025 23:11:56 +0530 Subject: [PATCH 30/31] chore: pin dependency version (#2353) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: pin dependency version * updated gax dependency * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert gax package * pin: babel/types version * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- package.json | 9 +- protos/protos.d.ts | 570 +------------ protos/protos.js | 1980 +------------------------------------------- protos/protos.json | 263 +----- 4 files changed, 56 insertions(+), 2766 deletions(-) diff --git a/package.json b/package.json index 72259f809..d92df37c8 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "events-intercept": "^2.0.0", "extend": "^3.0.2", "google-auth-library": "^10.0.0-rc.1", - "google-gax": "^5.0.1-rc.0", + "google-gax": "5.0.0", "grpc-gcp": "^1.0.1", "lodash.snakecase": "^4.1.1", "merge-stream": "^2.0.0", @@ -86,8 +86,11 @@ "split-array-stream": "^2.0.0", "stack-trace": "0.0.10", "stream-events": "^1.0.5", - "teeny-request": "^10.0.0", - "through2": "^4.0.2" + "teeny-request": "^10.0.0", + "through2": "^4.0.2", + "@babel/traverse": "7.27.7", + "@babel/core": "7.27.7", + "@babel/helpers": "7.27.6" }, "devDependencies": { "@opentelemetry/sdk-trace-base": "^2.0.0", diff --git a/protos/protos.d.ts b/protos/protos.d.ts 
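A short aside on the pin above, before the regenerated protos: removing the caret from google-gax makes npm resolve exactly 5.0.0 rather than any compatible 5.x release, and the large protos.d.ts/protos.js diff that follows appears to be the result of regenerating against the older protobuf surface bundled with that pinned gax. The snippet below is a hypothetical illustration using the semver package, which is not a dependency added by this patch:

import * as semver from 'semver';

semver.satisfies('5.1.3', '^5.0.1-rc.0');  // true  -> caret range accepts any newer 5.x
semver.satisfies('5.1.3', '5.0.0');        // false -> exact pin admits only 5.0.0
semver.satisfies('5.0.0', '5.0.0');        // true
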
index f5acc8627..d6f0e8362 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -223,7 +223,6 @@ export namespace google { /** Edition enum. */ enum Edition { EDITION_UNKNOWN = 0, - EDITION_LEGACY = 900, EDITION_PROTO2 = 998, EDITION_PROTO3 = 999, EDITION_2023 = 1000, @@ -254,9 +253,6 @@ export namespace google { /** FileDescriptorProto weakDependency */ weakDependency?: (number[]|null); - /** FileDescriptorProto optionDependency */ - optionDependency?: (string[]|null); - /** FileDescriptorProto messageType */ messageType?: (google.protobuf.IDescriptorProto[]|null); @@ -306,9 +302,6 @@ export namespace google { /** FileDescriptorProto weakDependency. */ public weakDependency: number[]; - /** FileDescriptorProto optionDependency. */ - public optionDependency: string[]; - /** FileDescriptorProto messageType. */ public messageType: google.protobuf.IDescriptorProto[]; @@ -443,9 +436,6 @@ export namespace google { /** DescriptorProto reservedName */ reservedName?: (string[]|null); - - /** DescriptorProto visibility */ - visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents a DescriptorProto. */ @@ -487,9 +477,6 @@ export namespace google { /** DescriptorProto reservedName. */ public reservedName: string[]; - /** DescriptorProto visibility. */ - public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); - /** * Creates a new DescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -1337,9 +1324,6 @@ export namespace google { /** EnumDescriptorProto reservedName */ reservedName?: (string[]|null); - - /** EnumDescriptorProto visibility */ - visibility?: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility|null); } /** Represents an EnumDescriptorProto. */ @@ -1366,9 +1350,6 @@ export namespace google { /** EnumDescriptorProto reservedName. */ public reservedName: string[]; - /** EnumDescriptorProto visibility. */ - public visibility: (google.protobuf.SymbolVisibility|keyof typeof google.protobuf.SymbolVisibility); - /** * Creates a new EnumDescriptorProto instance using the specified properties. * @param [properties] Properties to set @@ -2303,9 +2284,6 @@ export namespace google { /** FieldOptions features */ features?: (google.protobuf.IFeatureSet|null); - /** FieldOptions featureSupport */ - featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); - /** FieldOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); @@ -2361,9 +2339,6 @@ export namespace google { /** FieldOptions features. */ public features?: (google.protobuf.IFeatureSet|null); - /** FieldOptions featureSupport. */ - public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); - /** FieldOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -2584,121 +2559,6 @@ export namespace google { */ public static getTypeUrl(typeUrlPrefix?: string): string; } - - /** Properties of a FeatureSupport. 
*/ - interface IFeatureSupport { - - /** FeatureSupport editionIntroduced */ - editionIntroduced?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); - - /** FeatureSupport editionDeprecated */ - editionDeprecated?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); - - /** FeatureSupport deprecationWarning */ - deprecationWarning?: (string|null); - - /** FeatureSupport editionRemoved */ - editionRemoved?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); - } - - /** Represents a FeatureSupport. */ - class FeatureSupport implements IFeatureSupport { - - /** - * Constructs a new FeatureSupport. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.FieldOptions.IFeatureSupport); - - /** FeatureSupport editionIntroduced. */ - public editionIntroduced: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); - - /** FeatureSupport editionDeprecated. */ - public editionDeprecated: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); - - /** FeatureSupport deprecationWarning. */ - public deprecationWarning: string; - - /** FeatureSupport editionRemoved. */ - public editionRemoved: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); - - /** - * Creates a new FeatureSupport instance using the specified properties. - * @param [properties] Properties to set - * @returns FeatureSupport instance - */ - public static create(properties?: google.protobuf.FieldOptions.IFeatureSupport): google.protobuf.FieldOptions.FeatureSupport; - - /** - * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. - * @param message FeatureSupport message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. - * @param message FeatureSupport message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.FieldOptions.IFeatureSupport, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a FeatureSupport message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns FeatureSupport - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FieldOptions.FeatureSupport; - - /** - * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns FeatureSupport - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FieldOptions.FeatureSupport; - - /** - * Verifies a FeatureSupport message. 
- * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns FeatureSupport - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FieldOptions.FeatureSupport; - - /** - * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. - * @param message FeatureSupport - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FieldOptions.FeatureSupport, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this FeatureSupport to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for FeatureSupport - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } } /** Properties of an OneofOptions. */ @@ -2937,9 +2797,6 @@ export namespace google { /** EnumValueOptions debugRedact */ debugRedact?: (boolean|null); - /** EnumValueOptions featureSupport */ - featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); - /** EnumValueOptions uninterpretedOption */ uninterpretedOption?: (google.protobuf.IUninterpretedOption[]|null); } @@ -2962,9 +2819,6 @@ export namespace google { /** EnumValueOptions debugRedact. */ public debugRedact: boolean; - /** EnumValueOptions featureSupport. */ - public featureSupport?: (google.protobuf.FieldOptions.IFeatureSupport|null); - /** EnumValueOptions uninterpretedOption. */ public uninterpretedOption: google.protobuf.IUninterpretedOption[]; @@ -3557,12 +3411,6 @@ export namespace google { /** FeatureSet jsonFormat */ jsonFormat?: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat|null); - - /** FeatureSet enforceNamingStyle */ - enforceNamingStyle?: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle|null); - - /** FeatureSet defaultSymbolVisibility */ - defaultSymbolVisibility?: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null); } /** Represents a FeatureSet. */ @@ -3592,12 +3440,6 @@ export namespace google { /** FeatureSet jsonFormat. */ public jsonFormat: (google.protobuf.FeatureSet.JsonFormat|keyof typeof google.protobuf.FeatureSet.JsonFormat); - /** FeatureSet enforceNamingStyle. */ - public enforceNamingStyle: (google.protobuf.FeatureSet.EnforceNamingStyle|keyof typeof google.protobuf.FeatureSet.EnforceNamingStyle); - - /** FeatureSet defaultSymbolVisibility. */ - public defaultSymbolVisibility: (google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|keyof typeof google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility); - /** * Creates a new FeatureSet instance using the specified properties. * @param [properties] Properties to set @@ -3720,116 +3562,6 @@ export namespace google { ALLOW = 1, LEGACY_BEST_EFFORT = 2 } - - /** EnforceNamingStyle enum. */ - enum EnforceNamingStyle { - ENFORCE_NAMING_STYLE_UNKNOWN = 0, - STYLE2024 = 1, - STYLE_LEGACY = 2 - } - - /** Properties of a VisibilityFeature. 
*/ - interface IVisibilityFeature { - } - - /** Represents a VisibilityFeature. */ - class VisibilityFeature implements IVisibilityFeature { - - /** - * Constructs a new VisibilityFeature. - * @param [properties] Properties to set - */ - constructor(properties?: google.protobuf.FeatureSet.IVisibilityFeature); - - /** - * Creates a new VisibilityFeature instance using the specified properties. - * @param [properties] Properties to set - * @returns VisibilityFeature instance - */ - public static create(properties?: google.protobuf.FeatureSet.IVisibilityFeature): google.protobuf.FeatureSet.VisibilityFeature; - - /** - * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. - * @param message VisibilityFeature message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. - * @param message VisibilityFeature message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.protobuf.FeatureSet.IVisibilityFeature, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a VisibilityFeature message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns VisibilityFeature - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.protobuf.FeatureSet.VisibilityFeature; - - /** - * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns VisibilityFeature - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.protobuf.FeatureSet.VisibilityFeature; - - /** - * Verifies a VisibilityFeature message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns VisibilityFeature - */ - public static fromObject(object: { [k: string]: any }): google.protobuf.FeatureSet.VisibilityFeature; - - /** - * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. - * @param message VisibilityFeature - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.protobuf.FeatureSet.VisibilityFeature, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this VisibilityFeature to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for VisibilityFeature - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - - namespace VisibilityFeature { - - /** DefaultSymbolVisibility enum. */ - enum DefaultSymbolVisibility { - DEFAULT_SYMBOL_VISIBILITY_UNKNOWN = 0, - EXPORT_ALL = 1, - EXPORT_TOP_LEVEL = 2, - LOCAL_ALL = 3, - STRICT = 4 - } - } } /** Properties of a FeatureSetDefaults. */ @@ -3949,11 +3681,8 @@ export namespace google { /** FeatureSetEditionDefault edition */ edition?: (google.protobuf.Edition|keyof typeof google.protobuf.Edition|null); - /** FeatureSetEditionDefault overridableFeatures */ - overridableFeatures?: (google.protobuf.IFeatureSet|null); - - /** FeatureSetEditionDefault fixedFeatures */ - fixedFeatures?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault features */ + features?: (google.protobuf.IFeatureSet|null); } /** Represents a FeatureSetEditionDefault. */ @@ -3968,11 +3697,8 @@ export namespace google { /** FeatureSetEditionDefault edition. */ public edition: (google.protobuf.Edition|keyof typeof google.protobuf.Edition); - /** FeatureSetEditionDefault overridableFeatures. */ - public overridableFeatures?: (google.protobuf.IFeatureSet|null); - - /** FeatureSetEditionDefault fixedFeatures. */ - public fixedFeatures?: (google.protobuf.IFeatureSet|null); + /** FeatureSetEditionDefault features. */ + public features?: (google.protobuf.IFeatureSet|null); /** * Creates a new FeatureSetEditionDefault instance using the specified properties. @@ -4505,13 +4231,6 @@ export namespace google { } } - /** SymbolVisibility enum. */ - enum SymbolVisibility { - VISIBILITY_UNSET = 0, - VISIBILITY_LOCAL = 1, - VISIBILITY_EXPORT = 2 - } - /** Properties of an Any. */ interface IAny { @@ -5655,24 +5374,6 @@ export namespace google { /** Violation description */ description?: (string|null); - - /** Violation apiService */ - apiService?: (string|null); - - /** Violation quotaMetric */ - quotaMetric?: (string|null); - - /** Violation quotaId */ - quotaId?: (string|null); - - /** Violation quotaDimensions */ - quotaDimensions?: ({ [k: string]: string }|null); - - /** Violation quotaValue */ - quotaValue?: (number|Long|string|null); - - /** Violation futureQuotaValue */ - futureQuotaValue?: (number|Long|string|null); } /** Represents a Violation. */ @@ -5690,24 +5391,6 @@ export namespace google { /** Violation description. */ public description: string; - /** Violation apiService. */ - public apiService: string; - - /** Violation quotaMetric. */ - public quotaMetric: string; - - /** Violation quotaId. */ - public quotaId: string; - - /** Violation quotaDimensions. */ - public quotaDimensions: { [k: string]: string }; - - /** Violation quotaValue. */ - public quotaValue: (number|Long|string); - - /** Violation futureQuotaValue. */ - public futureQuotaValue?: (number|Long|string|null); - /** * Creates a new Violation instance using the specified properties. * @param [properties] Properties to set @@ -6103,12 +5786,6 @@ export namespace google { /** FieldViolation description */ description?: (string|null); - - /** FieldViolation reason */ - reason?: (string|null); - - /** FieldViolation localizedMessage */ - localizedMessage?: (google.rpc.ILocalizedMessage|null); } /** Represents a FieldViolation. 
*/ @@ -6126,12 +5803,6 @@ export namespace google { /** FieldViolation description. */ public description: string; - /** FieldViolation reason. */ - public reason: string; - - /** FieldViolation localizedMessage. */ - public localizedMessage?: (google.rpc.ILocalizedMessage|null); - /** * Creates a new FieldViolation instance using the specified properties. * @param [properties] Properties to set @@ -37604,9 +37275,6 @@ export namespace google { /** CommonLanguageSettings destinations */ destinations?: (google.api.ClientLibraryDestination[]|null); - - /** CommonLanguageSettings selectiveGapicGeneration */ - selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); } /** Represents a CommonLanguageSettings. */ @@ -37624,9 +37292,6 @@ export namespace google { /** CommonLanguageSettings destinations. */ public destinations: google.api.ClientLibraryDestination[]; - /** CommonLanguageSettings selectiveGapicGeneration. */ - public selectiveGapicGeneration?: (google.api.ISelectiveGapicGeneration|null); - /** * Creates a new CommonLanguageSettings instance using the specified properties. * @param [properties] Properties to set @@ -38327,9 +37992,6 @@ export namespace google { /** PythonSettings common */ common?: (google.api.ICommonLanguageSettings|null); - - /** PythonSettings experimentalFeatures */ - experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); } /** Represents a PythonSettings. */ @@ -38344,9 +38006,6 @@ export namespace google { /** PythonSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); - /** PythonSettings experimentalFeatures. */ - public experimentalFeatures?: (google.api.PythonSettings.IExperimentalFeatures|null); - /** * Creates a new PythonSettings instance using the specified properties. * @param [properties] Properties to set @@ -38425,118 +38084,6 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } - namespace PythonSettings { - - /** Properties of an ExperimentalFeatures. */ - interface IExperimentalFeatures { - - /** ExperimentalFeatures restAsyncIoEnabled */ - restAsyncIoEnabled?: (boolean|null); - - /** ExperimentalFeatures protobufPythonicTypesEnabled */ - protobufPythonicTypesEnabled?: (boolean|null); - - /** ExperimentalFeatures unversionedPackageDisabled */ - unversionedPackageDisabled?: (boolean|null); - } - - /** Represents an ExperimentalFeatures. */ - class ExperimentalFeatures implements IExperimentalFeatures { - - /** - * Constructs a new ExperimentalFeatures. - * @param [properties] Properties to set - */ - constructor(properties?: google.api.PythonSettings.IExperimentalFeatures); - - /** ExperimentalFeatures restAsyncIoEnabled. */ - public restAsyncIoEnabled: boolean; - - /** ExperimentalFeatures protobufPythonicTypesEnabled. */ - public protobufPythonicTypesEnabled: boolean; - - /** ExperimentalFeatures unversionedPackageDisabled. */ - public unversionedPackageDisabled: boolean; - - /** - * Creates a new ExperimentalFeatures instance using the specified properties. - * @param [properties] Properties to set - * @returns ExperimentalFeatures instance - */ - public static create(properties?: google.api.PythonSettings.IExperimentalFeatures): google.api.PythonSettings.ExperimentalFeatures; - - /** - * Encodes the specified ExperimentalFeatures message. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. 
- * @param message ExperimentalFeatures message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. - * @param message ExperimentalFeatures message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.api.PythonSettings.IExperimentalFeatures, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an ExperimentalFeatures message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns ExperimentalFeatures - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.PythonSettings.ExperimentalFeatures; - - /** - * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns ExperimentalFeatures - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.PythonSettings.ExperimentalFeatures; - - /** - * Verifies an ExperimentalFeatures message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. - * @param object Plain object - * @returns ExperimentalFeatures - */ - public static fromObject(object: { [k: string]: any }): google.api.PythonSettings.ExperimentalFeatures; - - /** - * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. - * @param message ExperimentalFeatures - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.api.PythonSettings.ExperimentalFeatures, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this ExperimentalFeatures to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for ExperimentalFeatures - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - } - /** Properties of a NodeSettings. */ interface INodeSettings { @@ -38863,9 +38410,6 @@ export namespace google { /** GoSettings common */ common?: (google.api.ICommonLanguageSettings|null); - - /** GoSettings renamedServices */ - renamedServices?: ({ [k: string]: string }|null); } /** Represents a GoSettings. */ @@ -38880,9 +38424,6 @@ export namespace google { /** GoSettings common. */ public common?: (google.api.ICommonLanguageSettings|null); - /** GoSettings renamedServices. 
*/ - public renamedServices: { [k: string]: string }; - /** * Creates a new GoSettings instance using the specified properties. * @param [properties] Properties to set @@ -39207,109 +38748,6 @@ export namespace google { PACKAGE_MANAGER = 20 } - /** Properties of a SelectiveGapicGeneration. */ - interface ISelectiveGapicGeneration { - - /** SelectiveGapicGeneration methods */ - methods?: (string[]|null); - - /** SelectiveGapicGeneration generateOmittedAsInternal */ - generateOmittedAsInternal?: (boolean|null); - } - - /** Represents a SelectiveGapicGeneration. */ - class SelectiveGapicGeneration implements ISelectiveGapicGeneration { - - /** - * Constructs a new SelectiveGapicGeneration. - * @param [properties] Properties to set - */ - constructor(properties?: google.api.ISelectiveGapicGeneration); - - /** SelectiveGapicGeneration methods. */ - public methods: string[]; - - /** SelectiveGapicGeneration generateOmittedAsInternal. */ - public generateOmittedAsInternal: boolean; - - /** - * Creates a new SelectiveGapicGeneration instance using the specified properties. - * @param [properties] Properties to set - * @returns SelectiveGapicGeneration instance - */ - public static create(properties?: google.api.ISelectiveGapicGeneration): google.api.SelectiveGapicGeneration; - - /** - * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. - * @param message SelectiveGapicGeneration message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. - * @param message SelectiveGapicGeneration message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encodeDelimited(message: google.api.ISelectiveGapicGeneration, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns SelectiveGapicGeneration - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.api.SelectiveGapicGeneration; - - /** - * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. - * @param reader Reader or buffer to decode from - * @returns SelectiveGapicGeneration - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.api.SelectiveGapicGeneration; - - /** - * Verifies a SelectiveGapicGeneration message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - - /** - * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. 
- * @param object Plain object - * @returns SelectiveGapicGeneration - */ - public static fromObject(object: { [k: string]: any }): google.api.SelectiveGapicGeneration; - - /** - * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. - * @param message SelectiveGapicGeneration - * @param [options] Conversion options - * @returns Plain object - */ - public static toObject(message: google.api.SelectiveGapicGeneration, options?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this SelectiveGapicGeneration to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; - - /** - * Gets the default type url for SelectiveGapicGeneration - * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns The default type url - */ - public static getTypeUrl(typeUrlPrefix?: string): string; - } - /** LaunchStage enum. */ enum LaunchStage { LAUNCH_STAGE_UNSPECIFIED = 0, diff --git a/protos/protos.js b/protos/protos.js index b0e8a8db3..449fab434 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -522,7 +522,6 @@ * @name google.protobuf.Edition * @enum {number} * @property {number} EDITION_UNKNOWN=0 EDITION_UNKNOWN value - * @property {number} EDITION_LEGACY=900 EDITION_LEGACY value * @property {number} EDITION_PROTO2=998 EDITION_PROTO2 value * @property {number} EDITION_PROTO3=999 EDITION_PROTO3 value * @property {number} EDITION_2023=1000 EDITION_2023 value @@ -537,7 +536,6 @@ protobuf.Edition = (function() { var valuesById = {}, values = Object.create(valuesById); values[valuesById[0] = "EDITION_UNKNOWN"] = 0; - values[valuesById[900] = "EDITION_LEGACY"] = 900; values[valuesById[998] = "EDITION_PROTO2"] = 998; values[valuesById[999] = "EDITION_PROTO3"] = 999; values[valuesById[1000] = "EDITION_2023"] = 1000; @@ -562,7 +560,6 @@ * @property {Array.|null} [dependency] FileDescriptorProto dependency * @property {Array.|null} [publicDependency] FileDescriptorProto publicDependency * @property {Array.|null} [weakDependency] FileDescriptorProto weakDependency - * @property {Array.|null} [optionDependency] FileDescriptorProto optionDependency * @property {Array.|null} [messageType] FileDescriptorProto messageType * @property {Array.|null} [enumType] FileDescriptorProto enumType * @property {Array.|null} [service] FileDescriptorProto service @@ -585,7 +582,6 @@ this.dependency = []; this.publicDependency = []; this.weakDependency = []; - this.optionDependency = []; this.messageType = []; this.enumType = []; this.service = []; @@ -636,14 +632,6 @@ */ FileDescriptorProto.prototype.weakDependency = $util.emptyArray; - /** - * FileDescriptorProto optionDependency. - * @member {Array.} optionDependency - * @memberof google.protobuf.FileDescriptorProto - * @instance - */ - FileDescriptorProto.prototype.optionDependency = $util.emptyArray; - /** * FileDescriptorProto messageType. 
* @member {Array.} messageType @@ -765,9 +753,6 @@ writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 14, wireType 0 =*/112).int32(message.edition); - if (message.optionDependency != null && message.optionDependency.length) - for (var i = 0; i < message.optionDependency.length; ++i) - writer.uint32(/* id 15, wireType 2 =*/122).string(message.optionDependency[i]); return writer; }; @@ -840,12 +825,6 @@ message.weakDependency.push(reader.int32()); break; } - case 15: { - if (!(message.optionDependency && message.optionDependency.length)) - message.optionDependency = []; - message.optionDependency.push(reader.string()); - break; - } case 4: { if (!(message.messageType && message.messageType.length)) message.messageType = []; @@ -948,13 +927,6 @@ if (!$util.isInteger(message.weakDependency[i])) return "weakDependency: integer[] expected"; } - if (message.optionDependency != null && message.hasOwnProperty("optionDependency")) { - if (!Array.isArray(message.optionDependency)) - return "optionDependency: array expected"; - for (var i = 0; i < message.optionDependency.length; ++i) - if (!$util.isString(message.optionDependency[i])) - return "optionDependency: string[] expected"; - } if (message.messageType != null && message.hasOwnProperty("messageType")) { if (!Array.isArray(message.messageType)) return "messageType: array expected"; @@ -1009,7 +981,6 @@ default: return "edition: enum value expected"; case 0: - case 900: case 998: case 999: case 1000: @@ -1062,13 +1033,6 @@ for (var i = 0; i < object.weakDependency.length; ++i) message.weakDependency[i] = object.weakDependency[i] | 0; } - if (object.optionDependency) { - if (!Array.isArray(object.optionDependency)) - throw TypeError(".google.protobuf.FileDescriptorProto.optionDependency: array expected"); - message.optionDependency = []; - for (var i = 0; i < object.optionDependency.length; ++i) - message.optionDependency[i] = String(object.optionDependency[i]); - } if (object.messageType) { if (!Array.isArray(object.messageType)) throw TypeError(".google.protobuf.FileDescriptorProto.messageType: array expected"); @@ -1132,10 +1096,6 @@ case 0: message.edition = 0; break; - case "EDITION_LEGACY": - case 900: - message.edition = 900; - break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -1201,7 +1161,6 @@ object.extension = []; object.publicDependency = []; object.weakDependency = []; - object.optionDependency = []; } if (options.defaults) { object.name = ""; @@ -1258,11 +1217,6 @@ object.syntax = message.syntax; if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? 
message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; - if (message.optionDependency && message.optionDependency.length) { - object.optionDependency = []; - for (var j = 0; j < message.optionDependency.length; ++j) - object.optionDependency[j] = message.optionDependency[j]; - } return object; }; @@ -1311,7 +1265,6 @@ * @property {google.protobuf.IMessageOptions|null} [options] DescriptorProto options * @property {Array.|null} [reservedRange] DescriptorProto reservedRange * @property {Array.|null} [reservedName] DescriptorProto reservedName - * @property {google.protobuf.SymbolVisibility|null} [visibility] DescriptorProto visibility */ /** @@ -1417,14 +1370,6 @@ */ DescriptorProto.prototype.reservedName = $util.emptyArray; - /** - * DescriptorProto visibility. - * @member {google.protobuf.SymbolVisibility} visibility - * @memberof google.protobuf.DescriptorProto - * @instance - */ - DescriptorProto.prototype.visibility = 0; - /** * Creates a new DescriptorProto instance using the specified properties. * @function create @@ -1477,8 +1422,6 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 10, wireType 2 =*/82).string(message.reservedName[i]); - if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) - writer.uint32(/* id 11, wireType 0 =*/88).int32(message.visibility); return writer; }; @@ -1571,10 +1514,6 @@ message.reservedName.push(reader.string()); break; } - case 11: { - message.visibility = reader.int32(); - break; - } default: reader.skipType(tag & 7); break; @@ -1688,15 +1627,6 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } - if (message.visibility != null && message.hasOwnProperty("visibility")) - switch (message.visibility) { - default: - return "visibility: enum value expected"; - case 0: - case 1: - case 2: - break; - } return null; }; @@ -1796,26 +1726,6 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } - switch (object.visibility) { - default: - if (typeof object.visibility === "number") { - message.visibility = object.visibility; - break; - } - break; - case "VISIBILITY_UNSET": - case 0: - message.visibility = 0; - break; - case "VISIBILITY_LOCAL": - case 1: - message.visibility = 1; - break; - case "VISIBILITY_EXPORT": - case 2: - message.visibility = 2; - break; - } return message; }; @@ -1845,7 +1755,6 @@ if (options.defaults) { object.name = ""; object.options = null; - object.visibility = options.enums === String ? "VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -1891,8 +1800,6 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } - if (message.visibility != null && message.hasOwnProperty("visibility")) - object.visibility = options.enums === String ? $root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? 
message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -3937,7 +3844,6 @@ * @property {google.protobuf.IEnumOptions|null} [options] EnumDescriptorProto options * @property {Array.|null} [reservedRange] EnumDescriptorProto reservedRange * @property {Array.|null} [reservedName] EnumDescriptorProto reservedName - * @property {google.protobuf.SymbolVisibility|null} [visibility] EnumDescriptorProto visibility */ /** @@ -3998,14 +3904,6 @@ */ EnumDescriptorProto.prototype.reservedName = $util.emptyArray; - /** - * EnumDescriptorProto visibility. - * @member {google.protobuf.SymbolVisibility} visibility - * @memberof google.protobuf.EnumDescriptorProto - * @instance - */ - EnumDescriptorProto.prototype.visibility = 0; - /** * Creates a new EnumDescriptorProto instance using the specified properties. * @function create @@ -4043,8 +3941,6 @@ if (message.reservedName != null && message.reservedName.length) for (var i = 0; i < message.reservedName.length; ++i) writer.uint32(/* id 5, wireType 2 =*/42).string(message.reservedName[i]); - if (message.visibility != null && Object.hasOwnProperty.call(message, "visibility")) - writer.uint32(/* id 6, wireType 0 =*/48).int32(message.visibility); return writer; }; @@ -4107,10 +4003,6 @@ message.reservedName.push(reader.string()); break; } - case 6: { - message.visibility = reader.int32(); - break; - } default: reader.skipType(tag & 7); break; @@ -4179,15 +4071,6 @@ if (!$util.isString(message.reservedName[i])) return "reservedName: string[] expected"; } - if (message.visibility != null && message.hasOwnProperty("visibility")) - switch (message.visibility) { - default: - return "visibility: enum value expected"; - case 0: - case 1: - case 2: - break; - } return null; }; @@ -4237,26 +4120,6 @@ for (var i = 0; i < object.reservedName.length; ++i) message.reservedName[i] = String(object.reservedName[i]); } - switch (object.visibility) { - default: - if (typeof object.visibility === "number") { - message.visibility = object.visibility; - break; - } - break; - case "VISIBILITY_UNSET": - case 0: - message.visibility = 0; - break; - case "VISIBILITY_LOCAL": - case 1: - message.visibility = 1; - break; - case "VISIBILITY_EXPORT": - case 2: - message.visibility = 2; - break; - } return message; }; @@ -4281,7 +4144,6 @@ if (options.defaults) { object.name = ""; object.options = null; - object.visibility = options.enums === String ? "VISIBILITY_UNSET" : 0; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -4302,8 +4164,6 @@ for (var j = 0; j < message.reservedName.length; ++j) object.reservedName[j] = message.reservedName[j]; } - if (message.visibility != null && message.hasOwnProperty("visibility")) - object.visibility = options.enums === String ? $root.google.protobuf.SymbolVisibility[message.visibility] === undefined ? 
message.visibility : $root.google.protobuf.SymbolVisibility[message.visibility] : message.visibility; return object; }; @@ -6622,7 +6482,6 @@ * @property {Array.|null} [targets] FieldOptions targets * @property {Array.|null} [editionDefaults] FieldOptions editionDefaults * @property {google.protobuf.IFeatureSet|null} [features] FieldOptions features - * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] FieldOptions featureSupport * @property {Array.|null} [uninterpretedOption] FieldOptions uninterpretedOption * @property {Array.|null} [".google.api.fieldBehavior"] FieldOptions .google.api.fieldBehavior * @property {google.api.IResourceReference|null} [".google.api.resourceReference"] FieldOptions .google.api.resourceReference @@ -6743,14 +6602,6 @@ */ FieldOptions.prototype.features = null; - /** - * FieldOptions featureSupport. - * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport - * @memberof google.protobuf.FieldOptions - * @instance - */ - FieldOptions.prototype.featureSupport = null; - /** * FieldOptions uninterpretedOption. * @member {Array.} uninterpretedOption @@ -6825,8 +6676,6 @@ $root.google.protobuf.FieldOptions.EditionDefault.encode(message.editionDefaults[i], writer.uint32(/* id 20, wireType 2 =*/162).fork()).ldelim(); if (message.features != null && Object.hasOwnProperty.call(message, "features")) $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 21, wireType 2 =*/170).fork()).ldelim(); - if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) - $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 22, wireType 2 =*/178).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -6928,10 +6777,6 @@ message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } - case 22: { - message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); - break; - } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -7067,11 +6912,6 @@ if (error) return "features." + error; } - if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { - var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); - if (error) - return "featureSupport." 
+ error; - } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -7260,11 +7100,6 @@ throw TypeError(".google.protobuf.FieldOptions.features: object expected"); message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); } - if (object.featureSupport != null) { - if (typeof object.featureSupport !== "object") - throw TypeError(".google.protobuf.FieldOptions.featureSupport: object expected"); - message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); - } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.FieldOptions.uninterpretedOption: array expected"); @@ -7362,7 +7197,6 @@ object.debugRedact = false; object.retention = options.enums === String ? "RETENTION_UNKNOWN" : 0; object.features = null; - object.featureSupport = null; object[".google.api.resourceReference"] = null; } if (message.ctype != null && message.hasOwnProperty("ctype")) @@ -7395,8 +7229,6 @@ } if (message.features != null && message.hasOwnProperty("features")) object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); - if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) - object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -7669,7 +7501,6 @@ default: return "edition: enum value expected"; case 0: - case 900: case 998: case 999: case 1000: @@ -7711,10 +7542,6 @@ case 0: message.edition = 0; break; - case "EDITION_LEGACY": - case 900: - message.edition = 900; - break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -7814,488 +7641,6 @@ return EditionDefault; })(); - FieldOptions.FeatureSupport = (function() { - - /** - * Properties of a FeatureSupport. - * @memberof google.protobuf.FieldOptions - * @interface IFeatureSupport - * @property {google.protobuf.Edition|null} [editionIntroduced] FeatureSupport editionIntroduced - * @property {google.protobuf.Edition|null} [editionDeprecated] FeatureSupport editionDeprecated - * @property {string|null} [deprecationWarning] FeatureSupport deprecationWarning - * @property {google.protobuf.Edition|null} [editionRemoved] FeatureSupport editionRemoved - */ - - /** - * Constructs a new FeatureSupport. - * @memberof google.protobuf.FieldOptions - * @classdesc Represents a FeatureSupport. - * @implements IFeatureSupport - * @constructor - * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set - */ - function FeatureSupport(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * FeatureSupport editionIntroduced. - * @member {google.protobuf.Edition} editionIntroduced - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @instance - */ - FeatureSupport.prototype.editionIntroduced = 0; - - /** - * FeatureSupport editionDeprecated. 
- * @member {google.protobuf.Edition} editionDeprecated - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @instance - */ - FeatureSupport.prototype.editionDeprecated = 0; - - /** - * FeatureSupport deprecationWarning. - * @member {string} deprecationWarning - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @instance - */ - FeatureSupport.prototype.deprecationWarning = ""; - - /** - * FeatureSupport editionRemoved. - * @member {google.protobuf.Edition} editionRemoved - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @instance - */ - FeatureSupport.prototype.editionRemoved = 0; - - /** - * Creates a new FeatureSupport instance using the specified properties. - * @function create - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {google.protobuf.FieldOptions.IFeatureSupport=} [properties] Properties to set - * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport instance - */ - FeatureSupport.create = function create(properties) { - return new FeatureSupport(properties); - }; - - /** - * Encodes the specified FeatureSupport message. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. - * @function encode - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FeatureSupport.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.editionIntroduced != null && Object.hasOwnProperty.call(message, "editionIntroduced")) - writer.uint32(/* id 1, wireType 0 =*/8).int32(message.editionIntroduced); - if (message.editionDeprecated != null && Object.hasOwnProperty.call(message, "editionDeprecated")) - writer.uint32(/* id 2, wireType 0 =*/16).int32(message.editionDeprecated); - if (message.deprecationWarning != null && Object.hasOwnProperty.call(message, "deprecationWarning")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.deprecationWarning); - if (message.editionRemoved != null && Object.hasOwnProperty.call(message, "editionRemoved")) - writer.uint32(/* id 4, wireType 0 =*/32).int32(message.editionRemoved); - return writer; - }; - - /** - * Encodes the specified FeatureSupport message, length delimited. Does not implicitly {@link google.protobuf.FieldOptions.FeatureSupport.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {google.protobuf.FieldOptions.IFeatureSupport} message FeatureSupport message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - FeatureSupport.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a FeatureSupport message from the specified reader or buffer. 
- * @function decode - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FeatureSupport.decode = function decode(reader, length, error) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FieldOptions.FeatureSupport(); - while (reader.pos < end) { - var tag = reader.uint32(); - if (tag === error) - break; - switch (tag >>> 3) { - case 1: { - message.editionIntroduced = reader.int32(); - break; - } - case 2: { - message.editionDeprecated = reader.int32(); - break; - } - case 3: { - message.deprecationWarning = reader.string(); - break; - } - case 4: { - message.editionRemoved = reader.int32(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a FeatureSupport message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - FeatureSupport.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a FeatureSupport message. 
- * @function verify - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - FeatureSupport.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) - switch (message.editionIntroduced) { - default: - return "editionIntroduced: enum value expected"; - case 0: - case 900: - case 998: - case 999: - case 1000: - case 1001: - case 1: - case 2: - case 99997: - case 99998: - case 99999: - case 2147483647: - break; - } - if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) - switch (message.editionDeprecated) { - default: - return "editionDeprecated: enum value expected"; - case 0: - case 900: - case 998: - case 999: - case 1000: - case 1001: - case 1: - case 2: - case 99997: - case 99998: - case 99999: - case 2147483647: - break; - } - if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) - if (!$util.isString(message.deprecationWarning)) - return "deprecationWarning: string expected"; - if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) - switch (message.editionRemoved) { - default: - return "editionRemoved: enum value expected"; - case 0: - case 900: - case 998: - case 999: - case 1000: - case 1001: - case 1: - case 2: - case 99997: - case 99998: - case 99999: - case 2147483647: - break; - } - return null; - }; - - /** - * Creates a FeatureSupport message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FieldOptions.FeatureSupport} FeatureSupport - */ - FeatureSupport.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FieldOptions.FeatureSupport) - return object; - var message = new $root.google.protobuf.FieldOptions.FeatureSupport(); - switch (object.editionIntroduced) { - default: - if (typeof object.editionIntroduced === "number") { - message.editionIntroduced = object.editionIntroduced; - break; - } - break; - case "EDITION_UNKNOWN": - case 0: - message.editionIntroduced = 0; - break; - case "EDITION_LEGACY": - case 900: - message.editionIntroduced = 900; - break; - case "EDITION_PROTO2": - case 998: - message.editionIntroduced = 998; - break; - case "EDITION_PROTO3": - case 999: - message.editionIntroduced = 999; - break; - case "EDITION_2023": - case 1000: - message.editionIntroduced = 1000; - break; - case "EDITION_2024": - case 1001: - message.editionIntroduced = 1001; - break; - case "EDITION_1_TEST_ONLY": - case 1: - message.editionIntroduced = 1; - break; - case "EDITION_2_TEST_ONLY": - case 2: - message.editionIntroduced = 2; - break; - case "EDITION_99997_TEST_ONLY": - case 99997: - message.editionIntroduced = 99997; - break; - case "EDITION_99998_TEST_ONLY": - case 99998: - message.editionIntroduced = 99998; - break; - case "EDITION_99999_TEST_ONLY": - case 99999: - message.editionIntroduced = 99999; - break; - case "EDITION_MAX": - case 2147483647: - message.editionIntroduced = 2147483647; - break; - } - switch (object.editionDeprecated) { - default: - if (typeof object.editionDeprecated === "number") { - message.editionDeprecated = object.editionDeprecated; - 
break; - } - break; - case "EDITION_UNKNOWN": - case 0: - message.editionDeprecated = 0; - break; - case "EDITION_LEGACY": - case 900: - message.editionDeprecated = 900; - break; - case "EDITION_PROTO2": - case 998: - message.editionDeprecated = 998; - break; - case "EDITION_PROTO3": - case 999: - message.editionDeprecated = 999; - break; - case "EDITION_2023": - case 1000: - message.editionDeprecated = 1000; - break; - case "EDITION_2024": - case 1001: - message.editionDeprecated = 1001; - break; - case "EDITION_1_TEST_ONLY": - case 1: - message.editionDeprecated = 1; - break; - case "EDITION_2_TEST_ONLY": - case 2: - message.editionDeprecated = 2; - break; - case "EDITION_99997_TEST_ONLY": - case 99997: - message.editionDeprecated = 99997; - break; - case "EDITION_99998_TEST_ONLY": - case 99998: - message.editionDeprecated = 99998; - break; - case "EDITION_99999_TEST_ONLY": - case 99999: - message.editionDeprecated = 99999; - break; - case "EDITION_MAX": - case 2147483647: - message.editionDeprecated = 2147483647; - break; - } - if (object.deprecationWarning != null) - message.deprecationWarning = String(object.deprecationWarning); - switch (object.editionRemoved) { - default: - if (typeof object.editionRemoved === "number") { - message.editionRemoved = object.editionRemoved; - break; - } - break; - case "EDITION_UNKNOWN": - case 0: - message.editionRemoved = 0; - break; - case "EDITION_LEGACY": - case 900: - message.editionRemoved = 900; - break; - case "EDITION_PROTO2": - case 998: - message.editionRemoved = 998; - break; - case "EDITION_PROTO3": - case 999: - message.editionRemoved = 999; - break; - case "EDITION_2023": - case 1000: - message.editionRemoved = 1000; - break; - case "EDITION_2024": - case 1001: - message.editionRemoved = 1001; - break; - case "EDITION_1_TEST_ONLY": - case 1: - message.editionRemoved = 1; - break; - case "EDITION_2_TEST_ONLY": - case 2: - message.editionRemoved = 2; - break; - case "EDITION_99997_TEST_ONLY": - case 99997: - message.editionRemoved = 99997; - break; - case "EDITION_99998_TEST_ONLY": - case 99998: - message.editionRemoved = 99998; - break; - case "EDITION_99999_TEST_ONLY": - case 99999: - message.editionRemoved = 99999; - break; - case "EDITION_MAX": - case 2147483647: - message.editionRemoved = 2147483647; - break; - } - return message; - }; - - /** - * Creates a plain object from a FeatureSupport message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {google.protobuf.FieldOptions.FeatureSupport} message FeatureSupport - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - FeatureSupport.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.editionIntroduced = options.enums === String ? "EDITION_UNKNOWN" : 0; - object.editionDeprecated = options.enums === String ? "EDITION_UNKNOWN" : 0; - object.deprecationWarning = ""; - object.editionRemoved = options.enums === String ? "EDITION_UNKNOWN" : 0; - } - if (message.editionIntroduced != null && message.hasOwnProperty("editionIntroduced")) - object.editionIntroduced = options.enums === String ? $root.google.protobuf.Edition[message.editionIntroduced] === undefined ? 
message.editionIntroduced : $root.google.protobuf.Edition[message.editionIntroduced] : message.editionIntroduced; - if (message.editionDeprecated != null && message.hasOwnProperty("editionDeprecated")) - object.editionDeprecated = options.enums === String ? $root.google.protobuf.Edition[message.editionDeprecated] === undefined ? message.editionDeprecated : $root.google.protobuf.Edition[message.editionDeprecated] : message.editionDeprecated; - if (message.deprecationWarning != null && message.hasOwnProperty("deprecationWarning")) - object.deprecationWarning = message.deprecationWarning; - if (message.editionRemoved != null && message.hasOwnProperty("editionRemoved")) - object.editionRemoved = options.enums === String ? $root.google.protobuf.Edition[message.editionRemoved] === undefined ? message.editionRemoved : $root.google.protobuf.Edition[message.editionRemoved] : message.editionRemoved; - return object; - }; - - /** - * Converts this FeatureSupport to JSON. - * @function toJSON - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @instance - * @returns {Object.} JSON object - */ - FeatureSupport.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for FeatureSupport - * @function getTypeUrl - * @memberof google.protobuf.FieldOptions.FeatureSupport - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - FeatureSupport.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FieldOptions.FeatureSupport"; - }; - - return FeatureSupport; - })(); - return FieldOptions; })(); @@ -8888,7 +8233,6 @@ * @property {boolean|null} [deprecated] EnumValueOptions deprecated * @property {google.protobuf.IFeatureSet|null} [features] EnumValueOptions features * @property {boolean|null} [debugRedact] EnumValueOptions debugRedact - * @property {google.protobuf.FieldOptions.IFeatureSupport|null} [featureSupport] EnumValueOptions featureSupport * @property {Array.|null} [uninterpretedOption] EnumValueOptions uninterpretedOption */ @@ -8932,14 +8276,6 @@ */ EnumValueOptions.prototype.debugRedact = false; - /** - * EnumValueOptions featureSupport. - * @member {google.protobuf.FieldOptions.IFeatureSupport|null|undefined} featureSupport - * @memberof google.protobuf.EnumValueOptions - * @instance - */ - EnumValueOptions.prototype.featureSupport = null; - /** * EnumValueOptions uninterpretedOption. 
* @member {Array.} uninterpretedOption @@ -8978,8 +8314,6 @@ $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.debugRedact != null && Object.hasOwnProperty.call(message, "debugRedact")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.debugRedact); - if (message.featureSupport != null && Object.hasOwnProperty.call(message, "featureSupport")) - $root.google.protobuf.FieldOptions.FeatureSupport.encode(message.featureSupport, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); @@ -9031,10 +8365,6 @@ message.debugRedact = reader.bool(); break; } - case 4: { - message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.decode(reader, reader.uint32()); - break; - } case 999: { if (!(message.uninterpretedOption && message.uninterpretedOption.length)) message.uninterpretedOption = []; @@ -9087,11 +8417,6 @@ if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) if (typeof message.debugRedact !== "boolean") return "debugRedact: boolean expected"; - if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) { - var error = $root.google.protobuf.FieldOptions.FeatureSupport.verify(message.featureSupport); - if (error) - return "featureSupport." + error; - } if (message.uninterpretedOption != null && message.hasOwnProperty("uninterpretedOption")) { if (!Array.isArray(message.uninterpretedOption)) return "uninterpretedOption: array expected"; @@ -9125,11 +8450,6 @@ } if (object.debugRedact != null) message.debugRedact = Boolean(object.debugRedact); - if (object.featureSupport != null) { - if (typeof object.featureSupport !== "object") - throw TypeError(".google.protobuf.EnumValueOptions.featureSupport: object expected"); - message.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.fromObject(object.featureSupport); - } if (object.uninterpretedOption) { if (!Array.isArray(object.uninterpretedOption)) throw TypeError(".google.protobuf.EnumValueOptions.uninterpretedOption: array expected"); @@ -9162,7 +8482,6 @@ object.deprecated = false; object.features = null; object.debugRedact = false; - object.featureSupport = null; } if (message.deprecated != null && message.hasOwnProperty("deprecated")) object.deprecated = message.deprecated; @@ -9170,8 +8489,6 @@ object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.debugRedact != null && message.hasOwnProperty("debugRedact")) object.debugRedact = message.debugRedact; - if (message.featureSupport != null && message.hasOwnProperty("featureSupport")) - object.featureSupport = $root.google.protobuf.FieldOptions.FeatureSupport.toObject(message.featureSupport, options); if (message.uninterpretedOption && message.uninterpretedOption.length) { object.uninterpretedOption = []; for (var j = 0; j < message.uninterpretedOption.length; ++j) @@ -10639,8 +9956,6 @@ * @property {google.protobuf.FeatureSet.Utf8Validation|null} [utf8Validation] FeatureSet utf8Validation * @property {google.protobuf.FeatureSet.MessageEncoding|null} [messageEncoding] FeatureSet messageEncoding * @property {google.protobuf.FeatureSet.JsonFormat|null} [jsonFormat] FeatureSet jsonFormat - * @property 
{google.protobuf.FeatureSet.EnforceNamingStyle|null} [enforceNamingStyle] FeatureSet enforceNamingStyle - * @property {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility|null} [defaultSymbolVisibility] FeatureSet defaultSymbolVisibility */ /** @@ -10706,22 +10021,6 @@ */ FeatureSet.prototype.jsonFormat = 0; - /** - * FeatureSet enforceNamingStyle. - * @member {google.protobuf.FeatureSet.EnforceNamingStyle} enforceNamingStyle - * @memberof google.protobuf.FeatureSet - * @instance - */ - FeatureSet.prototype.enforceNamingStyle = 0; - - /** - * FeatureSet defaultSymbolVisibility. - * @member {google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility} defaultSymbolVisibility - * @memberof google.protobuf.FeatureSet - * @instance - */ - FeatureSet.prototype.defaultSymbolVisibility = 0; - /** * Creates a new FeatureSet instance using the specified properties. * @function create @@ -10758,10 +10057,6 @@ writer.uint32(/* id 5, wireType 0 =*/40).int32(message.messageEncoding); if (message.jsonFormat != null && Object.hasOwnProperty.call(message, "jsonFormat")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jsonFormat); - if (message.enforceNamingStyle != null && Object.hasOwnProperty.call(message, "enforceNamingStyle")) - writer.uint32(/* id 7, wireType 0 =*/56).int32(message.enforceNamingStyle); - if (message.defaultSymbolVisibility != null && Object.hasOwnProperty.call(message, "defaultSymbolVisibility")) - writer.uint32(/* id 8, wireType 0 =*/64).int32(message.defaultSymbolVisibility); return writer; }; @@ -10822,14 +10117,6 @@ message.jsonFormat = reader.int32(); break; } - case 7: { - message.enforceNamingStyle = reader.int32(); - break; - } - case 8: { - message.defaultSymbolVisibility = reader.int32(); - break; - } default: reader.skipType(tag & 7); break; @@ -10920,26 +10207,6 @@ case 2: break; } - if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) - switch (message.enforceNamingStyle) { - default: - return "enforceNamingStyle: enum value expected"; - case 0: - case 1: - case 2: - break; - } - if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) - switch (message.defaultSymbolVisibility) { - default: - return "defaultSymbolVisibility: enum value expected"; - case 0: - case 1: - case 2: - case 3: - case 4: - break; - } return null; }; @@ -11079,54 +10346,6 @@ message.jsonFormat = 2; break; } - switch (object.enforceNamingStyle) { - default: - if (typeof object.enforceNamingStyle === "number") { - message.enforceNamingStyle = object.enforceNamingStyle; - break; - } - break; - case "ENFORCE_NAMING_STYLE_UNKNOWN": - case 0: - message.enforceNamingStyle = 0; - break; - case "STYLE2024": - case 1: - message.enforceNamingStyle = 1; - break; - case "STYLE_LEGACY": - case 2: - message.enforceNamingStyle = 2; - break; - } - switch (object.defaultSymbolVisibility) { - default: - if (typeof object.defaultSymbolVisibility === "number") { - message.defaultSymbolVisibility = object.defaultSymbolVisibility; - break; - } - break; - case "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": - case 0: - message.defaultSymbolVisibility = 0; - break; - case "EXPORT_ALL": - case 1: - message.defaultSymbolVisibility = 1; - break; - case "EXPORT_TOP_LEVEL": - case 2: - message.defaultSymbolVisibility = 2; - break; - case "LOCAL_ALL": - case 3: - message.defaultSymbolVisibility = 3; - break; - case "STRICT": - case 4: - message.defaultSymbolVisibility = 4; - break; - } return message; }; @@ -11150,8 +10369,6 @@ 
object.utf8Validation = options.enums === String ? "UTF8_VALIDATION_UNKNOWN" : 0; object.messageEncoding = options.enums === String ? "MESSAGE_ENCODING_UNKNOWN" : 0; object.jsonFormat = options.enums === String ? "JSON_FORMAT_UNKNOWN" : 0; - object.enforceNamingStyle = options.enums === String ? "ENFORCE_NAMING_STYLE_UNKNOWN" : 0; - object.defaultSymbolVisibility = options.enums === String ? "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN" : 0; } if (message.fieldPresence != null && message.hasOwnProperty("fieldPresence")) object.fieldPresence = options.enums === String ? $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] === undefined ? message.fieldPresence : $root.google.protobuf.FeatureSet.FieldPresence[message.fieldPresence] : message.fieldPresence; @@ -11165,10 +10382,6 @@ object.messageEncoding = options.enums === String ? $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] === undefined ? message.messageEncoding : $root.google.protobuf.FeatureSet.MessageEncoding[message.messageEncoding] : message.messageEncoding; if (message.jsonFormat != null && message.hasOwnProperty("jsonFormat")) object.jsonFormat = options.enums === String ? $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] === undefined ? message.jsonFormat : $root.google.protobuf.FeatureSet.JsonFormat[message.jsonFormat] : message.jsonFormat; - if (message.enforceNamingStyle != null && message.hasOwnProperty("enforceNamingStyle")) - object.enforceNamingStyle = options.enums === String ? $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] === undefined ? message.enforceNamingStyle : $root.google.protobuf.FeatureSet.EnforceNamingStyle[message.enforceNamingStyle] : message.enforceNamingStyle; - if (message.defaultSymbolVisibility != null && message.hasOwnProperty("defaultSymbolVisibility")) - object.defaultSymbolVisibility = options.enums === String ? $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] === undefined ? message.defaultSymbolVisibility : $root.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility[message.defaultSymbolVisibility] : message.defaultSymbolVisibility; return object; }; @@ -11296,219 +10509,6 @@ return values; })(); - /** - * EnforceNamingStyle enum. - * @name google.protobuf.FeatureSet.EnforceNamingStyle - * @enum {number} - * @property {number} ENFORCE_NAMING_STYLE_UNKNOWN=0 ENFORCE_NAMING_STYLE_UNKNOWN value - * @property {number} STYLE2024=1 STYLE2024 value - * @property {number} STYLE_LEGACY=2 STYLE_LEGACY value - */ - FeatureSet.EnforceNamingStyle = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "ENFORCE_NAMING_STYLE_UNKNOWN"] = 0; - values[valuesById[1] = "STYLE2024"] = 1; - values[valuesById[2] = "STYLE_LEGACY"] = 2; - return values; - })(); - - FeatureSet.VisibilityFeature = (function() { - - /** - * Properties of a VisibilityFeature. - * @memberof google.protobuf.FeatureSet - * @interface IVisibilityFeature - */ - - /** - * Constructs a new VisibilityFeature. - * @memberof google.protobuf.FeatureSet - * @classdesc Represents a VisibilityFeature. 
- * @implements IVisibilityFeature - * @constructor - * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set - */ - function VisibilityFeature(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Creates a new VisibilityFeature instance using the specified properties. - * @function create - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {google.protobuf.FeatureSet.IVisibilityFeature=} [properties] Properties to set - * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature instance - */ - VisibilityFeature.create = function create(properties) { - return new VisibilityFeature(properties); - }; - - /** - * Encodes the specified VisibilityFeature message. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. - * @function encode - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - VisibilityFeature.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - return writer; - }; - - /** - * Encodes the specified VisibilityFeature message, length delimited. Does not implicitly {@link google.protobuf.FeatureSet.VisibilityFeature.verify|verify} messages. - * @function encodeDelimited - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {google.protobuf.FeatureSet.IVisibilityFeature} message VisibilityFeature message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - VisibilityFeature.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a VisibilityFeature message from the specified reader or buffer. - * @function decode - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - VisibilityFeature.decode = function decode(reader, length, error) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.protobuf.FeatureSet.VisibilityFeature(); - while (reader.pos < end) { - var tag = reader.uint32(); - if (tag === error) - break; - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a VisibilityFeature message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - VisibilityFeature.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a VisibilityFeature message. - * @function verify - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - VisibilityFeature.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - return null; - }; - - /** - * Creates a VisibilityFeature message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {Object.} object Plain object - * @returns {google.protobuf.FeatureSet.VisibilityFeature} VisibilityFeature - */ - VisibilityFeature.fromObject = function fromObject(object) { - if (object instanceof $root.google.protobuf.FeatureSet.VisibilityFeature) - return object; - return new $root.google.protobuf.FeatureSet.VisibilityFeature(); - }; - - /** - * Creates a plain object from a VisibilityFeature message. Also converts values to other types if specified. - * @function toObject - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {google.protobuf.FeatureSet.VisibilityFeature} message VisibilityFeature - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - VisibilityFeature.toObject = function toObject() { - return {}; - }; - - /** - * Converts this VisibilityFeature to JSON. - * @function toJSON - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @instance - * @returns {Object.} JSON object - */ - VisibilityFeature.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for VisibilityFeature - * @function getTypeUrl - * @memberof google.protobuf.FeatureSet.VisibilityFeature - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - VisibilityFeature.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.protobuf.FeatureSet.VisibilityFeature"; - }; - - /** - * DefaultSymbolVisibility enum. 
- * @name google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility - * @enum {number} - * @property {number} DEFAULT_SYMBOL_VISIBILITY_UNKNOWN=0 DEFAULT_SYMBOL_VISIBILITY_UNKNOWN value - * @property {number} EXPORT_ALL=1 EXPORT_ALL value - * @property {number} EXPORT_TOP_LEVEL=2 EXPORT_TOP_LEVEL value - * @property {number} LOCAL_ALL=3 LOCAL_ALL value - * @property {number} STRICT=4 STRICT value - */ - VisibilityFeature.DefaultSymbolVisibility = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN"] = 0; - values[valuesById[1] = "EXPORT_ALL"] = 1; - values[valuesById[2] = "EXPORT_TOP_LEVEL"] = 2; - values[valuesById[3] = "LOCAL_ALL"] = 3; - values[valuesById[4] = "STRICT"] = 4; - return values; - })(); - - return VisibilityFeature; - })(); - return FeatureSet; })(); @@ -11693,7 +10693,6 @@ default: return "minimumEdition: enum value expected"; case 0: - case 900: case 998: case 999: case 1000: @@ -11711,7 +10710,6 @@ default: return "maximumEdition: enum value expected"; case 0: - case 900: case 998: case 999: case 1000: @@ -11760,10 +10758,6 @@ case 0: message.minimumEdition = 0; break; - case "EDITION_LEGACY": - case 900: - message.minimumEdition = 900; - break; case "EDITION_PROTO2": case 998: message.minimumEdition = 998; @@ -11816,10 +10810,6 @@ case 0: message.maximumEdition = 0; break; - case "EDITION_LEGACY": - case 900: - message.maximumEdition = 900; - break; case "EDITION_PROTO2": case 998: message.maximumEdition = 998; @@ -11928,8 +10918,7 @@ * @memberof google.protobuf.FeatureSetDefaults * @interface IFeatureSetEditionDefault * @property {google.protobuf.Edition|null} [edition] FeatureSetEditionDefault edition - * @property {google.protobuf.IFeatureSet|null} [overridableFeatures] FeatureSetEditionDefault overridableFeatures - * @property {google.protobuf.IFeatureSet|null} [fixedFeatures] FeatureSetEditionDefault fixedFeatures + * @property {google.protobuf.IFeatureSet|null} [features] FeatureSetEditionDefault features */ /** @@ -11956,20 +10945,12 @@ FeatureSetEditionDefault.prototype.edition = 0; /** - * FeatureSetEditionDefault overridableFeatures. - * @member {google.protobuf.IFeatureSet|null|undefined} overridableFeatures + * FeatureSetEditionDefault features. + * @member {google.protobuf.IFeatureSet|null|undefined} features * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault * @instance */ - FeatureSetEditionDefault.prototype.overridableFeatures = null; - - /** - * FeatureSetEditionDefault fixedFeatures. - * @member {google.protobuf.IFeatureSet|null|undefined} fixedFeatures - * @memberof google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault - * @instance - */ - FeatureSetEditionDefault.prototype.fixedFeatures = null; + FeatureSetEditionDefault.prototype.features = null; /** * Creates a new FeatureSetEditionDefault instance using the specified properties. 
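Every writer call in these hunks bakes the protobuf field key into an inline constant: the key is (field_number << 3) | wire_type, so the annotation "/* id 3, wireType 0 =*/24" stands for (3 << 3) | 0 === 24 and "/* id 2, wireType 2 =*/18" for (2 << 3) | 2 === 18, which is also why the decode loops switch on "tag >>> 3" to recover the field number. A minimal standalone sketch of that arithmetic (the helper names are illustrative, not part of the generated protos.js):

    // Protobuf wire-format key arithmetic as used by the generated writer/reader code.
    function makeTag(fieldNumber, wireType) {
      // field key = field number shifted left by 3 bits, OR'd with the 3-bit wire type
      return (fieldNumber << 3) | wireType;
    }
    function fieldNumberOf(tag) {
      // the generated decode loops recover the field number with an unsigned right shift
      return tag >>> 3;
    }
    console.log(makeTag(3, 0));      // 24  -> "/* id 3, wireType 0 =*/24"
    console.log(makeTag(2, 2));      // 18  -> "/* id 2, wireType 2 =*/18"
    console.log(fieldNumberOf(24));  // 3

This is why adding, removing, or renumbering a field changes these literals throughout the regenerated file rather than only the lines that name the field.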
@@ -11995,12 +10976,10 @@ FeatureSetEditionDefault.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); + if (message.features != null && Object.hasOwnProperty.call(message, "features")) + $root.google.protobuf.FeatureSet.encode(message.features, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.edition != null && Object.hasOwnProperty.call(message, "edition")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.edition); - if (message.overridableFeatures != null && Object.hasOwnProperty.call(message, "overridableFeatures")) - $root.google.protobuf.FeatureSet.encode(message.overridableFeatures, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.fixedFeatures != null && Object.hasOwnProperty.call(message, "fixedFeatures")) - $root.google.protobuf.FeatureSet.encode(message.fixedFeatures, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); return writer; }; @@ -12041,12 +11020,8 @@ message.edition = reader.int32(); break; } - case 4: { - message.overridableFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); - break; - } - case 5: { - message.fixedFeatures = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); + case 2: { + message.features = $root.google.protobuf.FeatureSet.decode(reader, reader.uint32()); break; } default: @@ -12089,7 +11064,6 @@ default: return "edition: enum value expected"; case 0: - case 900: case 998: case 999: case 1000: @@ -12102,15 +11076,10 @@ case 2147483647: break; } - if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) { - var error = $root.google.protobuf.FeatureSet.verify(message.overridableFeatures); - if (error) - return "overridableFeatures." + error; - } - if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) { - var error = $root.google.protobuf.FeatureSet.verify(message.fixedFeatures); + if (message.features != null && message.hasOwnProperty("features")) { + var error = $root.google.protobuf.FeatureSet.verify(message.features); if (error) - return "fixedFeatures." + error; + return "features." + error; } return null; }; @@ -12138,10 +11107,6 @@ case 0: message.edition = 0; break; - case "EDITION_LEGACY": - case 900: - message.edition = 900; - break; case "EDITION_PROTO2": case 998: message.edition = 998; @@ -12183,15 +11148,10 @@ message.edition = 2147483647; break; } - if (object.overridableFeatures != null) { - if (typeof object.overridableFeatures !== "object") - throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.overridableFeatures: object expected"); - message.overridableFeatures = $root.google.protobuf.FeatureSet.fromObject(object.overridableFeatures); - } - if (object.fixedFeatures != null) { - if (typeof object.fixedFeatures !== "object") - throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fixedFeatures: object expected"); - message.fixedFeatures = $root.google.protobuf.FeatureSet.fromObject(object.fixedFeatures); + if (object.features != null) { + if (typeof object.features !== "object") + throw TypeError(".google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features: object expected"); + message.features = $root.google.protobuf.FeatureSet.fromObject(object.features); } return message; }; @@ -12210,16 +11170,13 @@ options = {}; var object = {}; if (options.defaults) { + object.features = null; object.edition = options.enums === String ? 
"EDITION_UNKNOWN" : 0; - object.overridableFeatures = null; - object.fixedFeatures = null; } + if (message.features != null && message.hasOwnProperty("features")) + object.features = $root.google.protobuf.FeatureSet.toObject(message.features, options); if (message.edition != null && message.hasOwnProperty("edition")) object.edition = options.enums === String ? $root.google.protobuf.Edition[message.edition] === undefined ? message.edition : $root.google.protobuf.Edition[message.edition] : message.edition; - if (message.overridableFeatures != null && message.hasOwnProperty("overridableFeatures")) - object.overridableFeatures = $root.google.protobuf.FeatureSet.toObject(message.overridableFeatures, options); - if (message.fixedFeatures != null && message.hasOwnProperty("fixedFeatures")) - object.fixedFeatures = $root.google.protobuf.FeatureSet.toObject(message.fixedFeatures, options); return object; }; @@ -13434,22 +12391,6 @@ return GeneratedCodeInfo; })(); - /** - * SymbolVisibility enum. - * @name google.protobuf.SymbolVisibility - * @enum {number} - * @property {number} VISIBILITY_UNSET=0 VISIBILITY_UNSET value - * @property {number} VISIBILITY_LOCAL=1 VISIBILITY_LOCAL value - * @property {number} VISIBILITY_EXPORT=2 VISIBILITY_EXPORT value - */ - protobuf.SymbolVisibility = (function() { - var valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "VISIBILITY_UNSET"] = 0; - values[valuesById[1] = "VISIBILITY_LOCAL"] = 1; - values[valuesById[2] = "VISIBILITY_EXPORT"] = 2; - return values; - })(); - protobuf.Any = (function() { /** @@ -16194,12 +15135,6 @@ * @interface IViolation * @property {string|null} [subject] Violation subject * @property {string|null} [description] Violation description - * @property {string|null} [apiService] Violation apiService - * @property {string|null} [quotaMetric] Violation quotaMetric - * @property {string|null} [quotaId] Violation quotaId - * @property {Object.|null} [quotaDimensions] Violation quotaDimensions - * @property {number|Long|null} [quotaValue] Violation quotaValue - * @property {number|Long|null} [futureQuotaValue] Violation futureQuotaValue */ /** @@ -16211,7 +15146,6 @@ * @param {google.rpc.QuotaFailure.IViolation=} [properties] Properties to set */ function Violation(properties) { - this.quotaDimensions = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -16234,63 +15168,6 @@ */ Violation.prototype.description = ""; - /** - * Violation apiService. - * @member {string} apiService - * @memberof google.rpc.QuotaFailure.Violation - * @instance - */ - Violation.prototype.apiService = ""; - - /** - * Violation quotaMetric. - * @member {string} quotaMetric - * @memberof google.rpc.QuotaFailure.Violation - * @instance - */ - Violation.prototype.quotaMetric = ""; - - /** - * Violation quotaId. - * @member {string} quotaId - * @memberof google.rpc.QuotaFailure.Violation - * @instance - */ - Violation.prototype.quotaId = ""; - - /** - * Violation quotaDimensions. - * @member {Object.} quotaDimensions - * @memberof google.rpc.QuotaFailure.Violation - * @instance - */ - Violation.prototype.quotaDimensions = $util.emptyObject; - - /** - * Violation quotaValue. - * @member {number|Long} quotaValue - * @memberof google.rpc.QuotaFailure.Violation - * @instance - */ - Violation.prototype.quotaValue = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * Violation futureQuotaValue. 
- * @member {number|Long|null|undefined} futureQuotaValue - * @memberof google.rpc.QuotaFailure.Violation - * @instance - */ - Violation.prototype.futureQuotaValue = null; - - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - - // Virtual OneOf for proto3 optional field - Object.defineProperty(Violation.prototype, "_futureQuotaValue", { - get: $util.oneOfGetter($oneOfFields = ["futureQuotaValue"]), - set: $util.oneOfSetter($oneOfFields) - }); - /** * Creates a new Violation instance using the specified properties. * @function create @@ -16319,19 +15196,6 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.subject); if (message.description != null && Object.hasOwnProperty.call(message, "description")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.description); - if (message.apiService != null && Object.hasOwnProperty.call(message, "apiService")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.apiService); - if (message.quotaMetric != null && Object.hasOwnProperty.call(message, "quotaMetric")) - writer.uint32(/* id 4, wireType 2 =*/34).string(message.quotaMetric); - if (message.quotaId != null && Object.hasOwnProperty.call(message, "quotaId")) - writer.uint32(/* id 5, wireType 2 =*/42).string(message.quotaId); - if (message.quotaDimensions != null && Object.hasOwnProperty.call(message, "quotaDimensions")) - for (var keys = Object.keys(message.quotaDimensions), i = 0; i < keys.length; ++i) - writer.uint32(/* id 6, wireType 2 =*/50).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.quotaDimensions[keys[i]]).ldelim(); - if (message.quotaValue != null && Object.hasOwnProperty.call(message, "quotaValue")) - writer.uint32(/* id 7, wireType 0 =*/56).int64(message.quotaValue); - if (message.futureQuotaValue != null && Object.hasOwnProperty.call(message, "futureQuotaValue")) - writer.uint32(/* id 8, wireType 0 =*/64).int64(message.futureQuotaValue); return writer; }; @@ -16362,7 +15226,7 @@ Violation.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.rpc.QuotaFailure.Violation(), key, value; + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.rpc.QuotaFailure.Violation(); while (reader.pos < end) { var tag = reader.uint32(); if (tag === error) @@ -16376,49 +15240,6 @@ message.description = reader.string(); break; } - case 3: { - message.apiService = reader.string(); - break; - } - case 4: { - message.quotaMetric = reader.string(); - break; - } - case 5: { - message.quotaId = reader.string(); - break; - } - case 6: { - if (message.quotaDimensions === $util.emptyObject) - message.quotaDimensions = {}; - var end2 = reader.uint32() + reader.pos; - key = ""; - value = ""; - while (reader.pos < end2) { - var tag2 = reader.uint32(); - switch (tag2 >>> 3) { - case 1: - key = reader.string(); - break; - case 2: - value = reader.string(); - break; - default: - reader.skipType(tag2 & 7); - break; - } - } - message.quotaDimensions[key] = value; - break; - } - case 7: { - message.quotaValue = reader.int64(); - break; - } - case 8: { - message.futureQuotaValue = reader.int64(); - break; - } default: reader.skipType(tag & 7); break; @@ -16454,38 +15275,12 @@ Violation.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - var properties = {}; if (message.subject != null && message.hasOwnProperty("subject")) if (!$util.isString(message.subject)) return "subject: string expected"; if (message.description != null && message.hasOwnProperty("description")) if (!$util.isString(message.description)) return "description: string expected"; - if (message.apiService != null && message.hasOwnProperty("apiService")) - if (!$util.isString(message.apiService)) - return "apiService: string expected"; - if (message.quotaMetric != null && message.hasOwnProperty("quotaMetric")) - if (!$util.isString(message.quotaMetric)) - return "quotaMetric: string expected"; - if (message.quotaId != null && message.hasOwnProperty("quotaId")) - if (!$util.isString(message.quotaId)) - return "quotaId: string expected"; - if (message.quotaDimensions != null && message.hasOwnProperty("quotaDimensions")) { - if (!$util.isObject(message.quotaDimensions)) - return "quotaDimensions: object expected"; - var key = Object.keys(message.quotaDimensions); - for (var i = 0; i < key.length; ++i) - if (!$util.isString(message.quotaDimensions[key[i]])) - return "quotaDimensions: string{k:string} expected"; - } - if (message.quotaValue != null && message.hasOwnProperty("quotaValue")) - if (!$util.isInteger(message.quotaValue) && !(message.quotaValue && $util.isInteger(message.quotaValue.low) && $util.isInteger(message.quotaValue.high))) - return "quotaValue: integer|Long expected"; - if (message.futureQuotaValue != null && message.hasOwnProperty("futureQuotaValue")) { - properties._futureQuotaValue = 1; - if (!$util.isInteger(message.futureQuotaValue) && !(message.futureQuotaValue && $util.isInteger(message.futureQuotaValue.low) && $util.isInteger(message.futureQuotaValue.high))) - return "futureQuotaValue: integer|Long expected"; - } return null; }; @@ -16505,37 +15300,6 @@ message.subject = String(object.subject); if (object.description != null) message.description = String(object.description); - if (object.apiService != null) - message.apiService = String(object.apiService); - if (object.quotaMetric != null) - message.quotaMetric = String(object.quotaMetric); - if (object.quotaId != null) - message.quotaId = String(object.quotaId); - if (object.quotaDimensions) { - if (typeof object.quotaDimensions !== "object") - throw 
TypeError(".google.rpc.QuotaFailure.Violation.quotaDimensions: object expected"); - message.quotaDimensions = {}; - for (var keys = Object.keys(object.quotaDimensions), i = 0; i < keys.length; ++i) - message.quotaDimensions[keys[i]] = String(object.quotaDimensions[keys[i]]); - } - if (object.quotaValue != null) - if ($util.Long) - (message.quotaValue = $util.Long.fromValue(object.quotaValue)).unsigned = false; - else if (typeof object.quotaValue === "string") - message.quotaValue = parseInt(object.quotaValue, 10); - else if (typeof object.quotaValue === "number") - message.quotaValue = object.quotaValue; - else if (typeof object.quotaValue === "object") - message.quotaValue = new $util.LongBits(object.quotaValue.low >>> 0, object.quotaValue.high >>> 0).toNumber(); - if (object.futureQuotaValue != null) - if ($util.Long) - (message.futureQuotaValue = $util.Long.fromValue(object.futureQuotaValue)).unsigned = false; - else if (typeof object.futureQuotaValue === "string") - message.futureQuotaValue = parseInt(object.futureQuotaValue, 10); - else if (typeof object.futureQuotaValue === "number") - message.futureQuotaValue = object.futureQuotaValue; - else if (typeof object.futureQuotaValue === "object") - message.futureQuotaValue = new $util.LongBits(object.futureQuotaValue.low >>> 0, object.futureQuotaValue.high >>> 0).toNumber(); return message; }; @@ -16552,49 +15316,14 @@ if (!options) options = {}; var object = {}; - if (options.objects || options.defaults) - object.quotaDimensions = {}; if (options.defaults) { object.subject = ""; object.description = ""; - object.apiService = ""; - object.quotaMetric = ""; - object.quotaId = ""; - if ($util.Long) { - var long = new $util.Long(0, 0, false); - object.quotaValue = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long; - } else - object.quotaValue = options.longs === String ? "0" : 0; } if (message.subject != null && message.hasOwnProperty("subject")) object.subject = message.subject; if (message.description != null && message.hasOwnProperty("description")) object.description = message.description; - if (message.apiService != null && message.hasOwnProperty("apiService")) - object.apiService = message.apiService; - if (message.quotaMetric != null && message.hasOwnProperty("quotaMetric")) - object.quotaMetric = message.quotaMetric; - if (message.quotaId != null && message.hasOwnProperty("quotaId")) - object.quotaId = message.quotaId; - var keys2; - if (message.quotaDimensions && (keys2 = Object.keys(message.quotaDimensions)).length) { - object.quotaDimensions = {}; - for (var j = 0; j < keys2.length; ++j) - object.quotaDimensions[keys2[j]] = message.quotaDimensions[keys2[j]]; - } - if (message.quotaValue != null && message.hasOwnProperty("quotaValue")) - if (typeof message.quotaValue === "number") - object.quotaValue = options.longs === String ? String(message.quotaValue) : message.quotaValue; - else - object.quotaValue = options.longs === String ? $util.Long.prototype.toString.call(message.quotaValue) : options.longs === Number ? new $util.LongBits(message.quotaValue.low >>> 0, message.quotaValue.high >>> 0).toNumber() : message.quotaValue; - if (message.futureQuotaValue != null && message.hasOwnProperty("futureQuotaValue")) { - if (typeof message.futureQuotaValue === "number") - object.futureQuotaValue = options.longs === String ? String(message.futureQuotaValue) : message.futureQuotaValue; - else - object.futureQuotaValue = options.longs === String ? 
$util.Long.prototype.toString.call(message.futureQuotaValue) : options.longs === Number ? new $util.LongBits(message.futureQuotaValue.low >>> 0, message.futureQuotaValue.high >>> 0).toNumber() : message.futureQuotaValue; - if (options.oneofs) - object._futureQuotaValue = "futureQuotaValue"; - } return object; }; @@ -17339,8 +16068,6 @@ * @interface IFieldViolation * @property {string|null} [field] FieldViolation field * @property {string|null} [description] FieldViolation description - * @property {string|null} [reason] FieldViolation reason - * @property {google.rpc.ILocalizedMessage|null} [localizedMessage] FieldViolation localizedMessage */ /** @@ -17374,22 +16101,6 @@ */ FieldViolation.prototype.description = ""; - /** - * FieldViolation reason. - * @member {string} reason - * @memberof google.rpc.BadRequest.FieldViolation - * @instance - */ - FieldViolation.prototype.reason = ""; - - /** - * FieldViolation localizedMessage. - * @member {google.rpc.ILocalizedMessage|null|undefined} localizedMessage - * @memberof google.rpc.BadRequest.FieldViolation - * @instance - */ - FieldViolation.prototype.localizedMessage = null; - /** * Creates a new FieldViolation instance using the specified properties. * @function create @@ -17418,10 +16129,6 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.field); if (message.description != null && Object.hasOwnProperty.call(message, "description")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.description); - if (message.reason != null && Object.hasOwnProperty.call(message, "reason")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.reason); - if (message.localizedMessage != null && Object.hasOwnProperty.call(message, "localizedMessage")) - $root.google.rpc.LocalizedMessage.encode(message.localizedMessage, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -17466,14 +16173,6 @@ message.description = reader.string(); break; } - case 3: { - message.reason = reader.string(); - break; - } - case 4: { - message.localizedMessage = $root.google.rpc.LocalizedMessage.decode(reader, reader.uint32()); - break; - } default: reader.skipType(tag & 7); break; @@ -17515,14 +16214,6 @@ if (message.description != null && message.hasOwnProperty("description")) if (!$util.isString(message.description)) return "description: string expected"; - if (message.reason != null && message.hasOwnProperty("reason")) - if (!$util.isString(message.reason)) - return "reason: string expected"; - if (message.localizedMessage != null && message.hasOwnProperty("localizedMessage")) { - var error = $root.google.rpc.LocalizedMessage.verify(message.localizedMessage); - if (error) - return "localizedMessage." 
+ error; - } return null; }; @@ -17542,13 +16233,6 @@ message.field = String(object.field); if (object.description != null) message.description = String(object.description); - if (object.reason != null) - message.reason = String(object.reason); - if (object.localizedMessage != null) { - if (typeof object.localizedMessage !== "object") - throw TypeError(".google.rpc.BadRequest.FieldViolation.localizedMessage: object expected"); - message.localizedMessage = $root.google.rpc.LocalizedMessage.fromObject(object.localizedMessage); - } return message; }; @@ -17568,17 +16252,11 @@ if (options.defaults) { object.field = ""; object.description = ""; - object.reason = ""; - object.localizedMessage = null; } if (message.field != null && message.hasOwnProperty("field")) object.field = message.field; if (message.description != null && message.hasOwnProperty("description")) object.description = message.description; - if (message.reason != null && message.hasOwnProperty("reason")) - object.reason = message.reason; - if (message.localizedMessage != null && message.hasOwnProperty("localizedMessage")) - object.localizedMessage = $root.google.rpc.LocalizedMessage.toObject(message.localizedMessage, options); return object; }; @@ -97535,7 +96213,6 @@ * @interface ICommonLanguageSettings * @property {string|null} [referenceDocsUri] CommonLanguageSettings referenceDocsUri * @property {Array.|null} [destinations] CommonLanguageSettings destinations - * @property {google.api.ISelectiveGapicGeneration|null} [selectiveGapicGeneration] CommonLanguageSettings selectiveGapicGeneration */ /** @@ -97570,14 +96247,6 @@ */ CommonLanguageSettings.prototype.destinations = $util.emptyArray; - /** - * CommonLanguageSettings selectiveGapicGeneration. - * @member {google.api.ISelectiveGapicGeneration|null|undefined} selectiveGapicGeneration - * @memberof google.api.CommonLanguageSettings - * @instance - */ - CommonLanguageSettings.prototype.selectiveGapicGeneration = null; - /** * Creates a new CommonLanguageSettings instance using the specified properties. * @function create @@ -97610,8 +96279,6 @@ writer.int32(message.destinations[i]); writer.ldelim(); } - if (message.selectiveGapicGeneration != null && Object.hasOwnProperty.call(message, "selectiveGapicGeneration")) - $root.google.api.SelectiveGapicGeneration.encode(message.selectiveGapicGeneration, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -97663,10 +96330,6 @@ message.destinations.push(reader.int32()); break; } - case 3: { - message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.decode(reader, reader.uint32()); - break; - } default: reader.skipType(tag & 7); break; @@ -97718,11 +96381,6 @@ break; } } - if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) { - var error = $root.google.api.SelectiveGapicGeneration.verify(message.selectiveGapicGeneration); - if (error) - return "selectiveGapicGeneration." 
+ error; - } return null; }; @@ -97765,11 +96423,6 @@ break; } } - if (object.selectiveGapicGeneration != null) { - if (typeof object.selectiveGapicGeneration !== "object") - throw TypeError(".google.api.CommonLanguageSettings.selectiveGapicGeneration: object expected"); - message.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.fromObject(object.selectiveGapicGeneration); - } return message; }; @@ -97788,10 +96441,8 @@ var object = {}; if (options.arrays || options.defaults) object.destinations = []; - if (options.defaults) { + if (options.defaults) object.referenceDocsUri = ""; - object.selectiveGapicGeneration = null; - } if (message.referenceDocsUri != null && message.hasOwnProperty("referenceDocsUri")) object.referenceDocsUri = message.referenceDocsUri; if (message.destinations && message.destinations.length) { @@ -97799,8 +96450,6 @@ for (var j = 0; j < message.destinations.length; ++j) object.destinations[j] = options.enums === String ? $root.google.api.ClientLibraryDestination[message.destinations[j]] === undefined ? message.destinations[j] : $root.google.api.ClientLibraryDestination[message.destinations[j]] : message.destinations[j]; } - if (message.selectiveGapicGeneration != null && message.hasOwnProperty("selectiveGapicGeneration")) - object.selectiveGapicGeneration = $root.google.api.SelectiveGapicGeneration.toObject(message.selectiveGapicGeneration, options); return object; }; @@ -99623,7 +98272,6 @@ * @memberof google.api * @interface IPythonSettings * @property {google.api.ICommonLanguageSettings|null} [common] PythonSettings common - * @property {google.api.PythonSettings.IExperimentalFeatures|null} [experimentalFeatures] PythonSettings experimentalFeatures */ /** @@ -99649,14 +98297,6 @@ */ PythonSettings.prototype.common = null; - /** - * PythonSettings experimentalFeatures. - * @member {google.api.PythonSettings.IExperimentalFeatures|null|undefined} experimentalFeatures - * @memberof google.api.PythonSettings - * @instance - */ - PythonSettings.prototype.experimentalFeatures = null; - /** * Creates a new PythonSettings instance using the specified properties. * @function create @@ -99683,8 +98323,6 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.experimentalFeatures != null && Object.hasOwnProperty.call(message, "experimentalFeatures")) - $root.google.api.PythonSettings.ExperimentalFeatures.encode(message.experimentalFeatures, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -99725,10 +98363,6 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } - case 2: { - message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.decode(reader, reader.uint32()); - break; - } default: reader.skipType(tag & 7); break; @@ -99769,11 +98403,6 @@ if (error) return "common." + error; } - if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) { - var error = $root.google.api.PythonSettings.ExperimentalFeatures.verify(message.experimentalFeatures); - if (error) - return "experimentalFeatures." 
+ error; - } return null; }; @@ -99794,11 +98423,6 @@ throw TypeError(".google.api.PythonSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } - if (object.experimentalFeatures != null) { - if (typeof object.experimentalFeatures !== "object") - throw TypeError(".google.api.PythonSettings.experimentalFeatures: object expected"); - message.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.fromObject(object.experimentalFeatures); - } return message; }; @@ -99815,14 +98439,10 @@ if (!options) options = {}; var object = {}; - if (options.defaults) { + if (options.defaults) object.common = null; - object.experimentalFeatures = null; - } if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); - if (message.experimentalFeatures != null && message.hasOwnProperty("experimentalFeatures")) - object.experimentalFeatures = $root.google.api.PythonSettings.ExperimentalFeatures.toObject(message.experimentalFeatures, options); return object; }; @@ -99852,258 +98472,6 @@ return typeUrlPrefix + "/google.api.PythonSettings"; }; - PythonSettings.ExperimentalFeatures = (function() { - - /** - * Properties of an ExperimentalFeatures. - * @memberof google.api.PythonSettings - * @interface IExperimentalFeatures - * @property {boolean|null} [restAsyncIoEnabled] ExperimentalFeatures restAsyncIoEnabled - * @property {boolean|null} [protobufPythonicTypesEnabled] ExperimentalFeatures protobufPythonicTypesEnabled - * @property {boolean|null} [unversionedPackageDisabled] ExperimentalFeatures unversionedPackageDisabled - */ - - /** - * Constructs a new ExperimentalFeatures. - * @memberof google.api.PythonSettings - * @classdesc Represents an ExperimentalFeatures. - * @implements IExperimentalFeatures - * @constructor - * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set - */ - function ExperimentalFeatures(properties) { - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * ExperimentalFeatures restAsyncIoEnabled. - * @member {boolean} restAsyncIoEnabled - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @instance - */ - ExperimentalFeatures.prototype.restAsyncIoEnabled = false; - - /** - * ExperimentalFeatures protobufPythonicTypesEnabled. - * @member {boolean} protobufPythonicTypesEnabled - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @instance - */ - ExperimentalFeatures.prototype.protobufPythonicTypesEnabled = false; - - /** - * ExperimentalFeatures unversionedPackageDisabled. - * @member {boolean} unversionedPackageDisabled - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @instance - */ - ExperimentalFeatures.prototype.unversionedPackageDisabled = false; - - /** - * Creates a new ExperimentalFeatures instance using the specified properties. - * @function create - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {google.api.PythonSettings.IExperimentalFeatures=} [properties] Properties to set - * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures instance - */ - ExperimentalFeatures.create = function create(properties) { - return new ExperimentalFeatures(properties); - }; - - /** - * Encodes the specified ExperimentalFeatures message. 
Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. - * @function encode - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExperimentalFeatures.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.restAsyncIoEnabled != null && Object.hasOwnProperty.call(message, "restAsyncIoEnabled")) - writer.uint32(/* id 1, wireType 0 =*/8).bool(message.restAsyncIoEnabled); - if (message.protobufPythonicTypesEnabled != null && Object.hasOwnProperty.call(message, "protobufPythonicTypesEnabled")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.protobufPythonicTypesEnabled); - if (message.unversionedPackageDisabled != null && Object.hasOwnProperty.call(message, "unversionedPackageDisabled")) - writer.uint32(/* id 3, wireType 0 =*/24).bool(message.unversionedPackageDisabled); - return writer; - }; - - /** - * Encodes the specified ExperimentalFeatures message, length delimited. Does not implicitly {@link google.api.PythonSettings.ExperimentalFeatures.verify|verify} messages. - * @function encodeDelimited - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {google.api.PythonSettings.IExperimentalFeatures} message ExperimentalFeatures message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - ExperimentalFeatures.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes an ExperimentalFeatures message from the specified reader or buffer. - * @function decode - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExperimentalFeatures.decode = function decode(reader, length, error) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.PythonSettings.ExperimentalFeatures(); - while (reader.pos < end) { - var tag = reader.uint32(); - if (tag === error) - break; - switch (tag >>> 3) { - case 1: { - message.restAsyncIoEnabled = reader.bool(); - break; - } - case 2: { - message.protobufPythonicTypesEnabled = reader.bool(); - break; - } - case 3: { - message.unversionedPackageDisabled = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes an ExperimentalFeatures message from the specified reader or buffer, length delimited. 
- * @function decodeDelimited - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - ExperimentalFeatures.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies an ExperimentalFeatures message. - * @function verify - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - ExperimentalFeatures.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) - if (typeof message.restAsyncIoEnabled !== "boolean") - return "restAsyncIoEnabled: boolean expected"; - if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) - if (typeof message.protobufPythonicTypesEnabled !== "boolean") - return "protobufPythonicTypesEnabled: boolean expected"; - if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled")) - if (typeof message.unversionedPackageDisabled !== "boolean") - return "unversionedPackageDisabled: boolean expected"; - return null; - }; - - /** - * Creates an ExperimentalFeatures message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {Object.} object Plain object - * @returns {google.api.PythonSettings.ExperimentalFeatures} ExperimentalFeatures - */ - ExperimentalFeatures.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.PythonSettings.ExperimentalFeatures) - return object; - var message = new $root.google.api.PythonSettings.ExperimentalFeatures(); - if (object.restAsyncIoEnabled != null) - message.restAsyncIoEnabled = Boolean(object.restAsyncIoEnabled); - if (object.protobufPythonicTypesEnabled != null) - message.protobufPythonicTypesEnabled = Boolean(object.protobufPythonicTypesEnabled); - if (object.unversionedPackageDisabled != null) - message.unversionedPackageDisabled = Boolean(object.unversionedPackageDisabled); - return message; - }; - - /** - * Creates a plain object from an ExperimentalFeatures message. Also converts values to other types if specified. 
- * @function toObject - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {google.api.PythonSettings.ExperimentalFeatures} message ExperimentalFeatures - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - ExperimentalFeatures.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.defaults) { - object.restAsyncIoEnabled = false; - object.protobufPythonicTypesEnabled = false; - object.unversionedPackageDisabled = false; - } - if (message.restAsyncIoEnabled != null && message.hasOwnProperty("restAsyncIoEnabled")) - object.restAsyncIoEnabled = message.restAsyncIoEnabled; - if (message.protobufPythonicTypesEnabled != null && message.hasOwnProperty("protobufPythonicTypesEnabled")) - object.protobufPythonicTypesEnabled = message.protobufPythonicTypesEnabled; - if (message.unversionedPackageDisabled != null && message.hasOwnProperty("unversionedPackageDisabled")) - object.unversionedPackageDisabled = message.unversionedPackageDisabled; - return object; - }; - - /** - * Converts this ExperimentalFeatures to JSON. - * @function toJSON - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @instance - * @returns {Object.} JSON object - */ - ExperimentalFeatures.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for ExperimentalFeatures - * @function getTypeUrl - * @memberof google.api.PythonSettings.ExperimentalFeatures - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - ExperimentalFeatures.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.PythonSettings.ExperimentalFeatures"; - }; - - return ExperimentalFeatures; - })(); - return PythonSettings; })(); @@ -100980,7 +99348,6 @@ * @memberof google.api * @interface IGoSettings * @property {google.api.ICommonLanguageSettings|null} [common] GoSettings common - * @property {Object.|null} [renamedServices] GoSettings renamedServices */ /** @@ -100992,7 +99359,6 @@ * @param {google.api.IGoSettings=} [properties] Properties to set */ function GoSettings(properties) { - this.renamedServices = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -101007,14 +99373,6 @@ */ GoSettings.prototype.common = null; - /** - * GoSettings renamedServices. - * @member {Object.} renamedServices - * @memberof google.api.GoSettings - * @instance - */ - GoSettings.prototype.renamedServices = $util.emptyObject; - /** * Creates a new GoSettings instance using the specified properties. 
* @function create @@ -101041,9 +99399,6 @@ writer = $Writer.create(); if (message.common != null && Object.hasOwnProperty.call(message, "common")) $root.google.api.CommonLanguageSettings.encode(message.common, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.renamedServices != null && Object.hasOwnProperty.call(message, "renamedServices")) - for (var keys = Object.keys(message.renamedServices), i = 0; i < keys.length; ++i) - writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.renamedServices[keys[i]]).ldelim(); return writer; }; @@ -101074,7 +99429,7 @@ GoSettings.decode = function decode(reader, length, error) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(), key, value; + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.GoSettings(); while (reader.pos < end) { var tag = reader.uint32(); if (tag === error) @@ -101084,29 +99439,6 @@ message.common = $root.google.api.CommonLanguageSettings.decode(reader, reader.uint32()); break; } - case 2: { - if (message.renamedServices === $util.emptyObject) - message.renamedServices = {}; - var end2 = reader.uint32() + reader.pos; - key = ""; - value = ""; - while (reader.pos < end2) { - var tag2 = reader.uint32(); - switch (tag2 >>> 3) { - case 1: - key = reader.string(); - break; - case 2: - value = reader.string(); - break; - default: - reader.skipType(tag2 & 7); - break; - } - } - message.renamedServices[key] = value; - break; - } default: reader.skipType(tag & 7); break; @@ -101147,14 +99479,6 @@ if (error) return "common." + error; } - if (message.renamedServices != null && message.hasOwnProperty("renamedServices")) { - if (!$util.isObject(message.renamedServices)) - return "renamedServices: object expected"; - var key = Object.keys(message.renamedServices); - for (var i = 0; i < key.length; ++i) - if (!$util.isString(message.renamedServices[key[i]])) - return "renamedServices: string{k:string} expected"; - } return null; }; @@ -101175,13 +99499,6 @@ throw TypeError(".google.api.GoSettings.common: object expected"); message.common = $root.google.api.CommonLanguageSettings.fromObject(object.common); } - if (object.renamedServices) { - if (typeof object.renamedServices !== "object") - throw TypeError(".google.api.GoSettings.renamedServices: object expected"); - message.renamedServices = {}; - for (var keys = Object.keys(object.renamedServices), i = 0; i < keys.length; ++i) - message.renamedServices[keys[i]] = String(object.renamedServices[keys[i]]); - } return message; }; @@ -101198,18 +99515,10 @@ if (!options) options = {}; var object = {}; - if (options.objects || options.defaults) - object.renamedServices = {}; if (options.defaults) object.common = null; if (message.common != null && message.hasOwnProperty("common")) object.common = $root.google.api.CommonLanguageSettings.toObject(message.common, options); - var keys2; - if (message.renamedServices && (keys2 = Object.keys(message.renamedServices)).length) { - object.renamedServices = {}; - for (var j = 0; j < keys2.length; ++j) - object.renamedServices[keys2[j]] = message.renamedServices[keys2[j]]; - } return object; }; @@ -101848,251 +100157,6 @@ return values; })(); - api.SelectiveGapicGeneration = (function() { - - /** - * Properties of a SelectiveGapicGeneration. 
- * @memberof google.api - * @interface ISelectiveGapicGeneration - * @property {Array.|null} [methods] SelectiveGapicGeneration methods - * @property {boolean|null} [generateOmittedAsInternal] SelectiveGapicGeneration generateOmittedAsInternal - */ - - /** - * Constructs a new SelectiveGapicGeneration. - * @memberof google.api - * @classdesc Represents a SelectiveGapicGeneration. - * @implements ISelectiveGapicGeneration - * @constructor - * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set - */ - function SelectiveGapicGeneration(properties) { - this.methods = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * SelectiveGapicGeneration methods. - * @member {Array.} methods - * @memberof google.api.SelectiveGapicGeneration - * @instance - */ - SelectiveGapicGeneration.prototype.methods = $util.emptyArray; - - /** - * SelectiveGapicGeneration generateOmittedAsInternal. - * @member {boolean} generateOmittedAsInternal - * @memberof google.api.SelectiveGapicGeneration - * @instance - */ - SelectiveGapicGeneration.prototype.generateOmittedAsInternal = false; - - /** - * Creates a new SelectiveGapicGeneration instance using the specified properties. - * @function create - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {google.api.ISelectiveGapicGeneration=} [properties] Properties to set - * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration instance - */ - SelectiveGapicGeneration.create = function create(properties) { - return new SelectiveGapicGeneration(properties); - }; - - /** - * Encodes the specified SelectiveGapicGeneration message. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. - * @function encode - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SelectiveGapicGeneration.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.methods != null && message.methods.length) - for (var i = 0; i < message.methods.length; ++i) - writer.uint32(/* id 1, wireType 2 =*/10).string(message.methods[i]); - if (message.generateOmittedAsInternal != null && Object.hasOwnProperty.call(message, "generateOmittedAsInternal")) - writer.uint32(/* id 2, wireType 0 =*/16).bool(message.generateOmittedAsInternal); - return writer; - }; - - /** - * Encodes the specified SelectiveGapicGeneration message, length delimited. Does not implicitly {@link google.api.SelectiveGapicGeneration.verify|verify} messages. - * @function encodeDelimited - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {google.api.ISelectiveGapicGeneration} message SelectiveGapicGeneration message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - SelectiveGapicGeneration.encodeDelimited = function encodeDelimited(message, writer) { - return this.encode(message, writer).ldelim(); - }; - - /** - * Decodes a SelectiveGapicGeneration message from the specified reader or buffer. 
- * @function decode - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SelectiveGapicGeneration.decode = function decode(reader, length, error) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.api.SelectiveGapicGeneration(); - while (reader.pos < end) { - var tag = reader.uint32(); - if (tag === error) - break; - switch (tag >>> 3) { - case 1: { - if (!(message.methods && message.methods.length)) - message.methods = []; - message.methods.push(reader.string()); - break; - } - case 2: { - message.generateOmittedAsInternal = reader.bool(); - break; - } - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Decodes a SelectiveGapicGeneration message from the specified reader or buffer, length delimited. - * @function decodeDelimited - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - SelectiveGapicGeneration.decodeDelimited = function decodeDelimited(reader) { - if (!(reader instanceof $Reader)) - reader = new $Reader(reader); - return this.decode(reader, reader.uint32()); - }; - - /** - * Verifies a SelectiveGapicGeneration message. - * @function verify - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - SelectiveGapicGeneration.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.methods != null && message.hasOwnProperty("methods")) { - if (!Array.isArray(message.methods)) - return "methods: array expected"; - for (var i = 0; i < message.methods.length; ++i) - if (!$util.isString(message.methods[i])) - return "methods: string[] expected"; - } - if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) - if (typeof message.generateOmittedAsInternal !== "boolean") - return "generateOmittedAsInternal: boolean expected"; - return null; - }; - - /** - * Creates a SelectiveGapicGeneration message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {Object.} object Plain object - * @returns {google.api.SelectiveGapicGeneration} SelectiveGapicGeneration - */ - SelectiveGapicGeneration.fromObject = function fromObject(object) { - if (object instanceof $root.google.api.SelectiveGapicGeneration) - return object; - var message = new $root.google.api.SelectiveGapicGeneration(); - if (object.methods) { - if (!Array.isArray(object.methods)) - throw TypeError(".google.api.SelectiveGapicGeneration.methods: array expected"); - message.methods = []; - for (var i = 0; i < object.methods.length; ++i) - message.methods[i] = String(object.methods[i]); - } - if (object.generateOmittedAsInternal != null) - message.generateOmittedAsInternal = Boolean(object.generateOmittedAsInternal); - return message; - }; - - /** - * Creates a plain object from a SelectiveGapicGeneration message. Also converts values to other types if specified. - * @function toObject - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {google.api.SelectiveGapicGeneration} message SelectiveGapicGeneration - * @param {$protobuf.IConversionOptions} [options] Conversion options - * @returns {Object.} Plain object - */ - SelectiveGapicGeneration.toObject = function toObject(message, options) { - if (!options) - options = {}; - var object = {}; - if (options.arrays || options.defaults) - object.methods = []; - if (options.defaults) - object.generateOmittedAsInternal = false; - if (message.methods && message.methods.length) { - object.methods = []; - for (var j = 0; j < message.methods.length; ++j) - object.methods[j] = message.methods[j]; - } - if (message.generateOmittedAsInternal != null && message.hasOwnProperty("generateOmittedAsInternal")) - object.generateOmittedAsInternal = message.generateOmittedAsInternal; - return object; - }; - - /** - * Converts this SelectiveGapicGeneration to JSON. - * @function toJSON - * @memberof google.api.SelectiveGapicGeneration - * @instance - * @returns {Object.} JSON object - */ - SelectiveGapicGeneration.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * Gets the default type url for SelectiveGapicGeneration - * @function getTypeUrl - * @memberof google.api.SelectiveGapicGeneration - * @static - * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") - * @returns {string} The default type url - */ - SelectiveGapicGeneration.getTypeUrl = function getTypeUrl(typeUrlPrefix) { - if (typeUrlPrefix === undefined) { - typeUrlPrefix = "type.googleapis.com"; - } - return typeUrlPrefix + "/google.api.SelectiveGapicGeneration"; - }; - - return SelectiveGapicGeneration; - })(); - /** * LaunchStage enum. 
* @name google.api.LaunchStage diff --git a/protos/protos.json b/protos/protos.json index 1b54c45df..bbea8fa39 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -33,19 +33,12 @@ "type": "FileDescriptorProto", "id": 1 } - }, - "extensions": [ - [ - 536000000, - 536000000 - ] - ] + } }, "Edition": { "edition": "proto2", "values": { "EDITION_UNKNOWN": 0, - "EDITION_LEGACY": 900, "EDITION_PROTO2": 998, "EDITION_PROTO3": 999, "EDITION_2023": 1000, @@ -84,11 +77,6 @@ "type": "int32", "id": 11 }, - "optionDependency": { - "rule": "repeated", - "type": "string", - "id": 15 - }, "messageType": { "rule": "repeated", "type": "DescriptorProto", @@ -177,10 +165,6 @@ "rule": "repeated", "type": "string", "id": 10 - }, - "visibility": { - "type": "SymbolVisibility", - "id": 11 } }, "nested": { @@ -406,10 +390,6 @@ "rule": "repeated", "type": "string", "id": 5 - }, - "visibility": { - "type": "SymbolVisibility", - "id": 6 } }, "nested": { @@ -624,7 +604,6 @@ 42, 42 ], - "php_generic_services", [ 38, 38 @@ -760,8 +739,7 @@ "type": "bool", "id": 10, "options": { - "default": false, - "deprecated": true + "default": false } }, "debugRedact": { @@ -789,10 +767,6 @@ "type": "FeatureSet", "id": 21 }, - "featureSupport": { - "type": "FeatureSupport", - "id": 22 - }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -862,26 +836,6 @@ "id": 2 } } - }, - "FeatureSupport": { - "fields": { - "editionIntroduced": { - "type": "Edition", - "id": 1 - }, - "editionDeprecated": { - "type": "Edition", - "id": 2 - }, - "deprecationWarning": { - "type": "string", - "id": 3 - }, - "editionRemoved": { - "type": "Edition", - "id": 4 - } - } } } }, @@ -970,10 +924,6 @@ "default": false } }, - "featureSupport": { - "type": "FieldOptions.FeatureSupport", - "id": 4 - }, "uninterpretedOption": { "rule": "repeated", "type": "UninterpretedOption", @@ -1116,7 +1066,6 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_2023", "edition_defaults.value": "EXPLICIT" } @@ -1127,7 +1076,6 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "OPEN" } @@ -1138,7 +1086,6 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "PACKED" } @@ -1149,7 +1096,6 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "VERIFY" } @@ -1160,8 +1106,7 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "feature_support.edition_introduced": "EDITION_2023", - "edition_defaults.edition": "EDITION_LEGACY", + "edition_defaults.edition": "EDITION_PROTO2", "edition_defaults.value": "LENGTH_PREFIXED" } }, @@ -1171,38 +1116,27 @@ "options": { "retention": "RETENTION_RUNTIME", "targets": "TARGET_TYPE_FILE", - "feature_support.edition_introduced": "EDITION_2023", "edition_defaults.edition": "EDITION_PROTO3", "edition_defaults.value": "ALLOW" } - }, - "enforceNamingStyle": { - "type": "EnforceNamingStyle", - "id": 7, - "options": { - "retention": "RETENTION_SOURCE", - "targets": "TARGET_TYPE_METHOD", - 
"feature_support.edition_introduced": "EDITION_2024", - "edition_defaults.edition": "EDITION_2024", - "edition_defaults.value": "STYLE2024" - } - }, - "defaultSymbolVisibility": { - "type": "VisibilityFeature.DefaultSymbolVisibility", - "id": 8, - "options": { - "retention": "RETENTION_SOURCE", - "targets": "TARGET_TYPE_FILE", - "feature_support.edition_introduced": "EDITION_2024", - "edition_defaults.edition": "EDITION_2024", - "edition_defaults.value": "EXPORT_TOP_LEVEL" - } } }, "extensions": [ [ 1000, - 9994 + 1000 + ], + [ + 1001, + 1001 + ], + [ + 1002, + 1002 + ], + [ + 9990, + 9990 ], [ 9995, @@ -1247,13 +1181,7 @@ "UTF8_VALIDATION_UNKNOWN": 0, "VERIFY": 2, "NONE": 3 - }, - "reserved": [ - [ - 1, - 1 - ] - ] + } }, "MessageEncoding": { "values": { @@ -1268,33 +1196,6 @@ "ALLOW": 1, "LEGACY_BEST_EFFORT": 2 } - }, - "EnforceNamingStyle": { - "values": { - "ENFORCE_NAMING_STYLE_UNKNOWN": 0, - "STYLE2024": 1, - "STYLE_LEGACY": 2 - } - }, - "VisibilityFeature": { - "fields": {}, - "reserved": [ - [ - 1, - 536870911 - ] - ], - "nested": { - "DefaultSymbolVisibility": { - "values": { - "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": 0, - "EXPORT_ALL": 1, - "EXPORT_TOP_LEVEL": 2, - "LOCAL_ALL": 3, - "STRICT": 4 - } - } - } } } }, @@ -1322,26 +1223,11 @@ "type": "Edition", "id": 3 }, - "overridableFeatures": { - "type": "FeatureSet", - "id": 4 - }, - "fixedFeatures": { + "features": { "type": "FeatureSet", - "id": 5 + "id": 2 } - }, - "reserved": [ - [ - 1, - 1 - ], - [ - 2, - 2 - ], - "features" - ] + } } } }, @@ -1354,12 +1240,6 @@ "id": 1 } }, - "extensions": [ - [ - 536000000, - 536000000 - ] - ], "nested": { "Location": { "fields": { @@ -1445,14 +1325,6 @@ } } }, - "SymbolVisibility": { - "edition": "proto2", - "values": { - "VISIBILITY_UNSET": 0, - "VISIBILITY_LOCAL": 1, - "VISIBILITY_EXPORT": 2 - } - }, "Any": { "fields": { "type_url": { @@ -1612,13 +1484,6 @@ }, "nested": { "Violation": { - "oneofs": { - "_futureQuotaValue": { - "oneof": [ - "futureQuotaValue" - ] - } - }, "fields": { "subject": { "type": "string", @@ -1627,34 +1492,6 @@ "description": { "type": "string", "id": 2 - }, - "apiService": { - "type": "string", - "id": 3 - }, - "quotaMetric": { - "type": "string", - "id": 4 - }, - "quotaId": { - "type": "string", - "id": 5 - }, - "quotaDimensions": { - "keyType": "string", - "type": "string", - "id": 6 - }, - "quotaValue": { - "type": "int64", - "id": 7 - }, - "futureQuotaValue": { - "type": "int64", - "id": 8, - "options": { - "proto3_optional": true - } } } } @@ -1705,14 +1542,6 @@ "description": { "type": "string", "id": 2 - }, - "reason": { - "type": "string", - "id": 3 - }, - "localizedMessage": { - "type": "LocalizedMessage", - "id": 4 } } } @@ -9864,7 +9693,8 @@ "java_multiple_files": true, "java_outer_classname": "LaunchStageProto", "java_package": "com.google.api", - "objc_class_prefix": "GAPI" + "objc_class_prefix": "GAPI", + "cc_enable_arenas": true }, "nested": { "fieldBehavior": { @@ -10087,10 +9917,6 @@ "rule": "repeated", "type": "ClientLibraryDestination", "id": 2 - }, - "selectiveGapicGeneration": { - "type": "SelectiveGapicGeneration", - "id": 3 } } }, @@ -10231,28 +10057,6 @@ "common": { "type": "CommonLanguageSettings", "id": 1 - }, - "experimentalFeatures": { - "type": "ExperimentalFeatures", - "id": 2 - } - }, - "nested": { - "ExperimentalFeatures": { - "fields": { - "restAsyncIoEnabled": { - "type": "bool", - "id": 1 - }, - "protobufPythonicTypesEnabled": { - "type": "bool", - "id": 2 - }, - "unversionedPackageDisabled": { - "type": "bool", - "id": 3 - } - } } 
} }, @@ -10310,11 +10114,6 @@ "common": { "type": "CommonLanguageSettings", "id": 1 - }, - "renamedServices": { - "keyType": "string", - "type": "string", - "id": 2 } } }, @@ -10376,19 +10175,6 @@ "PACKAGE_MANAGER": 20 } }, - "SelectiveGapicGeneration": { - "fields": { - "methods": { - "rule": "repeated", - "type": "string", - "id": 1 - }, - "generateOmittedAsInternal": { - "type": "bool", - "id": 2 - } - } - }, "LaunchStage": { "values": { "LAUNCH_STAGE_UNSPECIFIED": 0, @@ -10411,7 +10197,6 @@ "java_multiple_files": true, "java_outer_classname": "OperationsProto", "java_package": "com.google.longrunning", - "objc_class_prefix": "GLRUN", "php_namespace": "Google\\LongRunning" }, "nested": { @@ -10624,13 +10409,13 @@ "nested": { "v1": { "options": { + "cc_enable_arenas": true, "csharp_namespace": "Google.Cloud.Iam.V1", "go_package": "cloud.google.com/go/iam/apiv1/iampb;iampb", "java_multiple_files": true, "java_outer_classname": "PolicyProto", "java_package": "com.google.iam.v1", - "php_namespace": "Google\\Cloud\\Iam\\V1", - "cc_enable_arenas": true + "php_namespace": "Google\\Cloud\\Iam\\V1" }, "nested": { "IAMPolicy": { From a328811c35ed03e55cb8f1e4d3504c5484d2f8b6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 29 Jul 2025 04:28:22 +0000 Subject: [PATCH 31/31] chore(main): release 8.1.0 (#2305) :robot: I have created a release *beep* *boop* --- ## [8.1.0](https://togithub.com/googleapis/nodejs-spanner/compare/v8.0.0...v8.1.0) (2025-07-28) ### Features * Add Custom OpenTelemetry Exporter in for Service Metrics ([#2272](https://togithub.com/googleapis/nodejs-spanner/issues/2272)) ([610d1b9](https://togithub.com/googleapis/nodejs-spanner/commit/610d1b989ba186c0758791343deaa7f683c4bd26)) * Add methods from gax to cache proto root and process custom error details ([#2330](https://togithub.com/googleapis/nodejs-spanner/issues/2330)) ([1b3931a](https://togithub.com/googleapis/nodejs-spanner/commit/1b3931a799bdd052adc91703e59e1d0c83270065)) * Add metrics tracers ([#2319](https://togithub.com/googleapis/nodejs-spanner/issues/2319)) ([192bf2b](https://togithub.com/googleapis/nodejs-spanner/commit/192bf2bb603bca4ac481fcfd1f04974173adc6a1)) * Add support for AFE latency metrics ([#2348](https://togithub.com/googleapis/nodejs-spanner/issues/2348)) ([0666f05](https://togithub.com/googleapis/nodejs-spanner/commit/0666f05d589e2f229b44dffae8e9649220bccf8b)) * Add throughput_mode to UpdateDatabaseDdlRequest to be used by Spanner Migration Tool. 
See https://togithub.com/GoogleCloudPlatform/spanner-migration-tool ([#2304](https://togithub.com/googleapis/nodejs-spanner/issues/2304)) ([a29af56](https://togithub.com/googleapis/nodejs-spanner/commit/a29af56ae3c31f07115cb938bcf3f0f77241b725))
* Operation, Attempt, and GFE metrics ([#2328](https://togithub.com/googleapis/nodejs-spanner/issues/2328)) ([646e6ea](https://togithub.com/googleapis/nodejs-spanner/commit/646e6ea6f1dc5fa1937e512ae9e81ae4d2637ed0))
* Proto changes for an internal api ([#2356](https://togithub.com/googleapis/nodejs-spanner/issues/2356)) ([380e770](https://togithub.com/googleapis/nodejs-spanner/commit/380e7705a23a692168db386ba5426c91bf1587b6))
* **spanner:** A new field `snapshot_timestamp` is added to message `.google.spanner.v1.CommitResponse` ([#2350](https://togithub.com/googleapis/nodejs-spanner/issues/2350)) ([0875cd8](https://togithub.com/googleapis/nodejs-spanner/commit/0875cd82e99fa6c95ab38807e09c5921303775f8))
* **spanner:** Add new change_stream.proto ([#2315](https://togithub.com/googleapis/nodejs-spanner/issues/2315)) ([57d67be](https://togithub.com/googleapis/nodejs-spanner/commit/57d67be2e3b6d6ac2a8a903acf8613b27a049c3b))
* **spanner:** Add tpc support ([#2333](https://togithub.com/googleapis/nodejs-spanner/issues/2333)) ([a381cab](https://togithub.com/googleapis/nodejs-spanner/commit/a381cab92c31373a6a10edca0f8a8bdfc4415e4b))
* Track precommit token in r/w apis(multiplexed session) ([#2312](https://togithub.com/googleapis/nodejs-spanner/issues/2312)) ([3676bfa](https://togithub.com/googleapis/nodejs-spanner/commit/3676bfa60725c43f85a04ead87943be92e4a99f0))

### Bug Fixes

* Docs-test ([#2297](https://togithub.com/googleapis/nodejs-spanner/issues/2297)) ([61c571c](https://togithub.com/googleapis/nodejs-spanner/commit/61c571c729c2a065df6ff166db784a6e6eaef74d))
* Ensure context propagation works in Node.js 22 with async/await ([#2326](https://togithub.com/googleapis/nodejs-spanner/issues/2326)) ([e8cdbed](https://togithub.com/googleapis/nodejs-spanner/commit/e8cdbedd55f049b8c7766e97388ed045fedd1b4e))
* Pass the Span correctly ([#2332](https://togithub.com/googleapis/nodejs-spanner/issues/2332)) ([edaee77](https://togithub.com/googleapis/nodejs-spanner/commit/edaee7791b2d814f749ed35119dd705924984a78))
* System test against emulator ([#2339](https://togithub.com/googleapis/nodejs-spanner/issues/2339)) ([2a6af4c](https://togithub.com/googleapis/nodejs-spanner/commit/2a6af4c36484f44929a2fac80d8f225dad5d702c))
* Unhandled exceptions from gax ([#2338](https://togithub.com/googleapis/nodejs-spanner/issues/2338)) ([6428bcd](https://togithub.com/googleapis/nodejs-spanner/commit/6428bcd2980852c1bdbc4c3d0ab210a139e5f193))

### Performance Improvements

* Skip gRPC trailers for StreamingRead & ExecuteStreamingSql ([#2313](https://togithub.com/googleapis/nodejs-spanner/issues/2313)) ([8bd0781](https://togithub.com/googleapis/nodejs-spanner/commit/8bd0781e8b434a421f0e0f3395439a5a86c7847c))

---
This PR was generated with [Release Please](https://togithub.com/googleapis/release-please). See [documentation](https://togithub.com/googleapis/release-please#release-please).
--- CHANGELOG.md | 31 +++++++++++++++++++++++++++++++ package.json | 4 ++-- samples/package.json | 2 +- 3 files changed, 34 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e030df1f..6bca0dccb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,37 @@ [1]: https://www.npmjs.com/package/nodejs-spanner?activeTab=versions +## [8.1.0](https://github.com/googleapis/nodejs-spanner/compare/v8.0.0...v8.1.0) (2025-07-28) + + +### Features + +* Add Custom OpenTelemetry Exporter in for Service Metrics ([#2272](https://github.com/googleapis/nodejs-spanner/issues/2272)) ([610d1b9](https://github.com/googleapis/nodejs-spanner/commit/610d1b989ba186c0758791343deaa7f683c4bd26)) +* Add methods from gax to cache proto root and process custom error details ([#2330](https://github.com/googleapis/nodejs-spanner/issues/2330)) ([1b3931a](https://github.com/googleapis/nodejs-spanner/commit/1b3931a799bdd052adc91703e59e1d0c83270065)) +* Add metrics tracers ([#2319](https://github.com/googleapis/nodejs-spanner/issues/2319)) ([192bf2b](https://github.com/googleapis/nodejs-spanner/commit/192bf2bb603bca4ac481fcfd1f04974173adc6a1)) +* Add support for AFE latency metrics ([#2348](https://github.com/googleapis/nodejs-spanner/issues/2348)) ([0666f05](https://github.com/googleapis/nodejs-spanner/commit/0666f05d589e2f229b44dffae8e9649220bccf8b)) +* Add throughput_mode to UpdateDatabaseDdlRequest to be used by Spanner Migration Tool. See https://github.com/GoogleCloudPlatform/spanner-migration-tool ([#2304](https://github.com/googleapis/nodejs-spanner/issues/2304)) ([a29af56](https://github.com/googleapis/nodejs-spanner/commit/a29af56ae3c31f07115cb938bcf3f0f77241b725)) +* Operation, Attempt, and GFE metrics ([#2328](https://github.com/googleapis/nodejs-spanner/issues/2328)) ([646e6ea](https://github.com/googleapis/nodejs-spanner/commit/646e6ea6f1dc5fa1937e512ae9e81ae4d2637ed0)) +* Proto changes for an internal api ([#2356](https://github.com/googleapis/nodejs-spanner/issues/2356)) ([380e770](https://github.com/googleapis/nodejs-spanner/commit/380e7705a23a692168db386ba5426c91bf1587b6)) +* **spanner:** A new field `snapshot_timestamp` is added to message `.google.spanner.v1.CommitResponse` ([#2350](https://github.com/googleapis/nodejs-spanner/issues/2350)) ([0875cd8](https://github.com/googleapis/nodejs-spanner/commit/0875cd82e99fa6c95ab38807e09c5921303775f8)) +* **spanner:** Add new change_stream.proto ([#2315](https://github.com/googleapis/nodejs-spanner/issues/2315)) ([57d67be](https://github.com/googleapis/nodejs-spanner/commit/57d67be2e3b6d6ac2a8a903acf8613b27a049c3b)) +* **spanner:** Add tpc support ([#2333](https://github.com/googleapis/nodejs-spanner/issues/2333)) ([a381cab](https://github.com/googleapis/nodejs-spanner/commit/a381cab92c31373a6a10edca0f8a8bdfc4415e4b)) +* Track precommit token in r/w apis(multiplexed session) ([#2312](https://github.com/googleapis/nodejs-spanner/issues/2312)) ([3676bfa](https://github.com/googleapis/nodejs-spanner/commit/3676bfa60725c43f85a04ead87943be92e4a99f0)) + + +### Bug Fixes + +* Docs-test ([#2297](https://github.com/googleapis/nodejs-spanner/issues/2297)) ([61c571c](https://github.com/googleapis/nodejs-spanner/commit/61c571c729c2a065df6ff166db784a6e6eaef74d)) +* Ensure context propagation works in Node.js 22 with async/await ([#2326](https://github.com/googleapis/nodejs-spanner/issues/2326)) ([e8cdbed](https://github.com/googleapis/nodejs-spanner/commit/e8cdbedd55f049b8c7766e97388ed045fedd1b4e)) +* Pass the Span correctly 
([#2332](https://github.com/googleapis/nodejs-spanner/issues/2332)) ([edaee77](https://github.com/googleapis/nodejs-spanner/commit/edaee7791b2d814f749ed35119dd705924984a78)) +* System test against emulator ([#2339](https://github.com/googleapis/nodejs-spanner/issues/2339)) ([2a6af4c](https://github.com/googleapis/nodejs-spanner/commit/2a6af4c36484f44929a2fac80d8f225dad5d702c)) +* Unhandled exceptions from gax ([#2338](https://github.com/googleapis/nodejs-spanner/issues/2338)) ([6428bcd](https://github.com/googleapis/nodejs-spanner/commit/6428bcd2980852c1bdbc4c3d0ab210a139e5f193)) + + +### Performance Improvements + +* Skip gRPC trailers for StreamingRead & ExecuteStreamingSql ([#2313](https://github.com/googleapis/nodejs-spanner/issues/2313)) ([8bd0781](https://github.com/googleapis/nodejs-spanner/commit/8bd0781e8b434a421f0e0f3395439a5a86c7847c)) + ## [8.0.0](https://github.com/googleapis/nodejs-spanner/compare/v7.21.0...v8.0.0) (2025-05-12) diff --git a/package.json b/package.json index d92df37c8..d06f8bc68 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@google-cloud/spanner", "description": "Cloud Spanner Client Library for Node.js", - "version": "8.0.0", + "version": "8.1.0", "license": "Apache-2.0", "author": "Google Inc.", "engines": { @@ -86,7 +86,7 @@ "split-array-stream": "^2.0.0", "stack-trace": "0.0.10", "stream-events": "^1.0.5", - "teeny-request": "^10.0.0", + "teeny-request": "^10.0.0", "through2": "^4.0.2", "@babel/traverse": "7.27.7", "@babel/core": "7.27.7", diff --git a/samples/package.json b/samples/package.json index a0dcc8655..6c694e8bb 100644 --- a/samples/package.json +++ b/samples/package.json @@ -17,7 +17,7 @@ "dependencies": { "@google-cloud/kms": "^5.0.0", "@google-cloud/precise-date": "^5.0.0", - "@google-cloud/spanner": "^8.0.0", + "@google-cloud/spanner": "^8.1.0", "protobufjs": "^7.0.0", "yargs": "^17.0.0" },
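
A minimal usage sketch (not part of the patch itself) of how the `throughput_mode` field shipped in 8.1.0 can be passed through `updateDatabaseDdl`. It assumes the generated `v1.DatabaseAdminClient` surface exported by `@google-cloud/spanner`; the project/instance/database IDs and the DDL statement are placeholders.

```js
// Usage sketch only; resource names and DDL below are placeholders.
const {v1} = require('@google-cloud/spanner');

async function alterTableWithThroughputMode() {
  const databaseAdminClient = new v1.DatabaseAdminClient();

  const [operation] = await databaseAdminClient.updateDatabaseDdl({
    database: databaseAdminClient.databasePath(
      'my-project',
      'my-instance',
      'my-database'
    ),
    statements: ['ALTER TABLE Singers ADD COLUMN LastUpdated TIMESTAMP'],
    // Optional field added in this release; intended for use by the
    // Spanner Migration Tool (see the 8.1.0 changelog entry above).
    throughputMode: true,
  });

  // updateDatabaseDdl is a long-running operation; wait for it to finish.
  await operation.promise();
}

alterTableWithThroughputMode().catch(console.error);
```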