diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index b46e4c4d6..a013376d1 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -8,9 +8,9 @@ branchProtectionRules: - "ci/kokoro: Samples test" - "ci/kokoro: System test" - lint - - test (14) - - test (16) - test (18) + - test (20) + - test (22) - cla/google - windows - OwlBot Post Processor diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1f03c7b4d..883082c0b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18, 20, 22] steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 diff --git a/.github/workflows/system-tests-against-emulator.yaml b/.github/workflows/system-tests-against-emulator.yaml index a8c12427d..3a4fca8a1 100644 --- a/.github/workflows/system-tests-against-emulator.yaml +++ b/.github/workflows/system-tests-against-emulator.yaml @@ -19,7 +19,7 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - uses: actions/setup-node@v4 with: - node-version: 14 + node-version: 18 - run: node --version - run: npm install - run: npm run system-test diff --git a/.gitignore b/.gitignore index 604f64ee0..d4f03a0df 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,3 @@ system-test/*key.json .DS_Store package-lock.json __pycache__ -.idea diff --git a/.kokoro/continuous/node14/common.cfg b/.kokoro/continuous/node14/common.cfg deleted file mode 100644 index 02d5a407d..000000000 --- a/.kokoro/continuous/node14/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "nodejs-spanner/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-spanner/.kokoro/test.sh" -} diff --git a/.kokoro/continuous/node14/lint.cfg b/.kokoro/continuous/node14/lint.cfg deleted file mode 100644 index 629ea4ddf..000000000 --- a/.kokoro/continuous/node14/lint.cfg +++ /dev/null @@ -1,4 +0,0 @@ -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-spanner/.kokoro/lint.sh" -} diff --git a/.kokoro/continuous/node14/samples-test.cfg b/.kokoro/continuous/node14/samples-test.cfg deleted file mode 100644 index aa8319f41..000000000 --- a/.kokoro/continuous/node14/samples-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-spanner/.kokoro/samples-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/.kokoro/continuous/node14/system-test.cfg b/.kokoro/continuous/node14/system-test.cfg deleted file mode 100644 index f8dd221bf..000000000 --- a/.kokoro/continuous/node14/system-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-spanner/.kokoro/system-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/.kokoro/continuous/node14/test.cfg b/.kokoro/continuous/node14/test.cfg deleted file mode 100644 index e69de29bb..000000000 diff --git a/.kokoro/presubmit/node14/common.cfg b/.kokoro/presubmit/node14/common.cfg deleted file mode 100644 index 02d5a407d..000000000 --- a/.kokoro/presubmit/node14/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-spanner/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-spanner/.kokoro/test.sh" -} diff --git a/.kokoro/presubmit/node14/samples-test.cfg b/.kokoro/presubmit/node14/samples-test.cfg deleted file mode 100644 index aa8319f41..000000000 --- a/.kokoro/presubmit/node14/samples-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-spanner/.kokoro/samples-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/.kokoro/presubmit/node14/system-test.cfg b/.kokoro/presubmit/node14/system-test.cfg deleted file mode 100644 index f8dd221bf..000000000 --- a/.kokoro/presubmit/node14/system-test.cfg +++ /dev/null @@ -1,12 +0,0 @@ -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-spanner/.kokoro/system-test.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "long-door-651-kokoro-system-test-service-account" -} \ No newline at end of file diff --git a/.kokoro/presubmit/node14/test.cfg b/.kokoro/presubmit/node14/test.cfg deleted file mode 100644 index e69de29bb..000000000 diff --git a/.kokoro/presubmit/node14/system-test-multiplexed-session.cfg b/.kokoro/presubmit/node18/system-test-multiplexed-session.cfg similarity index 100% rename from .kokoro/presubmit/node14/system-test-multiplexed-session.cfg rename to .kokoro/presubmit/node18/system-test-multiplexed-session.cfg diff --git a/.kokoro/samples-test.sh b/.kokoro/samples-test.sh index 98e2fbbed..a05dabce1 100755 --- a/.kokoro/samples-test.sh +++ b/.kokoro/samples-test.sh @@ -16,7 +16,9 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=${HOME}/.npm-global +# Ensure the npm global directory is writable, otherwise rebuild `npm` +mkdir -p $NPM_CONFIG_PREFIX +npm config -g ls || npm i -g npm@`npm --version` # Setup service account credentials. 
export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account @@ -62,7 +64,7 @@ fi # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=14 +COVERAGE_NODE=18 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/CHANGELOG.md b/CHANGELOG.md index a33e92939..2e030df1f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,33 @@ [1]: https://www.npmjs.com/package/nodejs-spanner?activeTab=versions +## [8.0.0](https://github.com/googleapis/nodejs-spanner/compare/v7.21.0...v8.0.0) (2025-05-12) + + +### ⚠ BREAKING CHANGES + +* remove the arrify package ([#2292](https://github.com/googleapis/nodejs-spanner/issues/2292)) +* migrate to Node 18 ([#2271](https://github.com/googleapis/nodejs-spanner/issues/2271)) + +### Features + +* Add promise based signatures for createQueryPartitions ([#2284](https://github.com/googleapis/nodejs-spanner/issues/2284)) ([255d8a6](https://github.com/googleapis/nodejs-spanner/commit/255d8a6a5749b6a05cd87dd7444cab7dd75d3e42)) +* Add promise based signatures on createReadPartitions ([#2300](https://github.com/googleapis/nodejs-spanner/issues/2300)) ([7b8a1f7](https://github.com/googleapis/nodejs-spanner/commit/7b8a1f70f0de3aa5886a2cde9325c9a36222a311)) +* Support promise based signatures for execute method ([#2301](https://github.com/googleapis/nodejs-spanner/issues/2301)) ([bb857e1](https://github.com/googleapis/nodejs-spanner/commit/bb857e18459f717d67b9b3d144c2b022178363cb)) + + +### Bug Fixes + +* **deps:** Update dependency @google-cloud/kms to v5 ([#2289](https://github.com/googleapis/nodejs-spanner/issues/2289)) ([1ccb505](https://github.com/googleapis/nodejs-spanner/commit/1ccb505935e70b6f576f06e566325146ee68f3ff)) +* **deps:** Update dependency @google-cloud/precise-date to v5 
([#2290](https://github.com/googleapis/nodejs-spanner/issues/2290)) ([44f7575](https://github.com/googleapis/nodejs-spanner/commit/44f7575efd3751d0595beef2ec4eb9f39bc426d7)) +* **deps:** Update dependency big.js to v7 ([#2286](https://github.com/googleapis/nodejs-spanner/issues/2286)) ([0911297](https://github.com/googleapis/nodejs-spanner/commit/0911297cc33aec93c09ef2be42413f20c75fc2bf)) + + +### Miscellaneous Chores + +* Migrate to Node 18 ([#2271](https://github.com/googleapis/nodejs-spanner/issues/2271)) ([cab3f22](https://github.com/googleapis/nodejs-spanner/commit/cab3f229ccb2189bd5af0c25a3006b553f8a5453)) +* Remove the arrify package ([#2292](https://github.com/googleapis/nodejs-spanner/issues/2292)) ([e8f5ca1](https://github.com/googleapis/nodejs-spanner/commit/e8f5ca15125d570949769e6e66f0d911cb21f58d)) + ## [7.21.0](https://github.com/googleapis/nodejs-spanner/compare/v7.20.0...v7.21.0) (2025-04-15) diff --git a/OBSERVABILITY.md b/OBSERVABILITY.md index 92340cd98..ce82cab99 100644 --- a/OBSERVABILITY.md +++ b/OBSERVABILITY.md @@ -49,8 +49,10 @@ const { const exporter = new TraceExporter(); // Create the tracerProvider that the exporter shall be attached to. -const provider = new NodeTracerProvider({resource: resource}); -provider.addSpanProcessor(new BatchSpanProcessor(exporter)); +const provider = new NodeTracerProvider({ + resource: resource, + spanProcessors: [new BatchSpanProcessor(exporter)] +}); // Create the Cloud Spanner Client. 
const {Spanner} = require('@google-cloud/spanner'); diff --git a/benchmark/benchmarking-multiplexed-session.js b/benchmark/benchmarking-multiplexed-session.js index 7b005d211..6f529b28b 100644 --- a/benchmark/benchmarking-multiplexed-session.js +++ b/benchmark/benchmarking-multiplexed-session.js @@ -25,7 +25,7 @@ async function main( method, multiplexedEnabled, numThreads, - numQueries + numQueries, ) { // enable the env variable multiplexedEnabled === 'true' @@ -70,7 +70,7 @@ async function main( // push the time taken by thread to the array thread_execution_times.push( - (performance.now() - startThreadTime).toFixed(2) + (performance.now() - startThreadTime).toFixed(2), ); } @@ -94,7 +94,7 @@ async function main( // push the time taken by thread to the array thread_execution_times.push( - (performance.now() - startThreadTime).toFixed(2) + (performance.now() - startThreadTime).toFixed(2), ); } diff --git a/benchmark/spanner.ts b/benchmark/spanner.ts index 939e670ee..a62b474a6 100644 --- a/benchmark/spanner.ts +++ b/benchmark/spanner.ts @@ -61,25 +61,26 @@ let dbCounter = 1; require('yargs') .demand(1) .command('burstRead', 'Benchmarks a burst of read operations', {}, () => - runBurstRead().then(() => console.log('Benchmark finished')) + runBurstRead().then(() => console.log('Benchmark finished')), ) .example('node $0 burstRead') .command('burstWrite', 'Benchmarks a burst of write operations', {}, () => - runBurstWrite().then(() => console.log('Benchmark finished')) + runBurstWrite().then(() => console.log('Benchmark finished')), ) .example('node $0 burstWrite') .command( 'burstReadAndWrite', 'Benchmarks a burst of read and write operations', {}, - () => runBurstReadAndWrite().then(() => console.log('Benchmark finished')) + () => runBurstReadAndWrite().then(() => console.log('Benchmark finished')), ) .example('node $0 burstReadAndWrite') .command( 'multipleWriteBursts', 'Benchmarks a burst of read and then write operations', {}, - () => 
runMultipleWriteBursts().then(() => console.log('Benchmark finished')) + () => + runMultipleWriteBursts().then(() => console.log('Benchmark finished')), ) .example('node $0 multipleWriteBursts') .command( @@ -88,8 +89,8 @@ require('yargs') {}, () => runOneReadTransactionPerSecond().then(() => - console.log('Benchmark finished') - ) + console.log('Benchmark finished'), + ), ) .example('node $0 oneReadTransactionPerSecond') .command( @@ -98,8 +99,8 @@ require('yargs') {}, () => runOneWriteTransactionPerSecond().then(() => - console.log('Benchmark finished') - ) + console.log('Benchmark finished'), + ), ) .example('node $0 oneWriteTransactionPerSecond') .command( @@ -108,15 +109,15 @@ require('yargs') {}, () => runOneReadAndOneWriteTransactionPerSecond().then(() => - console.log('Benchmark finished') - ) + console.log('Benchmark finished'), + ), ) .example('node $0 oneReadAndOneWriteTransactionPerSecond') .command( 'steadyIncrease', 'Benchmarks getting max sessions sequentially', {}, - () => runSteadyIncrease().then(() => console.log('Benchmark finished')) + () => runSteadyIncrease().then(() => console.log('Benchmark finished')), ) .example('node $0 steadyIncrease') .wrap(120) @@ -210,7 +211,7 @@ async function setup() { } else { resolve(assignedPort); } - } + }, ); }); server.start(); @@ -219,51 +220,51 @@ async function setup() { spannerMock.batchCreateSessions, SimulatedExecutionTime.ofMinAndRandomExecTime( NETWORK_LATENCY_TIME + BATCH_CREATE_SESSIONS_MIN_TIME, - BATCH_CREATE_SESSIONS_RND_TIME - ) + BATCH_CREATE_SESSIONS_RND_TIME, + ), ); spannerMock.setExecutionTime( spannerMock.beginTransaction, SimulatedExecutionTime.ofMinAndRandomExecTime( NETWORK_LATENCY_TIME + BEGIN_TRANSACTION_MIN_TIME, - BEGIN_TRANSACTION_RND_TIME - ) + BEGIN_TRANSACTION_RND_TIME, + ), ); spannerMock.setExecutionTime( spannerMock.commit, SimulatedExecutionTime.ofMinAndRandomExecTime( NETWORK_LATENCY_TIME + COMMIT_TRANSACTION_MIN_TIME, - COMMIT_TRANSACTION_RND_TIME - ) + 
COMMIT_TRANSACTION_RND_TIME, + ), ); spannerMock.setExecutionTime( spannerMock.rollback, SimulatedExecutionTime.ofMinAndRandomExecTime( NETWORK_LATENCY_TIME + ROLLBACK_TRANSACTION_MIN_TIME, - ROLLBACK_TRANSACTION_RND_TIME - ) + ROLLBACK_TRANSACTION_RND_TIME, + ), ); spannerMock.setExecutionTime( spannerMock.executeStreamingSql, SimulatedExecutionTime.ofMinAndRandomExecTime( NETWORK_LATENCY_TIME + EXECUTE_STREAMING_SQL_MIN_TIME, - EXECUTE_STREAMING_SQL_RND_TIME - ) + EXECUTE_STREAMING_SQL_RND_TIME, + ), ); spannerMock.setExecutionTime( spannerMock.executeSql, SimulatedExecutionTime.ofMinAndRandomExecTime( NETWORK_LATENCY_TIME + EXECUTE_SQL_MIN_TIME, - EXECUTE_SQL_RND_TIME - ) + EXECUTE_SQL_RND_TIME, + ), ); spannerMock.putStatementResult( selectSql, - mock.StatementResult.resultSet(createSelect1ResultSet()) + mock.StatementResult.resultSet(createSelect1ResultSet()), ); spannerMock.putStatementResult( updateSql, - mock.StatementResult.updateCount(1) + mock.StatementResult.updateCount(1), ); spanner = new Spanner({ @@ -309,7 +310,7 @@ async function burstRead() { database, NUM_BURST_READ, RND_WAIT_TIME_BETWEEN_REQUESTS, - HOLD_SESSION_TIME + HOLD_SESSION_TIME, ); await Promise.all(promises); if (incStep) { @@ -344,7 +345,7 @@ async function burstWrite() { const promises = queueWriteOperations( database, NUM_BURST_WRITE, - RND_WAIT_TIME_BETWEEN_REQUESTS + RND_WAIT_TIME_BETWEEN_REQUESTS, ); await Promise.all(promises); if (incStep) { @@ -382,12 +383,12 @@ async function burstReadAndWrite() { database, NUM_BURST_READ, RND_WAIT_TIME_BETWEEN_REQUESTS, - HOLD_SESSION_TIME + HOLD_SESSION_TIME, ); const writePromises = queueWriteOperations( database, NUM_BURST_WRITE, - RND_WAIT_TIME_BETWEEN_REQUESTS + RND_WAIT_TIME_BETWEEN_REQUESTS, ); await Promise.all(readPromises.concat(writePromises)); if (incStep) { @@ -422,7 +423,7 @@ async function multipleWriteBursts() { const writePromises = queueWriteOperations( database, NUM_BURST_WRITE, - RND_WAIT_TIME_BETWEEN_REQUESTS + 
RND_WAIT_TIME_BETWEEN_REQUESTS, ); await Promise.all(writePromises); await new Promise(resolve => setTimeout(resolve, WAIT_BETWEEN_BURSTS)); @@ -455,12 +456,12 @@ async function oneReadTransactionPerSecond() { database, NUM_TRANSACTIONS, RND_WAIT_TIME_BETWEEN_REQUESTS, - 0 + 0, ); readPromises.forEach(p => p.then(t => { console.log(`Time taken: ${t}ms`); - }) + }), ); const t = await Promise.all(readPromises); const max = Math.max(...t); @@ -494,12 +495,12 @@ async function oneWriteTransactionPerSecond() { const writePromises = queueWriteOperations( database, NUM_TRANSACTIONS, - RND_WAIT_TIME_BETWEEN_REQUESTS + RND_WAIT_TIME_BETWEEN_REQUESTS, ); writePromises.forEach(p => p.then(t => { console.log(`Time taken: ${t}ms`); - }) + }), ); const t = await Promise.all(writePromises); const max = Math.max(...t); @@ -533,22 +534,22 @@ async function oneReadAndOneWriteTransactionPerSecond() { database, NUM_READ_TRANSACTIONS, RND_WAIT_TIME_BETWEEN_REQUESTS, - 0 + 0, ); const writePromises = queueWriteOperations( database, NUM_WRITE_TRANSACTIONS, - RND_WAIT_TIME_BETWEEN_REQUESTS + RND_WAIT_TIME_BETWEEN_REQUESTS, ); readPromises.forEach(p => p.then(t => { console.log(`Read tx: ${t}ms`); - }) + }), ); writePromises.forEach(p => p.then(t => { console.log(`Write tx: ${t}ms`); - }) + }), ); const t = await Promise.all(readPromises.concat(writePromises)); const max = Math.max(...t); @@ -621,7 +622,7 @@ function queueReadOperations( database: Database, numRequests: number, waitBetweenRequests: number, - holdSessionTime: number + holdSessionTime: number, ): Promise[] { const promises: Promise[] = []; for (let run = 0; run < numRequests; run++) { @@ -644,7 +645,7 @@ function queueReadOperations( resolve(performance.now() - t1); }); }, Math.random() * waitBetweenRequests); - }) + }), ); } return promises; @@ -663,23 +664,31 @@ function queueReadOperations( function queueWriteOperations( database: Database, numRequests: number, - waitBetweenRequests: number + waitBetweenRequests: number, 
): Promise[] { const promises: Promise[] = []; for (let run = 0; run < numRequests; run++) { promises.unshift( - new Promise(resolve => { + new Promise((resolve, rejects) => { setTimeout(() => { const t1 = performance.now(); database.runTransaction((err, tx) => { tx! .runUpdate(updateSql) .then(() => - tx!.commit().then(() => resolve(performance.now() - t1)) - ); + tx! + .commit() + .then(() => resolve(performance.now() - t1)) + .catch(err => { + rejects(err); + }), + ) + .catch(err => { + rejects(err); + }); }); }, Math.random() * waitBetweenRequests); - }) + }), ); } return promises; diff --git a/benchmark/ycsb.js b/benchmark/ycsb.js index e414cd280..055d56528 100644 --- a/benchmark/ycsb.js +++ b/benchmark/ycsb.js @@ -50,7 +50,7 @@ require('yargs') describe: 'The number of buckets in output', }, }, - runWorkloads + runWorkloads, ).argv; function formatOptions(argv) { @@ -80,7 +80,7 @@ function printMetrics(workload) { console.log( dedent`[OVERALL], RunTime(ms), ${workload.duration} - [OVERALL], Throughput(ops/sec), ${totalOps / (workload.duration / 1000)}` + [OVERALL], Throughput(ops/sec), ${totalOps / (workload.duration / 1000)}`, ); workload.operations.forEach(operation => { @@ -97,7 +97,7 @@ function printMetrics(workload) { ${opName}, 95thPercentileLatency(us), ${stats.percentile(lats, 0.95)} ${opName}, 99thPercentileLatency(us), ${stats.percentile(lats, 0.99)} ${opName}, 99.9thPercentileLatency(us), ${stats.percentile(lats, 0.999)} - ${opName}, Return=OK, ${ops}` + ${opName}, Return=OK, ${ops}`, ); for (let i = 0; i < numBucket; i++) { @@ -135,6 +135,6 @@ function runWorkloads(argv) { return Promise.all( Array(options.get('num_worker') || 1) .fill(0) - .map(() => runWorkload(database, options)) + .map(() => runWorkload(database, options)), ); } diff --git a/bin/benchwrapper.js b/bin/benchwrapper.js index 7f08d0008..231c7634f 100644 --- a/bin/benchwrapper.js +++ b/bin/benchwrapper.js @@ -38,7 +38,7 @@ const spannerBenchWrapper = protoDescriptor.spanner_bench; 
// The benchwrapper should only be executed against an emulator. if (!process.env.SPANNER_EMULATOR_HOST) { throw new Error( - 'This benchmarking server only works when connected to an emulator. Please set SPANNER_EMULATOR_HOST.' + 'This benchmarking server only works when connected to an emulator. Please set SPANNER_EMULATOR_HOST.', ); } // This will connect the Spanner client to an emulator, as SPANNER_EMULATOR_HOST has been set. @@ -110,7 +110,7 @@ function Update(call, callback) { .withCode(err.code) .withDetails(err.details || err.message) .withMetadata(err.metadata) - .build() + .build(), ); transaction.rollback().then(() => {}); return; @@ -145,5 +145,5 @@ server.bindAsync( return; } server.start(); - } + }, ); diff --git a/bin/benchwrapper_test_client.js b/bin/benchwrapper_test_client.js index ab8c2ecf0..a7fc5d468 100644 --- a/bin/benchwrapper_test_client.js +++ b/bin/benchwrapper_test_client.js @@ -39,7 +39,7 @@ const spannerBenchWrapper = protoDescriptor.spanner_bench; console.log(`connecting to localhost:${argv.port}`); const client = new spannerBenchWrapper.SpannerBenchWrapper( `localhost:${argv.port}`, - grpc.credentials.createInsecure() + grpc.credentials.createInsecure(), ); const readReq = { Query: 'SELECT 1 AS COL1 UNION ALL SELECT 2 AS COL1', diff --git a/observability-test/batch-transaction.ts b/observability-test/batch-transaction.ts index 89cd49b17..47c124d39 100644 --- a/observability-test/batch-transaction.ts +++ b/observability-test/batch-transaction.ts @@ -116,8 +116,8 @@ describe('BatchTransaction', () => { const provider = new NodeTracerProvider({ sampler: sampler, exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); afterEach(() => { traceExporter.reset(); @@ -176,7 +176,7 @@ describe('BatchTransaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: 
${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that createPartitions_ is a child span of createQueryPartitions. @@ -184,29 +184,29 @@ describe('BatchTransaction', () => { const spanCreateQueryPartitions = spans[1]; assert.ok( spanCreateQueryPartitions.spanContext().traceId, - 'Expected that createQueryPartitions has a defined traceId' + 'Expected that createQueryPartitions has a defined traceId', ); assert.ok( spanCreatePartitions_.spanContext().traceId, - 'Expected that createPartitions_ has a defined traceId' + 'Expected that createPartitions_ has a defined traceId', ); assert.deepStrictEqual( spanCreatePartitions_.spanContext().traceId, spanCreateQueryPartitions.spanContext().traceId, - 'Expected that both spans share a traceId' + 'Expected that both spans share a traceId', ); assert.ok( spanCreateQueryPartitions.spanContext().spanId, - 'Expected that createQueryPartitions has a defined spanId' + 'Expected that createQueryPartitions has a defined spanId', ); assert.ok( spanCreatePartitions_.spanContext().spanId, - 'Expected that createPartitions_ has a defined spanId' + 'Expected that createPartitions_ has a defined spanId', ); assert.deepStrictEqual( - spanCreatePartitions_.parentSpanId, + spanCreatePartitions_.parentSpanContext.spanId, spanCreateQueryPartitions.spanContext().spanId, - 'Expected that createQueryPartitions is the parent to createPartitions_' + 'Expected that createQueryPartitions is the parent to createPartitions_', ); done(); }); @@ -239,7 +239,7 @@ describe('BatchTransaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); done(); }); diff --git a/observability-test/database.ts b/observability-test/database.ts index 9ae9ccd85..7473d3d28 100644 --- a/observability-test/database.ts +++ 
b/observability-test/database.ts @@ -101,12 +101,12 @@ class FakeSession { } partitionedDml(): FakeTransaction { return new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.PartitionedDml + {} as google.spanner.v1.TransactionOptions.PartitionedDml, ); } snapshot(): FakeTransaction { return new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); } } @@ -151,7 +151,7 @@ class FakeTransaction extends EventEmitter { setReadWriteTransactionOptions(options: RunTransactionOptions) {} commit( options?: CommitOptions, - callback?: CommitCallback + callback?: CommitCallback, ): void | Promise { if (callback) { callback(null, {commitTimestamp: {seconds: 1, nanos: 0}}); @@ -249,7 +249,7 @@ describe('Database', () => { }); const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - INSTANCE.formattedName_ + '/databases/' + NAME + INSTANCE.formattedName_ + '/databases/' + NAME, ); beforeEach(() => { @@ -280,8 +280,8 @@ describe('Database', () => { const provider = new NodeTracerProvider({ sampler: sampler, exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); afterEach(() => { sandbox.restore(); @@ -317,7 +317,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span's status code is UNSET. @@ -325,7 +325,7 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Expected an OK span status' + 'Expected an OK span status', ); // We don't expect events. 
@@ -333,7 +333,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -367,7 +367,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -375,12 +375,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'our error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. @@ -388,7 +388,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -422,7 +422,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span didn't encounter an error. @@ -430,12 +430,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. 
@@ -443,7 +443,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -475,7 +475,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -483,12 +483,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'batchCreateSessions.error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. @@ -496,7 +496,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -516,7 +516,7 @@ describe('Database', () => { fakeSessionFactory = database.sessionFactory_; fakeSession = new FakeSession(); fakeSnapshot = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); beginSnapshotStub = ( @@ -557,7 +557,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -565,12 +565,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'our snapshot error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. @@ -578,7 +578,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -593,10 +593,10 @@ describe('Database', () => { const fakeSession2 = new FakeSession(); const fakeSnapshot2 = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); (sandbox.stub(fakeSnapshot2, 'begin') as sinon.SinonStub).callsFake( - callback => callback(null) + callback => callback(null), ); sandbox.stub(fakeSession2, 'snapshot').returns(fakeSnapshot2); @@ -643,7 +643,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the first span actually produced an error that was recorded. @@ -651,48 +651,48 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, parentSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'Session not found', parentSpan.status.message.toString(), - 'Mismatched span status message' + 'Mismatched span status message', ); // Ensure that the second span is a child of the first span. 
const secondRetrySpan = spans[1]; assert.ok( parentSpan.spanContext().traceId, - 'Expected that the initial parent span has a defined traceId' + 'Expected that the initial parent span has a defined traceId', ); assert.ok( secondRetrySpan.spanContext().traceId, - 'Expected that the second retry span has a defined traceId' + 'Expected that the second retry span has a defined traceId', ); assert.deepStrictEqual( parentSpan.spanContext().traceId, secondRetrySpan.spanContext().traceId, - 'Expected that both spans share a traceId' + 'Expected that both spans share a traceId', ); assert.ok( parentSpan.spanContext().spanId, - 'Expected that the initial parent span has a defined spanId' + 'Expected that the initial parent span has a defined spanId', ); assert.ok( secondRetrySpan.spanContext().spanId, - 'Expected that the second retry span has a defined spanId' + 'Expected that the second retry span has a defined spanId', ); assert.deepStrictEqual( - secondRetrySpan.parentSpanId, + secondRetrySpan.parentSpanContext.spanId, parentSpan.spanContext().spanId, - 'Expected that secondRetrySpan is the child to parentSpan' + 'Expected that secondRetrySpan is the child to parentSpan', ); const expectedEventNames = ['No session available']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -745,7 +745,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -753,12 +753,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'with session error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. @@ -766,7 +766,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -817,7 +817,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -825,19 +825,19 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status' + 'Unexpected span status', ); assert.strictEqual( undefined, firstSpan.status.message, - `No span status message expected\n\tGot: undefined\n\tWant: ${firstSpan.status.message}` + `No span status message expected\n\tGot: undefined\n\tWant: ${firstSpan.status.message}`, ); const expectedEventNames = ['Using Session']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -883,7 +883,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span 
actually produced an error that was recorded. @@ -891,12 +891,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'our createBatchTransaction error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. @@ -904,7 +904,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -923,7 +923,7 @@ describe('Database', () => { fakePool = database.pool_; fakeSession = new FakeSession(); fakeTransaction = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); getSessionStub = ( @@ -957,12 +957,11 @@ describe('Database', () => { actualEventNames.push(event.name); }); }); - const expectedSpanNames = ['CloudSpanner.Database.getTransaction']; assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // In the event of a sessionPool error, we should not have events. @@ -970,7 +969,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `event names mismatch:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `event names mismatch:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -978,19 +977,19 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'pool error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); assert.strictEqual( spans[0].attributes['transaction.tag'], - 'transaction-tag' + 'transaction-tag', ); done(); - } + }, ); }); @@ -1017,7 +1016,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that we have specific events. @@ -1025,7 +1024,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `event names mismatch:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `event names mismatch:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -1033,12 +1032,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - `Unexpected span status code: ${firstSpan.status.code}` + `Unexpected span status code: ${firstSpan.status.code}`, ); assert.strictEqual( undefined, firstSpan.status.message, - `Unexpected status message: ${firstSpan.status.message}` + `Unexpected status message: ${firstSpan.status.message}`, ); done(); @@ -1056,7 +1055,7 @@ describe('Database', () => { const SESSION = new FakeSession(); const RESPONSE = {commitTimestamp: {seconds: 1, nanos: 0}}; const TRANSACTION = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); let sessionFactory: FakeSessionFactory; @@ -1066,7 +1065,7 @@ describe('Database', () => { (sandbox.stub(sessionFactory, 'getSession') as sinon.SinonStub).callsFake( callback => { callback(null, SESSION, TRANSACTION); - } + }, ); sandbox.stub(sessionFactory, 'isMultiplexedEnabled').returns(false); }); @@ -1075,7 +1074,7 @@ describe('Database', () => { const fakeErr = new Error('getting session error'); (sessionFactory.getSession as sinon.SinonStub).callsFake(callback => - callback(fakeErr, null, null) + callback(fakeErr, null, null), ); database.writeAtLeastOnce(mutations, err => { @@ -1098,7 +1097,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -1106,12 +1105,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'getting session error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. @@ -1119,7 +1118,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -1133,7 +1132,7 @@ describe('Database', () => { assert.ifError(err); assert.deepStrictEqual( response.commitTimestamp, - RESPONSE.commitTimestamp + RESPONSE.commitTimestamp, ); const spans = traceExporter.getFinishedSpans(); @@ -1153,7 +1152,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -1161,19 +1160,19 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); const expectedEventNames = ['Using Session']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -1211,7 +1210,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -1219,14 +1218,14 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); const errorMessage = firstSpan.status.message; assert.ok( errorMessage.includes( - "Cannot read properties of null (reading 'proto')" - ) || errorMessage.includes("Cannot read property 'proto' of null") + "Cannot read properties of null (reading 'proto')", + ) || errorMessage.includes("Cannot read property 'proto' of null"), ); // We expect an exception to have been caught as well as a Session event. 
@@ -1234,7 +1233,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -1316,7 +1315,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -1324,12 +1323,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.deepStrictEqual( firstSpan.status.message, - sessionNotFoundError.message + sessionNotFoundError.message, ); // The last span should not have an error status. @@ -1337,7 +1336,7 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, lastSpan.status.code, - 'Unexpected span status' + 'Unexpected span status', ); assert.deepStrictEqual(lastSpan.status.message, undefined); @@ -1350,7 +1349,7 @@ describe('Database', () => { assert.deepStrictEqual(actualEventNames, expectedEventNames); assert.strictEqual( spans[0].attributes['transaction.tag'], - 'batch-write-tag' + 'batch-write-tag', ); done(); }); @@ -1386,7 +1385,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -1394,7 +1393,7 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.deepStrictEqual(firstSpan.status.message, fakeError.message); @@ -1431,7 +1430,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -1439,13 +1438,13 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); const expectedEventNames = ['Using Session']; @@ -1462,7 +1461,7 @@ describe('Database', () => { describe('runTransaction', () => { const SESSION = new FakeSession(); const TRANSACTION = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); let pool: FakeSessionPool; @@ -1473,7 +1472,7 @@ describe('Database', () => { (sandbox.stub(pool, 'getSession') as sinon.SinonStub).callsFake( callback => { callback(null, SESSION, TRANSACTION); - } + }, ); }); @@ -1481,7 +1480,7 @@ describe('Database', () => { const fakeErr = new Error('getting a session'); (pool.getSession as sinon.SinonStub).callsFake(callback => - callback(fakeErr) + callback(fakeErr), ); database.runTransaction( @@ -1506,7 +1505,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually 
produced an error that was recorded. @@ -1514,12 +1513,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'getting a session', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. @@ -1527,15 +1526,15 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); assert.strictEqual( spans[0].attributes['transaction.tag'], - 'transaction-tag' + 'transaction-tag', ); done(); - } + }, ); }); @@ -1564,7 +1563,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -1572,12 +1571,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'internal rejects err', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. 
@@ -1585,7 +1584,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -1596,7 +1595,7 @@ describe('Database', () => { describe('runTransactionAsync', () => { const SESSION = new FakeSession(); const TRANSACTION = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); let pool: FakeSessionPool; @@ -1606,7 +1605,7 @@ describe('Database', () => { (sandbox.stub(pool, 'getSession') as sinon.SinonStub).callsFake( callback => { callback(null, SESSION, TRANSACTION); - } + }, ); }); @@ -1623,7 +1622,7 @@ describe('Database', () => { const result = await txn.run('SELECT 1'); await txn.commit(); return result; - } + }, ); assert.strictEqual(value, fakeValue); @@ -1646,7 +1645,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -1654,23 +1653,23 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status' + 'Unexpected span status', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); const expectedEventNames = ['Using Session']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); assert.strictEqual( spans[0].attributes['transaction.tag'], - 'transaction-tag' + 'transaction-tag', ); }); @@ -1680,7 +1679,7 @@ describe('Database', () => { .stub(FakeAsyncTransactionRunner.prototype, 'run') .throws(ourException); - assert.rejects(async () => { + await assert.rejects(async () => { await database.runTransactionAsync(async txn => { const result = await txn.run('SELECT 1'); await txn.commit(); @@ -1706,7 +1705,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -1714,19 +1713,19 @@ describe('Database', () => { assert.strictEqual( firstSpan.status.code, SpanStatusCode.ERROR, - 'Unexpected span status' + 'Unexpected span status', ); assert.strictEqual( firstSpan.status.message, ourException.message, - 'Unexpected span status message' + 'Unexpected span status message', ); const expectedEventNames = ['Using Session', 'exception']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); }); }); @@ -1753,10 +1752,10 @@ describe('Database', () => { fakeSession = new FakeSession(); fakeSession2 = new FakeSession(); fakeSnapshot = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); fakeSnapshot2 = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); fakeStream = through.obj(); fakeStream2 = through.obj(); @@ -1805,7 +1804,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -1813,12 +1812,12 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'getSession error', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // We don't expect events. 
@@ -1826,7 +1825,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); assert.strictEqual(spans[0].attributes['request.tag'], 'request-tag'); @@ -1859,7 +1858,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. @@ -1867,19 +1866,19 @@ describe('Database', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.strictEqual( 'propagation err', firstSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); const expectedEventNames = ['Using Session']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -1931,7 +1930,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the span actually produced an error that was recorded. 
@@ -1939,12 +1938,12 @@ describe('Database', () => { assert.deepStrictEqual( SpanStatusCode.ERROR, lastSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.deepStrictEqual( 'Session not found', lastSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); // Ensure that the final span that got retries did not error. @@ -1952,12 +1951,12 @@ describe('Database', () => { assert.deepStrictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.deepStrictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); const expectedEventNames = [ @@ -1968,7 +1967,7 @@ describe('Database', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -1993,7 +1992,7 @@ describe('Database', () => { let fakeSessionFactory: FakeSessionFactory; let fakeSession: FakeSession; let fakePartitionedDml = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.PartitionedDml + {} as google.spanner.v1.TransactionOptions.PartitionedDml, ); let getSessionStub; @@ -2003,13 +2002,13 @@ describe('Database', () => { fakeSessionFactory = database.sessionFactory_; fakeSession = new FakeSession(); fakePartitionedDml = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.PartitionedDml + {} as google.spanner.v1.TransactionOptions.PartitionedDml, ); getSessionStub = ( sandbox.stub( fakeSessionFactory, - 'getSessionForPartitionedOps' + 'getSessionForPartitionedOps', ) as sinon.SinonStub ).callsFake(callback => { callback(null, fakeSession); @@ -2073,7 +2072,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: 
${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the first span actually produced an error that was recorded. @@ -2081,19 +2080,19 @@ describe('Database', () => { assert.deepStrictEqual( SpanStatusCode.ERROR, parentSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.deepStrictEqual( fakeError.message, parentSpan.status.message.toString(), - 'Mismatched span status message' + 'Mismatched span status message', ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); assert.strictEqual(spans[0].attributes['request.tag'], 'request-tag'); @@ -2126,7 +2125,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the first span actually produced an error that was recorded. 
@@ -2134,19 +2133,19 @@ describe('Database', () => { assert.deepStrictEqual( SpanStatusCode.ERROR, parentSpan.status.code, - 'Expected an ERROR span status' + 'Expected an ERROR span status', ); assert.deepStrictEqual( fakeError.message, parentSpan.status.message.toString(), - 'Mismatched span status message' + 'Mismatched span status message', ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); }); @@ -2169,7 +2168,7 @@ describe('Database', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Ensure that the first span actually produced an error that was recorded. @@ -2177,19 +2176,19 @@ describe('Database', () => { assert.deepStrictEqual( SpanStatusCode.UNSET, parentSpan.status.code, - 'Unexpected span status' + 'Unexpected span status', ); assert.deepStrictEqual( undefined, parentSpan.status.message, - 'Mismatched span status message' + 'Mismatched span status message', ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); }); diff --git a/observability-test/helper.ts b/observability-test/helper.ts index 591171666..46006ee1b 100644 --- a/observability-test/helper.ts +++ b/observability-test/helper.ts @@ -61,7 +61,7 @@ export function generateWithAllSpansHaveDBName(dbName: String): Function { assert.deepStrictEqual( span.attributes[SEMATTRS_DB_NAME], dbName, - `Span ${span.name} has mismatched DB_NAME` + `Span ${span.name} has mismatched DB_NAME`, ); 
}); }; @@ -70,7 +70,7 @@ export function generateWithAllSpansHaveDBName(dbName: String): Function { export async function verifySpansAndEvents( traceExporter, expectedSpans, - expectedEvents + expectedEvents, ) { await traceExporter.forceFlush(); const spans = traceExporter.getFinishedSpans(); @@ -85,11 +85,11 @@ export async function verifySpansAndEvents( assert.deepStrictEqual( actualSpanNames, expectedSpans, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpans}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpans}`, ); assert.deepStrictEqual( actualEventNames, expectedEvents, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEvents}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEvents}`, ); } diff --git a/observability-test/observability.ts b/observability-test/observability.ts index 2a9e87bd8..d40d7b98e 100644 --- a/observability-test/observability.ts +++ b/observability-test/observability.ts @@ -56,8 +56,8 @@ describe('startTrace', () => { const globalProvider = new NodeTracerProvider({ sampler: sampler, exporter: globalExporter, + spanProcessors: [new SimpleSpanProcessor(globalExporter)], }); - globalProvider.addSpanProcessor(new SimpleSpanProcessor(globalExporter)); globalProvider.register(); const contextManager = new AsyncHooksContextManager(); @@ -80,7 +80,7 @@ describe('startTrace', () => { assert.equal( span.name, SPAN_NAMESPACE_PREFIX + '.mySpan', - 'name mismatch' + 'name mismatch', ); }); }); @@ -90,10 +90,8 @@ describe('startTrace', () => { const overridingProvider = new NodeTracerProvider({ sampler: sampler, exporter: overridingExporter, + spanProcessors: [new SimpleSpanProcessor(overridingExporter)], }); - overridingProvider.addSpanProcessor( - new SimpleSpanProcessor(overridingExporter) - ); startTrace( 'aSpan', @@ -106,19 +104,19 @@ describe('startTrace', () => { assert.strictEqual( gotSpansFromGlobal.length, 0, - 'Expected no spans from the global 
tracerProvider and exporter but got ${gotSpansFromGlobal.length}' + 'Expected no spans from the global tracerProvider and exporter but got ${gotSpansFromGlobal.length}', ); const gotSpansFromCurrent = overridingExporter.getFinishedSpans(); assert.strictEqual( gotSpansFromCurrent.length, 1, - 'Expected exactly 1 span but got ${gotSpansFromCurrent.length}' + 'Expected exactly 1 span but got ${gotSpansFromCurrent.length}', ); overridingExporter.forceFlush(); await overridingProvider.shutdown(); - } + }, ); }); @@ -133,43 +131,43 @@ describe('startTrace', () => { assert.equal( span.attributes[ATTR_OTEL_SCOPE_NAME], TRACER_NAME, - 'Missing OTEL_SCOPE_NAME attribute' + 'Missing OTEL_SCOPE_NAME attribute', ); assert.equal( span.attributes[ATTR_OTEL_SCOPE_VERSION], TRACER_VERSION, - 'Missing OTEL_SCOPE_VERSION attribute' + 'Missing OTEL_SCOPE_VERSION attribute', ); assert.equal( span.attributes['gcp.client.service'], 'spanner', - 'Missing gcp.client.service attribute' + 'Missing gcp.client.service attribute', ); assert.equal( span.attributes['gcp.client.version'], TRACER_VERSION, - 'Missing gcp.client.version attribute' + 'Missing gcp.client.version attribute', ); assert.equal( span.attributes['gcp.client.repo'], 'googleapis/nodejs-spanner', - 'Missing gcp.client.repo attribute' + 'Missing gcp.client.repo attribute', ); assert.equal( span.attributes[SEMATTRS_DB_SQL_TABLE], 'table', - 'Missing DB_SQL_TABLE attribute' + 'Missing DB_SQL_TABLE attribute', ); assert.equal( span.attributes[SEMATTRS_DB_NAME], 'db', - 'Missing DB_NAME attribute' + 'Missing DB_NAME attribute', ); }); }); @@ -180,7 +178,7 @@ describe('startTrace', () => { assert.equal( span.attributes[SEMATTRS_DB_STATEMENT], undefined, - 'Unexpected DB_STATEMENT attribute' + 'Unexpected DB_STATEMENT attribute', ); }); }); @@ -195,7 +193,7 @@ describe('startTrace', () => { assert.equal( span.attributes[SEMATTRS_DB_STATEMENT], 'SELECT CURRENT_TIMESTAMP()', - 'Mismatched DB_STATEMENT attribute' + 'Mismatched 
DB_STATEMENT attribute', ); }); }); @@ -210,7 +208,7 @@ describe('startTrace', () => { assert.equal( span.attributes[SEMATTRS_DB_STATEMENT], undefined, - 'Mismatched DB_STATEMENT attribute' + 'Mismatched DB_STATEMENT attribute', ); }); }); @@ -226,7 +224,7 @@ describe('startTrace', () => { assert.equal( span.attributes[SEMATTRS_DB_STATEMENT], 'SELECT 1=1', - 'Mismatched DB_STATEMENT attribute' + 'Mismatched DB_STATEMENT attribute', ); }); }); @@ -242,7 +240,7 @@ describe('startTrace', () => { assert.equal( span.attributes[SEMATTRS_DB_STATEMENT], req.sql, - 'Mismatched DB_STATEMENT attribute' + 'Mismatched DB_STATEMENT attribute', ); }); }); @@ -252,10 +250,8 @@ describe('startTrace', () => { const overridingProvider = new NodeTracerProvider({ sampler: new AlwaysOffSampler(), exporter: overridingExporter, + spanProcessors: [new SimpleSpanProcessor(overridingExporter)], }); - overridingProvider.addSpanProcessor( - new SimpleSpanProcessor(overridingExporter) - ); overridingProvider.register(); startTrace( @@ -269,19 +265,19 @@ describe('startTrace', () => { assert.strictEqual( gotSpansFromGlobal.length, 0, - 'Expected no spans but got ${gotSpansFromGlobal.length}' + 'Expected no spans but got ${gotSpansFromGlobal.length}', ); const gotSpansFromCurrent = overridingExporter.getFinishedSpans(); assert.strictEqual( gotSpansFromCurrent.length, 0, - 'Expected no spans but got ${gotSpansFromCurrent.length}' + 'Expected no spans but got ${gotSpansFromCurrent.length}', ); overridingExporter.forceFlush(); await overridingProvider.shutdown(); - } + }, ); }); }); @@ -295,8 +291,8 @@ describe('getActiveOrNoopSpan', () => { globalProvider = new NodeTracerProvider({ sampler: new AlwaysOffSampler(), exporter: exporter, + spanProcessors: [new SimpleSpanProcessor(exporter)], }); - globalProvider.addSpanProcessor(new SimpleSpanProcessor(exporter)); globalProvider.register(); }); @@ -321,32 +317,32 @@ describe('getActiveOrNoopSpan', () => { assert.strictEqual( span.name, 
SPAN_NAMESPACE_PREFIX + '.aSpan', - 'names must match' + 'names must match', ); assert.strictEqual( span.name, activeSpan.name, - `names must match between activeSpan or current one\n\tGot: ${span.name}\n\tWant: ${activeSpan.name}` + `names must match between activeSpan or current one\n\tGot: ${span.name}\n\tWant: ${activeSpan.name}`, ); assert.strictEqual( span.startTime, activeSpan.startTime, - 'startTimes must match' + 'startTimes must match', ); assert.ok( span.duration, undefined, - 'the unended span must have an undefined duration' + 'the unended span must have an undefined duration', ); assert.ok( activeSpan.duration, undefined, - 'the unended span must have an undefined duration, got ${activeSpan.duration}' + 'the unended span must have an undefined duration, got ${activeSpan.duration}', ); assert.strictEqual( span.duration, activeSpan.duration, - 'durations must match' + 'durations must match', ); span.end(); }); @@ -358,8 +354,8 @@ describe('setError', () => { const provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: exporter, + spanProcessors: [new SimpleSpanProcessor(exporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(exporter)); provider.register(); const contextManager = new AsyncHooksContextManager(); @@ -384,7 +380,7 @@ describe('setError', () => { assert.strictEqual( status1, status2, - 'setting null error should have no effect' + 'setting null error should have no effect', ); res = setSpanError(null, null); @@ -414,8 +410,8 @@ describe('setErrorAndException', () => { const provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: exporter, + spanProcessors: [new SimpleSpanProcessor(exporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(exporter)); provider.register(); const contextManager = new AsyncHooksContextManager(); @@ -439,7 +435,7 @@ describe('setErrorAndException', () => { assert.strictEqual( status1, status2, - 'setting null error should have no effect' + 'setting 
null error should have no effect', ); res = setSpanErrorAndException(null, null); @@ -464,7 +460,7 @@ describe('setErrorAndException', () => { assert.strictEqual( expSpan.events[0].attributes[SEMATTRS_EXCEPTION_MESSAGE], 'this one', - 'the exception must have been recorded' + 'the exception must have been recorded', ); }); }); diff --git a/observability-test/session-pool.ts b/observability-test/session-pool.ts index f60553dc0..8864e3993 100644 --- a/observability-test/session-pool.ts +++ b/observability-test/session-pool.ts @@ -95,8 +95,8 @@ describe('SessionPool', () => { const provider = new NodeTracerProvider({ sampler: sampler, exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); beforeEach(() => { DATABASE.session = createSession; @@ -115,7 +115,7 @@ describe('SessionPool', () => { sandbox.stub(DATABASE, 'batchCreateSessions').throws(ourException); sandbox.stub(sessionPool, 'release'); - assert.rejects(async () => { + await assert.rejects(async () => { await sessionPool._createSessions(OPTIONS); }, ourException); @@ -135,7 +135,7 @@ describe('SessionPool', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -147,19 +147,19 @@ describe('SessionPool', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); const firstSpan = spans[0]; assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( ourException.message, firstSpan.status.message, - 'Unexpected span status 
message' + 'Unexpected span status message', ); }); @@ -188,7 +188,7 @@ describe('SessionPool', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -199,19 +199,19 @@ describe('SessionPool', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); const firstSpan = spans[0]; assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); }); }); diff --git a/observability-test/spanner.ts b/observability-test/spanner.ts index 5f74ca036..113484623 100644 --- a/observability-test/spanner.ts +++ b/observability-test/spanner.ts @@ -78,7 +78,7 @@ interface setupResults { } async function setup( - observabilityOptions?: typeof ObservabilityOptions + observabilityOptions?: typeof ObservabilityOptions, ): Promise { const server = new grpc.Server(); @@ -96,17 +96,17 @@ async function setup( } else { resolve(assignedPort); } - } + }, ); }); spannerMock.putStatementResult( selectSql, - mock.StatementResult.resultSet(createSelect1ResultSet()) + mock.StatementResult.resultSet(createSelect1ResultSet()), ); spannerMock.putStatementResult( updateSql, - mock.StatementResult.updateCount(1) + mock.StatementResult.updateCount(1), ); const spanner = new Spanner({ @@ -136,8 +136,8 @@ describe('EndToEnd', async () => { const tracerProvider = new NodeTracerProvider({ sampler: sampler, exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - 
tracerProvider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); const setupResult = await setup({ tracerProvider: tracerProvider, @@ -181,7 +181,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); }); @@ -207,7 +207,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); done(); }); @@ -219,14 +219,14 @@ describe('EndToEnd', async () => { assert.ifError(err); assert.ok(transaction); transaction!.end(); - transaction!.commit(); + void transaction!.commit(); const expectedSpanNames = ['CloudSpanner.Database.getTransaction']; const expectedEventNames = [...cacheSessionEvents, 'Using Session']; await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); done(); }); @@ -250,7 +250,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); done(); @@ -272,7 +272,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); }); @@ -299,7 +299,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); done(); }); @@ -324,7 +324,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); }); @@ -346,7 +346,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); done(); }); @@ -360,7 +360,7 @@ describe('EndToEnd', async () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); done(); @@ -388,7 +388,7 @@ describe('EndToEnd', async () => { 
verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); }); }); @@ -402,8 +402,8 @@ describe('ObservabilityOptions injection and propagation', async () => { const tracerProvider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - tracerProvider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); const observabilityOptions: typeof ObservabilityOptions = { tracerProvider: tracerProvider, @@ -430,31 +430,31 @@ describe('ObservabilityOptions injection and propagation', async () => { const instanceByHandle = spanner.instance('instance'); assert.deepStrictEqual( instanceByHandle._observabilityOptions, - observabilityOptions + observabilityOptions, ); // Create the Instance by means of a constructor directly. const instanceByConstructor = new Instance(spanner, 'myInstance'); assert.deepStrictEqual( instanceByConstructor._observabilityOptions, - observabilityOptions + observabilityOptions, ); // Acquire a handle to the Database through instance.database. const databaseByHandle = instanceByHandle.database('database'); assert.deepStrictEqual( databaseByHandle._observabilityOptions, - observabilityOptions + observabilityOptions, ); // Create the Database by means of a constructor directly. 
const databaseByConstructor = new Database( instanceByConstructor, - 'myDatabase' + 'myDatabase', ); assert.deepStrictEqual( databaseByConstructor._observabilityOptions, - observabilityOptions + observabilityOptions, ); }); @@ -463,8 +463,8 @@ describe('ObservabilityOptions injection and propagation', async () => { const tracerProvider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - tracerProvider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); const observabilityOptions: typeof ObservabilityOptions = { tracerProvider: tracerProvider, @@ -503,7 +503,7 @@ describe('ObservabilityOptions injection and propagation', async () => { const db = spanner.instance('instance').database('database'); const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - db.formattedName_ + db.formattedName_, ); it('run', done => { @@ -536,7 +536,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -548,7 +548,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.strictEqual( actualEventNames.every(value => expectedEventNames.includes(value)), true, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -557,13 +557,13 @@ describe('ObservabilityOptions injection and propagation', async () => { }); it('Transaction.begin+Dml.runUpdate', done => { - database.getTransaction((err, tx) => { + database.getTransaction(async (err, tx) => { assert.ifError(err); // Firstly erase the prior spans so that we can have only Transaction spans. 
traceExporter.reset(); - tx!.begin(); + await tx!.begin(); tx!.runUpdate(updateSql, async err => { assert.ifError(err); tx!.end(); @@ -592,7 +592,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -603,7 +603,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualEventNames.every(value => expectedEventNames.includes(value)), true, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -645,7 +645,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -656,7 +656,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -665,13 +665,13 @@ describe('ObservabilityOptions injection and propagation', async () => { }); it('rollback', done => { - database.getTransaction((err, tx) => { + database.getTransaction(async (err, tx) => { assert.ifError(err); // Firstly erase the prior spans so that we can have only Transaction spans. 
traceExporter.reset(); - tx!.begin(); + await tx!.begin(); tx!.runUpdate(updateSql, async err => { assert.ifError(err); @@ -702,7 +702,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -712,10 +712,10 @@ describe('ObservabilityOptions injection and propagation', async () => { ]; assert.strictEqual( actualEventNames.every(value => - expectedEventNames.includes(value) + expectedEventNames.includes(value), ), true, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -730,20 +730,16 @@ describe('ObservabilityOptions injection and propagation', async () => { const globalTracerProvider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: globalTraceExporter, + spanProcessors: [new SimpleSpanProcessor(globalTraceExporter)], }); - globalTracerProvider.addSpanProcessor( - new SimpleSpanProcessor(globalTraceExporter) - ); globalTracerProvider.register(); const injectedTraceExporter = new InMemorySpanExporter(); const injectedTracerProvider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: injectedTraceExporter, + spanProcessors: [new SimpleSpanProcessor(injectedTraceExporter)], }); - injectedTracerProvider.addSpanProcessor( - new SimpleSpanProcessor(injectedTraceExporter) - ); const observabilityOptions: typeof ObservabilityOptions = { tracerProvider: injectedTracerProvider, @@ -766,7 +762,7 @@ describe('ObservabilityOptions injection and propagation', async () => { const database = instance.database('database'); const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ + database.formattedName_, ); 
database.run('SELECT 1', err => { @@ -780,12 +776,12 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.strictEqual( spansFromGlobal.length, 0, - 'Expecting no spans from the global exporter' + 'Expecting no spans from the global exporter', ); assert.strictEqual( spansFromInjected.length > 0, true, - 'Expecting spans from the injected exporter' + 'Expecting spans from the injected exporter', ); spansFromInjected.sort((spanA, spanB) => { @@ -811,7 +807,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -822,7 +818,7 @@ describe('ObservabilityOptions injection and propagation', async () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); }); }); @@ -833,7 +829,7 @@ describe('E2E traces with async/await', async () => { let spanner: Spanner; let spannerMock: mock.MockSpanner; let traceExporter: typeof InMemorySpanExporter; - let provider: typeof TracerProvider; + let provider: typeof NodeTracerProvider; let observabilityOptions: typeof ObservabilityOptions; beforeEach(async () => { @@ -841,8 +837,8 @@ describe('E2E traces with async/await', async () => { provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); observabilityOptions = { tracerProvider: provider, @@ -886,7 +882,7 @@ describe('E2E traces with async/await', async () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names 
mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // We need to ensure a strict relationship between the spans. @@ -896,54 +892,54 @@ describe('E2E traces with async/await', async () => { const runSpan = spans[spans.length - 1]; assert.ok( runSpan.spanContext().traceId, - 'Expected that runSpan has a defined traceId' + 'Expected that runSpan has a defined traceId', ); assert.ok( runStreamSpan.spanContext().traceId, - 'Expected that runStreamSpan has a defined traceId' + 'Expected that runStreamSpan has a defined traceId', ); assert.deepStrictEqual( - runStreamSpan.parentSpanId, + runStreamSpan.parentSpanContext.spanId, runSpan.spanContext().spanId, - `Expected that runSpan(spanId=${runSpan.spanContext().spanId}) is the parent to runStreamSpan(parentSpanId=${runStreamSpan.parentSpanId})` + `Expected that runSpan(spanId=${runSpan.spanContext().spanId}) is the parent to runStreamSpan(parentSpanId=${runStreamSpan.parentSpanContext.spanId})`, ); assert.deepStrictEqual( runSpan.spanContext().traceId, runStreamSpan.spanContext().traceId, - 'Expected that both spans share a traceId' + 'Expected that both spans share a traceId', ); assert.ok( runStreamSpan.spanContext().spanId, - 'Expected that runStreamSpan has a defined spanId' + 'Expected that runStreamSpan has a defined spanId', ); assert.ok( runSpan.spanContext().spanId, - 'Expected that runSpan has a defined spanId' + 'Expected that runSpan has a defined spanId', ); const databaseBatchCreateSessionsSpan = spans[0]; assert.strictEqual( databaseBatchCreateSessionsSpan.name, - 'CloudSpanner.Database.batchCreateSessions' + 'CloudSpanner.Database.batchCreateSessions', ); const sessionPoolCreateSessionsSpan = spans[1]; assert.strictEqual( sessionPoolCreateSessionsSpan.name, - 'CloudSpanner.SessionPool.createSessions' + 'CloudSpanner.SessionPool.createSessions', ); assert.ok( 
sessionPoolCreateSessionsSpan.spanContext().traceId, - 'Expecting a defined sessionPoolCreateSessions traceId' + 'Expecting a defined sessionPoolCreateSessions traceId', ); assert.deepStrictEqual( sessionPoolCreateSessionsSpan.spanContext().traceId, databaseBatchCreateSessionsSpan.spanContext().traceId, - 'Expected the same traceId' + 'Expected the same traceId', ); assert.deepStrictEqual( - databaseBatchCreateSessionsSpan.parentSpanId, + databaseBatchCreateSessionsSpan.parentSpanContext.spanId, sessionPoolCreateSessionsSpan.spanContext().spanId, - 'Expected that sessionPool.createSessions is the parent to db.batchCreassionSessions' + 'Expected that sessionPool.createSessions is the parent to db.batchCreassionSessions', ); // Assert that despite all being exported, SessionPool.createSessions @@ -952,7 +948,7 @@ describe('E2E traces with async/await', async () => { assert.notEqual( sessionPoolCreateSessionsSpan.spanContext().traceId, runSpan.spanContext().traceId, - 'Did not expect the same traceId' + 'Did not expect the same traceId', ); // Finally check for the collective expected event names. 
@@ -964,7 +960,7 @@ describe('E2E traces with async/await', async () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); } @@ -1022,7 +1018,7 @@ describe('Negative cases', async () => { let spanner: Spanner; let spannerMock: mock.MockSpanner; let traceExporter: typeof InMemorySpanExporter; - let provider: typeof TracerProvider; + let provider: typeof NodeTracerProvider; let observabilityOptions: typeof ObservabilityOptions; const selectSql1p = 'SELECT 1p'; @@ -1039,8 +1035,8 @@ SELECT 1p provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); observabilityOptions = { tracerProvider: provider, @@ -1057,7 +1053,7 @@ SELECT 1p } as mock.MockError; spannerMock.putStatementResult( selectSql1p, - mock.StatementResult.error(serverErr) + mock.StatementResult.error(serverErr), ); const insertAlreadyExistentErr = { @@ -1066,7 +1062,7 @@ SELECT 1p } as mock.MockError; spannerMock.putStatementResult( insertAlreadyExistentDataSql, - mock.StatementResult.error(insertAlreadyExistentErr) + mock.StatementResult.error(insertAlreadyExistentErr), ); }); @@ -1104,7 +1100,7 @@ SELECT 1p assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // We need to ensure a strict relationship between the spans. 
@@ -1114,54 +1110,54 @@ SELECT 1p const runSpan = spans[spans.length - 1]; assert.ok( runSpan.spanContext().traceId, - 'Expected that runSpan has a defined traceId' + 'Expected that runSpan has a defined traceId', ); assert.ok( runStreamSpan.spanContext().traceId, - 'Expected that runStreamSpan has a defined traceId' + 'Expected that runStreamSpan has a defined traceId', ); assert.deepStrictEqual( - runStreamSpan.parentSpanId, + runStreamSpan.parentSpanContext.spanId, runSpan.spanContext().spanId, - `Expected that runSpan(spanId=${runSpan.spanContext().spanId}) is the parent to runStreamSpan(parentSpanId=${runStreamSpan.parentSpanId})` + `Expected that runSpan(spanId=${runSpan.spanContext().spanId}) is the parent to runStreamSpan(parentSpanId=${runStreamSpan.parentSpanContext.spanId})`, ); assert.deepStrictEqual( runSpan.spanContext().traceId, runStreamSpan.spanContext().traceId, - 'Expected that both spans share a traceId' + 'Expected that both spans share a traceId', ); assert.ok( runStreamSpan.spanContext().spanId, - 'Expected that runStreamSpan has a defined spanId' + 'Expected that runStreamSpan has a defined spanId', ); assert.ok( runSpan.spanContext().spanId, - 'Expected that runSpan has a defined spanId' + 'Expected that runSpan has a defined spanId', ); const databaseBatchCreateSessionsSpan = spans[0]; assert.strictEqual( databaseBatchCreateSessionsSpan.name, - 'CloudSpanner.Database.batchCreateSessions' + 'CloudSpanner.Database.batchCreateSessions', ); const sessionPoolCreateSessionsSpan = spans[1]; assert.strictEqual( sessionPoolCreateSessionsSpan.name, - 'CloudSpanner.SessionPool.createSessions' + 'CloudSpanner.SessionPool.createSessions', ); assert.ok( sessionPoolCreateSessionsSpan.spanContext().traceId, - 'Expecting a defined sessionPoolCreateSessions traceId' + 'Expecting a defined sessionPoolCreateSessions traceId', ); assert.deepStrictEqual( sessionPoolCreateSessionsSpan.spanContext().traceId, databaseBatchCreateSessionsSpan.spanContext().traceId, - 'Expected 
the same traceId' + 'Expected the same traceId', ); assert.deepStrictEqual( - databaseBatchCreateSessionsSpan.parentSpanId, + databaseBatchCreateSessionsSpan.parentSpanContext.spanId, sessionPoolCreateSessionsSpan.spanContext().spanId, - 'Expected that sessionPool.createSessions is the parent to db.batchCreassionSessions' + 'Expected that sessionPool.createSessions is the parent to db.batchCreassionSessions', ); // Assert that despite all being exported, SessionPool.createSessions @@ -1170,21 +1166,21 @@ SELECT 1p assert.notEqual( sessionPoolCreateSessionsSpan.spanContext().traceId, runSpan.spanContext().traceId, - 'Did not expect the same traceId' + 'Did not expect the same traceId', ); // Ensure that the last span has an error. assert.deepStrictEqual( runStreamSpan.status.code, SpanStatusCode.ERROR, - 'Expected an error status' + 'Expected an error status', ); const want = '3 INVALID_ARGUMENT: ' + messageBadSelect1p; assert.deepStrictEqual( runStreamSpan.status.message, want, - `Mismatched status message:\n\n\tGot: '${runStreamSpan.status.message}'\n\tWant: '${want}'` + `Mismatched status message:\n\n\tGot: '${runStreamSpan.status.message}'\n\tWant: '${want}'`, ); // Finally check for the collective expected event names. 
@@ -1196,7 +1192,7 @@ SELECT 1p assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); } @@ -1258,7 +1254,7 @@ SELECT 1p assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const spanSnapshotRun = spans[3]; assert.strictEqual(spanSnapshotRun.name, 'CloudSpanner.Snapshot.run'); @@ -1266,37 +1262,37 @@ SELECT 1p assert.deepStrictEqual( spanSnapshotRun.status.code, SpanStatusCode.ERROR, - 'Unexpected status code' + 'Unexpected status code', ); assert.deepStrictEqual( spanSnapshotRun.status.message, wantSpanErr, - 'Unexpexcted error message' + 'Unexpected error message', ); const databaseBatchCreateSessionsSpan = spans[0]; assert.strictEqual( databaseBatchCreateSessionsSpan.name, - 'CloudSpanner.Database.batchCreateSessions' + 'CloudSpanner.Database.batchCreateSessions', ); const sessionPoolCreateSessionsSpan = spans[1]; assert.strictEqual( sessionPoolCreateSessionsSpan.name, - 'CloudSpanner.SessionPool.createSessions' + 'CloudSpanner.SessionPool.createSessions', ); assert.ok( sessionPoolCreateSessionsSpan.spanContext().traceId, - 'Expecting a defined sessionPoolCreateSessions traceId' + 'Expecting a defined sessionPoolCreateSessions traceId', ); assert.deepStrictEqual( sessionPoolCreateSessionsSpan.spanContext().traceId, databaseBatchCreateSessionsSpan.spanContext().traceId, - 'Expected the same traceId' + 'Expected the same traceId', ); assert.deepStrictEqual( - databaseBatchCreateSessionsSpan.parentSpanId, + databaseBatchCreateSessionsSpan.parentSpanContext.spanId, sessionPoolCreateSessionsSpan.spanContext().spanId, - 'Expected that sessionPool.createSessions is the parent to db.batchCreassionSessions' + 'Expected that 
sessionPool.createSessions is the parent to db.batchCreassionSessions', ); // We need to ensure a strict relationship between the spans. @@ -1310,23 +1306,23 @@ SELECT 1p assert.deepStrictEqual( spanDatabaseRunTransactionAsync.name, 'CloudSpanner.Database.runTransactionAsync', - `${actualSpanNames}` + `${actualSpanNames}`, ); const spanTransactionCommit0 = spans[spans.length - 2]; assert.strictEqual( spanTransactionCommit0.name, - 'CloudSpanner.Transaction.commit' + 'CloudSpanner.Transaction.commit', ); assert.deepStrictEqual( - spanTransactionCommit0.parentSpanId, + spanTransactionCommit0.parentSpanContext.spanId, spanDatabaseRunTransactionAsync.spanContext().spanId, - 'Expected that Database.runTransaction is the parent to Transaction.commmit' + 'Expected that Database.runTransaction is the parent to Transaction.commmit', ); assert.deepStrictEqual( - spanSnapshotRun.parentSpanId, + spanSnapshotRun.parentSpanContext.spanId, spanDatabaseRunTransactionAsync.spanContext().spanId, - 'Expected that Database.runTransaction is the parent to Snapshot.run' + 'Expected that Database.runTransaction is the parent to Snapshot.run', ); // Assert that despite all being exported, SessionPool.createSessions @@ -1335,7 +1331,7 @@ SELECT 1p assert.notEqual( sessionPoolCreateSessionsSpan.spanContext().traceId, spanDatabaseRunTransactionAsync.spanContext().traceId, - 'Did not expect the same traceId' + 'Did not expect the same traceId', ); // Finally check for the collective expected event names. 
@@ -1355,7 +1351,7 @@ SELECT 1p assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); } @@ -1378,7 +1374,7 @@ SELECT 1p } catch (e) { assert.strictEqual( (e as grpc.ServiceError).code, - grpc.status.ALREADY_EXISTS + grpc.status.ALREADY_EXISTS, ); } @@ -1416,8 +1412,8 @@ describe('Traces for ExecuteStream broken stream retries', () => { const tracerProvider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - tracerProvider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); function newTestDatabase(): Database { return instance.database(`database-${dbCounter++}`); @@ -1435,36 +1431,36 @@ describe('Traces for ExecuteStream broken stream retries', () => { } else { resolve(assignedPort); } - } + }, ); }); spannerMock.putStatementResult( selectSql, - mock.StatementResult.resultSet(mock.createSimpleResultSet()) + mock.StatementResult.resultSet(mock.createSimpleResultSet()), ); spannerMock.putStatementResult( select1, - mock.StatementResult.resultSet(mock.createSelect1ResultSet()) + mock.StatementResult.resultSet(mock.createSelect1ResultSet()), ); spannerMock.putStatementResult( selectAllTypes, - mock.StatementResult.resultSet(mock.createResultSetWithAllDataTypes()) + mock.StatementResult.resultSet(mock.createResultSetWithAllDataTypes()), ); spannerMock.putStatementResult( invalidSql, - mock.StatementResult.error(fooNotFoundErr) + mock.StatementResult.error(fooNotFoundErr), ); spannerMock.putStatementResult( insertSql, - mock.StatementResult.updateCount(1) + mock.StatementResult.updateCount(1), ); spannerMock.putStatementResult( insertSqlForAllTypes, - mock.StatementResult.updateCount(1) + mock.StatementResult.updateCount(1), ); spannerMock.putStatementResult( updateSql, - 
mock.StatementResult.updateCount(2) + mock.StatementResult.updateCount(2), ); const observabilityOptions: typeof ObservabilityOptions = { @@ -1508,7 +1504,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); const [rows] = await database.run(selectSql); assert.strictEqual(rows.length, 3); @@ -1523,7 +1519,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); const database = newTestDatabase(); @@ -1540,15 +1536,15 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.strictEqual(requests.length, 2); assert.ok( requests[0].transaction?.begin!.readWrite, - 'inline txn is not set.' + 'inline txn is not set.', ); assert.ok( requests[1].transaction!.id, - 'Transaction ID is not used for retries.' + 'Transaction ID is not used for retries.', ); assert.ok( requests[1].resumeToken, - 'Resume token is not set for the retried' + 'Resume token is not set for the retried', ); }); @@ -1560,7 +1556,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); const database = newTestDatabase(); @@ -1582,15 +1578,15 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.strictEqual(requests.length, 3); assert.ok( requests[0].transaction?.begin!.readWrite, - 'inline txn is not set.' + 'inline txn is not set.', ); assert.ok( requests[1].transaction!.id, - 'Transaction ID is not used for retries.' 
+ 'Transaction ID is not used for retries.', ); assert.ok( requests[1].resumeToken, - 'Resume token is not set for the retried' + 'Resume token is not set for the retried', ); const commitRequests = spannerMock .getRequests() @@ -1599,11 +1595,11 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.strictEqual(commitRequests.length, 1); assert.deepStrictEqual( requests[1].transaction!.id, - requests[2].transaction!.id + requests[2].transaction!.id, ); assert.deepStrictEqual( requests[1].transaction!.id, - commitRequests[0].transactionId + commitRequests[0].transactionId, ); const beginTxnRequests = spannerMock .getRequests() @@ -1620,7 +1616,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); try { await database.run(selectSql); @@ -1628,7 +1624,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } catch (e) { assert.strictEqual( (e as grpc.ServiceError).message, - '2 UNKNOWN: Test error' + '2 UNKNOWN: Test error', ); } await database.close(); @@ -1643,15 +1639,15 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); database.run(selectSql, (err, rows) => { assert.ifError(err); assert.strictEqual(rows!.length, 3); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }); }); @@ -1663,15 +1659,15 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); database.run(selectSql, err => { assert.ok(err, 'Missing 
expected error'); assert.strictEqual(err!.message, '2 UNKNOWN: Non-retryable error'); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }); }); @@ -1684,7 +1680,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); const receivedRows: Row[] = []; database @@ -1701,7 +1697,6 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.strictEqual(err.message, '2 UNKNOWN: Non-retryable error'); database .close() - .catch(done) .then(() => { traceExporter.forceFlush(); const spans = traceExporter.getFinishedSpans(); @@ -1727,7 +1722,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Finally check for the collective expected event names. 
@@ -1740,11 +1735,12 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); - }); + }) + .catch(err => done(err)); }); }); }); @@ -1762,7 +1758,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofErrors(errors) + mock.SimulatedExecutionTime.ofErrors(errors), ); const [rows] = await database.run(selectSql); assert.strictEqual(rows.length, 3); @@ -1793,7 +1789,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Finally check for the collective expected event names. 
@@ -1810,7 +1806,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); }); @@ -1822,7 +1818,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); await database.runTransactionAsync(async tx => { @@ -1859,7 +1855,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); // Finally check for the collective expected event names. 
@@ -1876,7 +1872,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); }); @@ -1891,7 +1887,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { } as mock.MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - mock.SimulatedExecutionTime.ofError(err) + mock.SimulatedExecutionTime.ofError(err), ); let attempts = 0; @@ -1912,7 +1908,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { assert.deepStrictEqual( attempts, 1, - 'runTransactionAsync.attempt must be 1' + 'runTransactionAsync.attempt must be 1', ); const expectedSpanNames = [ 'CloudSpanner.Database.batchCreateSessions', @@ -1927,7 +1923,7 @@ describe('Traces for ExecuteStream broken stream retries', () => { await verifySpansAndEvents( traceExporter, expectedSpanNames, - expectedEventNames + expectedEventNames, ); }); }); @@ -1970,7 +1966,7 @@ describe('End to end tracing headers', () => { metadataCountWithE2EHeader++; assert.strictEqual( metadata.get(END_TO_END_TRACING_HEADER)[0], - 'true' + 'true', ); } if (metadata.get('traceparent')[0] !== undefined) { diff --git a/observability-test/table.ts b/observability-test/table.ts index 09e47f52f..26db0ff42 100644 --- a/observability-test/table.ts +++ b/observability-test/table.ts @@ -77,8 +77,8 @@ describe('Table', () => { const provider = new NodeTracerProvider({ sampler: sampler, exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); const NAME = 'table-name'; @@ -113,7 +113,7 @@ describe('Table', () => { assert.strictEqual( spans.length >= minCount, true, - `at least ${minCount} spans expected` + `at least ${minCount} spans expected`, ); // Sort the spans by 
duration. @@ -158,7 +158,7 @@ describe('Table', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); verifySpanAttributes(spans[0]); done(); @@ -179,7 +179,7 @@ describe('Table', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); verifySpanAttributes(spans[0]); done(); @@ -204,7 +204,7 @@ describe('Table', () => { assert.deepStrictEqual( gotSpanStatus, wantSpanStatus, - `mismatch in span status:\n\tGot: ${JSON.stringify(gotSpanStatus)}\n\tWant: ${JSON.stringify(wantSpanStatus)}` + `mismatch in span status:\n\tGot: ${JSON.stringify(gotSpanStatus)}\n\tWant: ${JSON.stringify(wantSpanStatus)}`, ); const actualSpanNames = spanNames(gotSpans); @@ -213,7 +213,7 @@ describe('Table', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); verifySpanAttributes(gotSpans[0]); done(); @@ -237,7 +237,7 @@ describe('Table', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); verifySpanAttributes(gotSpans[0]); done(); @@ -263,7 +263,7 @@ describe('Table', () => { assert.deepStrictEqual( gotSpanStatus, wantSpanStatus, - `mismatch in span status:\n\tGot: ${JSON.stringify(gotSpanStatus)}\n\tWant: ${JSON.stringify(wantSpanStatus)}` + `mismatch in span status:\n\tGot: ${JSON.stringify(gotSpanStatus)}\n\tWant: ${JSON.stringify(wantSpanStatus)}`, ); const 
actualSpanNames = spanNames(gotSpans); @@ -271,7 +271,7 @@ describe('Table', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); verifySpanAttributes[gotSpans[0]]; @@ -295,7 +295,7 @@ describe('Table', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); verifySpanAttributes(gotSpans[0]); @@ -320,7 +320,7 @@ describe('Table', () => { assert.deepStrictEqual( gotSpanStatus, wantSpanStatus, - `mismatch in span status:\n\tGot: ${JSON.stringify(gotSpanStatus)}\n\tWant: ${JSON.stringify(wantSpanStatus)}` + `mismatch in span status:\n\tGot: ${JSON.stringify(gotSpanStatus)}\n\tWant: ${JSON.stringify(wantSpanStatus)}`, ); const actualSpanNames = spanNames(gotSpans); @@ -328,7 +328,7 @@ describe('Table', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); verifySpanAttributes(gotSpans[0]); diff --git a/observability-test/transaction.ts b/observability-test/transaction.ts index 0780e5464..c89c1b0a9 100644 --- a/observability-test/transaction.ts +++ b/observability-test/transaction.ts @@ -57,7 +57,7 @@ describe('Transaction', () => { }; const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - DATABASE.formattedName_ + DATABASE.formattedName_, ); const SESSION = { @@ -97,8 +97,8 @@ describe('Transaction', () => { tracerProvider = new NodeTracerProvider({ sampler: sampler, exporter: traceExporter, + spanProcessors: [new SimpleSpanProcessor(traceExporter)], }); - tracerProvider.addSpanProcessor(new 
SimpleSpanProcessor(traceExporter)); const SNAPSHOT_OPTIONS = {a: 'b', c: 'd'}; sandbox.stub(Snapshot, 'encodeTimestampBounds').returns(SNAPSHOT_OPTIONS); @@ -171,7 +171,7 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -181,7 +181,7 @@ describe('Transaction', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -204,14 +204,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = ['Begin Transaction']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. 
@@ -220,12 +220,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( 'begin.error', firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -257,14 +257,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. @@ -273,12 +273,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( 'read.error', firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -302,14 +302,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. 
@@ -318,12 +318,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -359,14 +359,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. @@ -375,12 +375,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); }); @@ -403,14 +403,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. 
@@ -419,12 +419,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( 'run.error', firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -456,7 +456,7 @@ describe('Transaction', () => { stream.on('error', error => { assert.strictEqual( error.message, - 'Value of type undefined not recognized.' + 'Value of type undefined not recognized.', ); const exportResults = extractExportedSpans(); @@ -467,14 +467,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = ['Starting stream', 'exception']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. 
@@ -483,12 +483,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Unexpected an span status code' + 'Unexpected an span status code', ); assert.strictEqual( 'Value of type undefined not recognized.', firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); const attributes = exportResults.spans[0].attributes; @@ -524,7 +524,7 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const attributes = exportResults.spans[0].attributes; @@ -548,7 +548,7 @@ describe('Transaction', () => { it('no error with unset `id`', done => { const expectedError = new Error( - 'Transaction ID is unknown, nothing to rollback.' + 'Transaction ID is unknown, nothing to rollback.', ); delete transaction.id; @@ -563,7 +563,7 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -572,7 +572,7 @@ describe('Transaction', () => { assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. 
@@ -581,12 +581,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -610,14 +610,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. @@ -626,12 +626,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( 'our request error', firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -654,14 +654,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = []; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. 
@@ -670,12 +670,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -705,14 +705,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = ['Starting Commit', 'Commit Done']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. @@ -721,12 +721,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.UNSET, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( undefined, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); done(); @@ -750,14 +750,14 @@ describe('Transaction', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = ['Starting Commit', 'Commit failed']; assert.deepStrictEqual( actualEventNames, expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); // Ensure that the final span that got retries did not error. 
@@ -767,12 +767,12 @@ describe('Transaction', () => { assert.strictEqual( SpanStatusCode.ERROR, firstSpan.status.code, - 'Unexpected span status code' + 'Unexpected span status code', ); assert.strictEqual( fakeError.message, firstSpan.status.message, - 'Unexpected span status message' + 'Unexpected span status message', ); withAllSpansHaveDBName(spans); diff --git a/owlbot.py b/owlbot.py index 2a9b067a1..1628cd2fa 100644 --- a/owlbot.py +++ b/owlbot.py @@ -64,7 +64,7 @@ common_templates = gcp.CommonTemplates() templates = common_templates.node_library(source_location='build/src') -s.copy(templates, excludes=[".kokoro/samples-test.sh", ".kokoro/trampoline_v2.sh", ".github/release-trigger.yml"]) +s.copy(templates, excludes=[".kokoro/samples-test.sh", ".kokoro/trampoline_v2.sh", ".github/release-trigger.yml", ".github/sync-repo-settings.yaml"]) node.postprocess_gapic_library_hermetic() diff --git a/package.json b/package.json index 962a31563..9906df3c9 100644 --- a/package.json +++ b/package.json @@ -1,11 +1,11 @@ { "name": "@google-cloud/spanner", "description": "Cloud Spanner Client Library for Node.js", - "version": "7.21.0", + "version": "8.0.0", "license": "Apache-2.0", "author": "Google Inc.", "engines": { - "node": ">=14.0.0" + "node": ">=18" }, "repository": "googleapis/nodejs-spanner", "main": "./build/src/index.js", @@ -54,80 +54,79 @@ "coverage": "c8 mocha build/test build/test/common && c8 report --check-coverage" }, "dependencies": { - "@google-cloud/common": "^5.0.0", - "@google-cloud/precise-date": "^4.0.0", - "@google-cloud/projectify": "^4.0.0", - "@google-cloud/promisify": "4.0.0", - "@grpc/proto-loader": "^0.7.0", + "@google-cloud/common": "^6.0.0", + "@google-cloud/precise-date": "^5.0.0", + "@google-cloud/projectify": "^5.0.0", + "@google-cloud/promisify": "^5.0.0", + "@grpc/proto-loader": "^0.7.13", "@opentelemetry/api": "^1.9.0", - "@opentelemetry/core": "^1.27.0", - "@opentelemetry/context-async-hooks": "^1.26.0", - 
"@opentelemetry/semantic-conventions": "^1.25.1", - "@types/big.js": "^6.0.0", - "@types/stack-trace": "0.0.33", - "arrify": "^2.0.0", - "big.js": "^6.0.0", - "checkpoint-stream": "^0.1.1", - "duplexify": "^4.1.1", + "@opentelemetry/context-async-hooks": "^2.0.0", + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/semantic-conventions": "^1.30.0", + "@types/big.js": "^6.2.2", + "@types/stack-trace": "^0.0.33", + "big.js": "^7.0.0", + "checkpoint-stream": "^0.1.2", + "duplexify": "^4.1.3", "events-intercept": "^2.0.0", "extend": "^3.0.2", - "google-auth-library": "^9.0.0", - "google-gax": "4.4.1", - "grpc-gcp": "^1.0.0", - "is": "^3.2.1", + "google-auth-library": "^10.0.0-rc.1", + "google-gax": "^5.0.1-rc.0", + "grpc-gcp": "^1.0.1", + "is": "^3.3.0", "lodash.snakecase": "^4.1.1", "merge-stream": "^2.0.0", "p-queue": "^6.0.2", - "protobufjs": "^7.0.0", - "retry-request": "^7.0.0", + "protobufjs": "^7.4.0", + "retry-request": "^8.0.0", "split-array-stream": "^2.0.0", "stack-trace": "0.0.10", - "stream-events": "^1.0.4", - "teeny-request": "^9.0.0", - "through2": "^4.0.0" + "stream-events": "^1.0.5", + "teeny-request": "^10.0.0", + "through2": "^4.0.2" }, "devDependencies": { - "@opentelemetry/sdk-trace-base": "^1.26.0", - "@opentelemetry/sdk-trace-node": "^1.26.0", - "@types/concat-stream": "^2.0.0", - "@types/extend": "^3.0.0", - "@types/is": "0.0.25", - "@types/lodash.snakecase": "^4.1.4", - "@types/merge-stream": "^1.1.2", - "@types/mocha": "^9.0.0", - "@types/mv": "^2.1.0", - "@types/ncp": "^2.0.1", - "@types/proxyquire": "^1.3.28", - "@types/request": "^2.48.3", - "@types/sinon": "^17.0.0", - "@types/through2": "^2.0.34", + "@opentelemetry/sdk-trace-base": "^2.0.0", + "@opentelemetry/sdk-trace-node": "^2.0.0", + "@types/concat-stream": "^2.0.3", + "@types/extend": "^3.0.4", + "@types/is": "^0.0.25", + "@types/lodash.snakecase": "^4.1.9", + "@types/merge-stream": "^2.0.0", + "@types/mocha": "^10.0.10", + "@types/mv": "^2.1.4", + "@types/ncp": "^2.0.8", + 
"@types/proxyquire": "^1.3.31", + "@types/request": "^2.48.12", + "@types/sinon": "^17.0.4", + "@types/through2": "^2.0.41", "@types/uuid": "^10.0.0", - "binary-search-bounds": "^2.0.4", - "c8": "^8.0.1", - "codecov": "^3.0.2", + "binary-search-bounds": "^2.0.5", + "c8": "^10.1.3", + "codecov": "^3.8.3", "concat-stream": "^2.0.0", - "dedent": "^1.0.0", + "dedent": "^1.5.3", "execa": "^5.0.0", - "gapic-tools": "^0.4.0", - "gts": "^5.0.0", - "jsdoc": "^4.0.0", + "gapic-tools": "^1.0.1", + "gts": "^6.0.2", + "jsdoc": "^4.0.4", "jsdoc-fresh": "^3.0.0", "jsdoc-region-tag": "^3.0.0", - "linkinator": "^3.0.0", + "linkinator": "^6.1.2", "lodash.random": "^3.2.0", - "mocha": "^9.2.2", + "mocha": "^11.1.0", "mv": "^2.1.1", "ncp": "^2.0.0", - "nise": "6.0.0", + "nise": "^6.1.1", "p-limit": "^3.0.1", - "path-to-regexp": "6.2.2", - "proxyquire": "^2.0.1", - "sinon": "^18.0.0", - "stats-lite": "^2.1.1", - "time-span": "^4.0.0", - "tmp": "^0.2.0", - "typescript": "^5.1.6", - "uuid": "^10.0.0", - "yargs": "^17.0.0" + "path-to-regexp": "^8.2.0", + "proxyquire": "^2.1.3", + "sinon": "^20.0.0", + "stats-lite": "^2.2.0", + "time-span": "4.0.0", + "tmp": "^0.2.3", + "typescript": "^5.8.2", + "uuid": "^11.1.0", + "yargs": "^17.7.2" } } diff --git a/protos/google/spanner/admin/database/v1/backup.proto b/protos/google/spanner/admin/database/v1/backup.proto index 54a7be3e3..6898814c4 100644 --- a/protos/google/spanner/admin/database/v1/backup.proto +++ b/protos/google/spanner/admin/database/v1/backup.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/protos/google/spanner/admin/database/v1/backup_schedule.proto b/protos/google/spanner/admin/database/v1/backup_schedule.proto index 80a70fdb0..c273516ae 100644 --- a/protos/google/spanner/admin/database/v1/backup_schedule.proto +++ b/protos/google/spanner/admin/database/v1/backup_schedule.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/admin/database/v1/common.proto b/protos/google/spanner/admin/database/v1/common.proto index a91012306..c494b8cf7 100644 --- a/protos/google/spanner/admin/database/v1/common.proto +++ b/protos/google/spanner/admin/database/v1/common.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/admin/database/v1/spanner_database_admin.proto b/protos/google/spanner/admin/database/v1/spanner_database_admin.proto index 125196fe9..084f98c68 100644 --- a/protos/google/spanner/admin/database/v1/spanner_database_admin.proto +++ b/protos/google/spanner/admin/database/v1/spanner_database_admin.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/protos/google/spanner/admin/instance/v1/common.proto b/protos/google/spanner/admin/instance/v1/common.proto index 11e00368c..0b5282c7d 100644 --- a/protos/google/spanner/admin/instance/v1/common.proto +++ b/protos/google/spanner/admin/instance/v1/common.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/admin/instance/v1/spanner_instance_admin.proto b/protos/google/spanner/admin/instance/v1/spanner_instance_admin.proto index 615a86c6f..d3e60257e 100644 --- a/protos/google/spanner/admin/instance/v1/spanner_instance_admin.proto +++ b/protos/google/spanner/admin/instance/v1/spanner_instance_admin.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/executor/v1/cloud_executor.proto b/protos/google/spanner/executor/v1/cloud_executor.proto index 05d662a5a..cf68519ef 100644 --- a/protos/google/spanner/executor/v1/cloud_executor.proto +++ b/protos/google/spanner/executor/v1/cloud_executor.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/v1/commit_response.proto b/protos/google/spanner/v1/commit_response.proto index d5f9b15d5..beeb3123e 100644 --- a/protos/google/spanner/v1/commit_response.proto +++ b/protos/google/spanner/v1/commit_response.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/protos/google/spanner/v1/keys.proto b/protos/google/spanner/v1/keys.proto index 82f073b96..9eadda470 100644 --- a/protos/google/spanner/v1/keys.proto +++ b/protos/google/spanner/v1/keys.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/v1/mutation.proto b/protos/google/spanner/v1/mutation.proto index 7fbf93f8a..c8af1af8e 100644 --- a/protos/google/spanner/v1/mutation.proto +++ b/protos/google/spanner/v1/mutation.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/v1/query_plan.proto b/protos/google/spanner/v1/query_plan.proto index ba18055e3..104828457 100644 --- a/protos/google/spanner/v1/query_plan.proto +++ b/protos/google/spanner/v1/query_plan.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/v1/result_set.proto b/protos/google/spanner/v1/result_set.proto index 0b8aabf86..c80bff2ae 100644 --- a/protos/google/spanner/v1/result_set.proto +++ b/protos/google/spanner/v1/result_set.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -22,7 +22,6 @@ import "google/spanner/v1/query_plan.proto"; import "google/spanner/v1/transaction.proto"; import "google/spanner/v1/type.proto"; -option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Spanner.V1"; option go_package = "cloud.google.com/go/spanner/apiv1/spannerpb;spannerpb"; option java_multiple_files = true; @@ -38,11 +37,10 @@ message ResultSet { ResultSetMetadata metadata = 1; // Each element in `rows` is a row whose format is defined by - // [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. The ith element - // in each row matches the ith field in - // [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. Elements are - // encoded based on type as described - // [here][google.spanner.v1.TypeCode]. + // [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. The ith + // element in each row matches the ith field in + // [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. Elements + // are encoded based on type as described [here][google.spanner.v1.TypeCode]. repeated google.protobuf.ListValue rows = 2; // Query plan and execution statistics for the SQL statement that @@ -50,18 +48,16 @@ message ResultSet { // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. // DML statements always produce stats containing the number of rows // modified, unless executed using the - // [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - // Other fields may or may not be populated, based on the + // [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] + // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + // Other fields might or might not be populated, based on the // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. 
ResultSetStats stats = 3; - // Optional. A precommit token will be included if the read-write transaction - // is on a multiplexed session. - // The precommit token with the highest sequence number from this transaction - // attempt should be passed to the + // Optional. A precommit token is included if the read-write transaction is on + // a multiplexed session. Pass the precommit token with the highest sequence + // number from this transaction attempt to the // [Commit][google.spanner.v1.Spanner.Commit] request for this transaction. - // This feature is not yet supported and will result in an UNIMPLEMENTED - // error. MultiplexedSessionPrecommitToken precommit_token = 5 [(google.api.field_behavior) = OPTIONAL]; } @@ -83,13 +79,14 @@ message PartialResultSet { // Most values are encoded based on type as described // [here][google.spanner.v1.TypeCode]. // - // It is possible that the last value in values is "chunked", + // It's possible that the last value in values is "chunked", // meaning that the rest of the value is sent in subsequent - // `PartialResultSet`(s). This is denoted by the [chunked_value][google.spanner.v1.PartialResultSet.chunked_value] - // field. Two or more chunked values can be merged to form a - // complete value as follows: + // `PartialResultSet`(s). This is denoted by the + // [chunked_value][google.spanner.v1.PartialResultSet.chunked_value] field. + // Two or more chunked values can be merged to form a complete value as + // follows: // - // * `bool/number/null`: cannot be chunked + // * `bool/number/null`: can't be chunked // * `string`: concatenate the strings // * `list`: concatenate the lists. If the last element in a list is a // `string`, `list`, or `object`, merge it with the first element in @@ -100,28 +97,28 @@ message PartialResultSet { // // Some examples of merging: // - // # Strings are concatenated. + // Strings are concatenated. // "foo", "bar" => "foobar" // - // # Lists of non-strings are concatenated. 
+ // Lists of non-strings are concatenated. // [2, 3], [4] => [2, 3, 4] // - // # Lists are concatenated, but the last and first elements are merged - // # because they are strings. + // Lists are concatenated, but the last and first elements are merged + // because they are strings. // ["a", "b"], ["c", "d"] => ["a", "bc", "d"] // - // # Lists are concatenated, but the last and first elements are merged - // # because they are lists. Recursively, the last and first elements - // # of the inner lists are merged because they are strings. + // Lists are concatenated, but the last and first elements are merged + // because they are lists. Recursively, the last and first elements + // of the inner lists are merged because they are strings. // ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"] // - // # Non-overlapping object fields are combined. + // Non-overlapping object fields are combined. // {"a": "1"}, {"b": "2"} => {"a": "1", "b": 2"} // - // # Overlapping object fields are merged. + // Overlapping object fields are merged. // {"a": "1"}, {"a": "2"} => {"a": "12"} // - // # Examples of merging objects containing lists of strings. + // Examples of merging objects containing lists of strings. // {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]} // // For a more complete example, suppose a streaming SQL query is @@ -137,7 +134,6 @@ message PartialResultSet { // { // "values": ["orl"] // "chunked_value": true - // "resume_token": "Bqp2..." // } // { // "values": ["d"] @@ -147,11 +143,17 @@ message PartialResultSet { // This sequence of `PartialResultSet`s encodes two rows, one // containing the field value `"Hello"`, and a second containing the // field value `"World" = "W" + "orl" + "d"`. + // + // Not all `PartialResultSet`s contain a `resume_token`. Execution can only be + // resumed from a previously yielded `resume_token`. 
For the above sequence of + // `PartialResultSet`s, resuming the query with `"resume_token": "Af65..."` + // yields results from the `PartialResultSet` with value "orl". repeated google.protobuf.Value values = 2; - // If true, then the final value in [values][google.spanner.v1.PartialResultSet.values] is chunked, and must - // be combined with more values from subsequent `PartialResultSet`s - // to obtain a complete field value. + // If true, then the final value in + // [values][google.spanner.v1.PartialResultSet.values] is chunked, and must be + // combined with more values from subsequent `PartialResultSet`s to obtain a + // complete field value. bool chunked_value = 3; // Streaming calls might be interrupted for a variety of reasons, such @@ -163,27 +165,29 @@ message PartialResultSet { // Query plan and execution statistics for the statement that produced this // streaming result set. These can be requested by setting - // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] and are sent - // only once with the last response in the stream. - // This field will also be present in the last response for DML - // statements. + // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] + // and are sent only once with the last response in the stream. This field is + // also present in the last response for DML statements. ResultSetStats stats = 5; - // Optional. A precommit token will be included if the read-write transaction - // is on a multiplexed session. - // The precommit token with the highest sequence number from this transaction - // attempt should be passed to the + // Optional. A precommit token is included if the read-write transaction + // has multiplexed sessions enabled. Pass the precommit token with the highest + // sequence number from this transaction attempt to the // [Commit][google.spanner.v1.Spanner.Commit] request for this transaction. 
- // This feature is not yet supported and will result in an UNIMPLEMENTED - // error. MultiplexedSessionPrecommitToken precommit_token = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Indicates whether this is the last `PartialResultSet` in the + // stream. The server might optionally set this field. Clients shouldn't rely + // on this field being set in all cases. + bool last = 9 [(google.api.field_behavior) = OPTIONAL]; } -// Metadata about a [ResultSet][google.spanner.v1.ResultSet] or [PartialResultSet][google.spanner.v1.PartialResultSet]. +// Metadata about a [ResultSet][google.spanner.v1.ResultSet] or +// [PartialResultSet][google.spanner.v1.PartialResultSet]. message ResultSetMetadata { // Indicates the field names and types for the rows in the result - // set. For example, a SQL query like `"SELECT UserId, UserName FROM + // set. For example, a SQL query like `"SELECT UserId, UserName FROM // Users"` could return a `row_type` value like: // // "fields": [ @@ -209,9 +213,11 @@ message ResultSetMetadata { StructType undeclared_parameters = 3; } -// Additional statistics about a [ResultSet][google.spanner.v1.ResultSet] or [PartialResultSet][google.spanner.v1.PartialResultSet]. +// Additional statistics about a [ResultSet][google.spanner.v1.ResultSet] or +// [PartialResultSet][google.spanner.v1.PartialResultSet]. message ResultSetStats { - // [QueryPlan][google.spanner.v1.QueryPlan] for the query associated with this result. + // [QueryPlan][google.spanner.v1.QueryPlan] for the query associated with this + // result. QueryPlan query_plan = 1; // Aggregated statistics from the execution of the query. Only present when @@ -230,7 +236,7 @@ message ResultSetStats { // Standard DML returns an exact count of rows that were modified. int64 row_count_exact = 3; - // Partitioned DML does not offer exactly-once semantics, so it + // Partitioned DML doesn't offer exactly-once semantics, so it // returns a lower bound of the rows modified. 
int64 row_count_lower_bound = 4; } diff --git a/protos/google/spanner/v1/spanner.proto b/protos/google/spanner/v1/spanner.proto index d60174997..c8e2d080f 100644 --- a/protos/google/spanner/v1/spanner.proto +++ b/protos/google/spanner/v1/spanner.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/v1/transaction.proto b/protos/google/spanner/v1/transaction.proto index dced12c10..612e491a3 100644 --- a/protos/google/spanner/v1/transaction.proto +++ b/protos/google/spanner/v1/transaction.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/google/spanner/v1/type.proto b/protos/google/spanner/v1/type.proto index a8a73bf31..3e01729fc 100644 --- a/protos/google/spanner/v1/type.proto +++ b/protos/google/spanner/v1/type.proto @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/protos/protos.d.ts b/protos/protos.d.ts index fb8a47b4e..e07ab8c2b 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -3171,7 +3171,7 @@ export namespace google { doubleValue?: (number|null); /** UninterpretedOption stringValue */ - stringValue?: (Uint8Array|string|null); + stringValue?: (Uint8Array|Buffer|string|null); /** UninterpretedOption aggregateValue */ aggregateValue?: (string|null); @@ -3202,7 +3202,7 @@ export namespace google { public doubleValue: number; /** UninterpretedOption stringValue. */ - public stringValue: (Uint8Array|string); + public stringValue: (Uint8Array|Buffer|string); /** UninterpretedOption aggregateValue. 
*/ public aggregateValue: string; @@ -4238,7 +4238,7 @@ export namespace google { type_url?: (string|null); /** Any value */ - value?: (Uint8Array|string|null); + value?: (Uint8Array|Buffer|string|null); } /** Represents an Any. */ @@ -4254,7 +4254,7 @@ export namespace google { public type_url: string; /** Any value. */ - public value: (Uint8Array|string); + public value: (Uint8Array|Buffer|string); /** * Creates a new Any instance using the specified properties. @@ -10958,7 +10958,7 @@ export namespace google { databaseDialect?: (google.spanner.admin.database.v1.DatabaseDialect|keyof typeof google.spanner.admin.database.v1.DatabaseDialect|null); /** CreateDatabaseRequest protoDescriptors */ - protoDescriptors?: (Uint8Array|string|null); + protoDescriptors?: (Uint8Array|Buffer|string|null); } /** Represents a CreateDatabaseRequest. */ @@ -10986,7 +10986,7 @@ export namespace google { public databaseDialect: (google.spanner.admin.database.v1.DatabaseDialect|keyof typeof google.spanner.admin.database.v1.DatabaseDialect); /** CreateDatabaseRequest protoDescriptors. */ - public protoDescriptors: (Uint8Array|string); + public protoDescriptors: (Uint8Array|Buffer|string); /** * Creates a new CreateDatabaseRequest instance using the specified properties. @@ -11485,7 +11485,7 @@ export namespace google { operationId?: (string|null); /** UpdateDatabaseDdlRequest protoDescriptors */ - protoDescriptors?: (Uint8Array|string|null); + protoDescriptors?: (Uint8Array|Buffer|string|null); } /** Represents an UpdateDatabaseDdlRequest. */ @@ -11507,7 +11507,7 @@ export namespace google { public operationId: string; /** UpdateDatabaseDdlRequest protoDescriptors. */ - public protoDescriptors: (Uint8Array|string); + public protoDescriptors: (Uint8Array|Buffer|string); /** * Creates a new UpdateDatabaseDdlRequest instance using the specified properties. 
@@ -12024,7 +12024,7 @@ export namespace google { statements?: (string[]|null); /** GetDatabaseDdlResponse protoDescriptors */ - protoDescriptors?: (Uint8Array|string|null); + protoDescriptors?: (Uint8Array|Buffer|string|null); } /** Represents a GetDatabaseDdlResponse. */ @@ -12040,7 +12040,7 @@ export namespace google { public statements: string[]; /** GetDatabaseDdlResponse protoDescriptors. */ - public protoDescriptors: (Uint8Array|string); + public protoDescriptors: (Uint8Array|Buffer|string); /** * Creates a new GetDatabaseDdlResponse instance using the specified properties. @@ -19591,9 +19591,6 @@ export namespace google { /** ReadAction limit. */ public limit: number; - /** ReadAction _index. */ - public _index?: "index"; - /** * Creates a new ReadAction instance using the specified properties. * @param [properties] Properties to set @@ -19912,9 +19909,6 @@ export namespace google { /** DmlAction autocommitIfSupported. */ public autocommitIfSupported?: (boolean|null); - /** DmlAction _autocommitIfSupported. */ - public _autocommitIfSupported?: "autocommitIfSupported"; - /** * Creates a new DmlAction instance using the specified properties. * @param [properties] Properties to set @@ -20106,7 +20100,7 @@ export namespace google { doubleValue?: (number|null); /** Value bytesValue */ - bytesValue?: (Uint8Array|string|null); + bytesValue?: (Uint8Array|Buffer|string|null); /** Value stringValue */ stringValue?: (string|null); @@ -20152,7 +20146,7 @@ export namespace google { public doubleValue?: (number|null); /** Value bytesValue. */ - public bytesValue?: (Uint8Array|string|null); + public bytesValue?: (Uint8Array|Buffer|string|null); /** Value stringValue. */ public stringValue?: (string|null); @@ -20178,9 +20172,6 @@ export namespace google { /** Value valueType. 
*/ public valueType?: ("isNull"|"intValue"|"boolValue"|"doubleValue"|"bytesValue"|"stringValue"|"structValue"|"timestampValue"|"dateDaysValue"|"isCommitTimestamp"|"arrayValue"); - /** Value _arrayType. */ - public _arrayType?: "arrayType"; - /** * Creates a new Value instance using the specified properties. * @param [properties] Properties to set @@ -20290,9 +20281,6 @@ export namespace google { /** KeyRange type. */ public type?: (google.spanner.executor.v1.KeyRange.Type|keyof typeof google.spanner.executor.v1.KeyRange.Type|null); - /** KeyRange _type. */ - public _type?: "type"; - /** * Creates a new KeyRange instance using the specified properties. * @param [properties] Properties to set @@ -21156,9 +21144,6 @@ export namespace google { /** PartitionedUpdateAction update. */ public update?: (google.spanner.executor.v1.IQueryAction|null); - /** PartitionedUpdateAction _options. */ - public _options?: "options"; - /** * Creates a new PartitionedUpdateAction instance using the specified properties. * @param [properties] Properties to set @@ -21264,12 +21249,6 @@ export namespace google { /** ExecutePartitionedUpdateOptions tag. */ public tag?: (string|null); - /** ExecutePartitionedUpdateOptions _rpcPriority. */ - public _rpcPriority?: "rpcPriority"; - - /** ExecutePartitionedUpdateOptions _tag. */ - public _tag?: "tag"; - /** * Creates a new ExecutePartitionedUpdateOptions instance using the specified properties. * @param [properties] Properties to set @@ -21386,12 +21365,6 @@ export namespace google { /** StartTransactionAction executionOptions. */ public executionOptions?: (google.spanner.executor.v1.ITransactionExecutionOptions|null); - /** StartTransactionAction _concurrency. */ - public _concurrency?: "concurrency"; - - /** StartTransactionAction _executionOptions. */ - public _executionOptions?: "executionOptions"; - /** * Creates a new StartTransactionAction instance using the specified properties. 
* @param [properties] Properties to set @@ -22448,9 +22421,6 @@ export namespace google { /** UpdateUserInstanceConfigAction labels. */ public labels: { [k: string]: string }; - /** UpdateUserInstanceConfigAction _displayName. */ - public _displayName?: "displayName"; - /** * Creates a new UpdateUserInstanceConfigAction instance using the specified properties. * @param [properties] Properties to set @@ -22766,12 +22736,6 @@ export namespace google { /** ListCloudInstanceConfigsAction pageToken. */ public pageToken?: (string|null); - /** ListCloudInstanceConfigsAction _pageSize. */ - public _pageSize?: "pageSize"; - - /** ListCloudInstanceConfigsAction _pageToken. */ - public _pageToken?: "pageToken"; - /** * Creates a new ListCloudInstanceConfigsAction instance using the specified properties. * @param [properties] Properties to set @@ -22905,15 +22869,6 @@ export namespace google { /** CreateCloudInstanceAction labels. */ public labels: { [k: string]: string }; - /** CreateCloudInstanceAction _nodeCount. */ - public _nodeCount?: "nodeCount"; - - /** CreateCloudInstanceAction _processingUnits. */ - public _processingUnits?: "processingUnits"; - - /** CreateCloudInstanceAction _autoscalingConfig. */ - public _autoscalingConfig?: "autoscalingConfig"; - /** * Creates a new CreateCloudInstanceAction instance using the specified properties. * @param [properties] Properties to set @@ -23047,18 +23002,6 @@ export namespace google { /** UpdateCloudInstanceAction labels. */ public labels: { [k: string]: string }; - /** UpdateCloudInstanceAction _displayName. */ - public _displayName?: "displayName"; - - /** UpdateCloudInstanceAction _nodeCount. */ - public _nodeCount?: "nodeCount"; - - /** UpdateCloudInstanceAction _processingUnits. */ - public _processingUnits?: "processingUnits"; - - /** UpdateCloudInstanceAction _autoscalingConfig. */ - public _autoscalingConfig?: "autoscalingConfig"; - /** * Creates a new UpdateCloudInstanceAction instance using the specified properties. 
* @param [properties] Properties to set @@ -23262,7 +23205,7 @@ export namespace google { dialect?: (string|null); /** CreateCloudDatabaseAction protoDescriptors */ - protoDescriptors?: (Uint8Array|string|null); + protoDescriptors?: (Uint8Array|Buffer|string|null); } /** Represents a CreateCloudDatabaseAction. */ @@ -23293,13 +23236,7 @@ export namespace google { public dialect?: (string|null); /** CreateCloudDatabaseAction protoDescriptors. */ - public protoDescriptors?: (Uint8Array|string|null); - - /** CreateCloudDatabaseAction _dialect. */ - public _dialect?: "dialect"; - - /** CreateCloudDatabaseAction _protoDescriptors. */ - public _protoDescriptors?: "protoDescriptors"; + public protoDescriptors?: (Uint8Array|Buffer|string|null); /** * Creates a new CreateCloudDatabaseAction instance using the specified properties. @@ -23398,7 +23335,7 @@ export namespace google { operationId?: (string|null); /** UpdateCloudDatabaseDdlAction protoDescriptors */ - protoDescriptors?: (Uint8Array|string|null); + protoDescriptors?: (Uint8Array|Buffer|string|null); } /** Represents an UpdateCloudDatabaseDdlAction. */ @@ -23426,10 +23363,7 @@ export namespace google { public operationId: string; /** UpdateCloudDatabaseDdlAction protoDescriptors. */ - public protoDescriptors?: (Uint8Array|string|null); - - /** UpdateCloudDatabaseDdlAction _protoDescriptors. */ - public _protoDescriptors?: "protoDescriptors"; + public protoDescriptors?: (Uint8Array|Buffer|string|null); /** * Creates a new UpdateCloudDatabaseDdlAction instance using the specified properties. @@ -23758,9 +23692,6 @@ export namespace google { /** ChangeQuorumCloudDatabaseAction servingLocations. */ public servingLocations: string[]; - /** ChangeQuorumCloudDatabaseAction _databaseUri. */ - public _databaseUri?: "databaseUri"; - /** * Creates a new ChangeQuorumCloudDatabaseAction instance using the specified properties. 
* @param [properties] Properties to set @@ -23991,15 +23922,6 @@ export namespace google { /** ListCloudInstancesAction pageToken. */ public pageToken?: (string|null); - /** ListCloudInstancesAction _filter. */ - public _filter?: "filter"; - - /** ListCloudInstancesAction _pageSize. */ - public _pageSize?: "pageSize"; - - /** ListCloudInstancesAction _pageToken. */ - public _pageToken?: "pageToken"; - /** * Creates a new ListCloudInstancesAction instance using the specified properties. * @param [properties] Properties to set @@ -24593,9 +24515,6 @@ export namespace google { /** CreateCloudBackupAction encryptionConfig. */ public encryptionConfig?: (google.spanner.admin.database.v1.IEncryptionConfig|null); - /** CreateCloudBackupAction _versionTime. */ - public _versionTime?: "versionTime"; - /** * Creates a new CreateCloudBackupAction instance using the specified properties. * @param [properties] Properties to set @@ -25674,7 +25593,7 @@ export namespace google { batchTxnTime?: (google.protobuf.ITimestamp|null); /** StartBatchTransactionAction tid */ - tid?: (Uint8Array|string|null); + tid?: (Uint8Array|Buffer|string|null); /** StartBatchTransactionAction cloudDatabaseRole */ cloudDatabaseRole?: (string|null); @@ -25693,7 +25612,7 @@ export namespace google { public batchTxnTime?: (google.protobuf.ITimestamp|null); /** StartBatchTransactionAction tid. */ - public tid?: (Uint8Array|string|null); + public tid?: (Uint8Array|Buffer|string|null); /** StartBatchTransactionAction cloudDatabaseRole. */ public cloudDatabaseRole: string; @@ -25913,12 +25832,6 @@ export namespace google { /** GenerateDbPartitionsForReadAction maxPartitionCount. */ public maxPartitionCount?: (number|Long|string|null); - /** GenerateDbPartitionsForReadAction _desiredBytesPerPartition. */ - public _desiredBytesPerPartition?: "desiredBytesPerPartition"; - - /** GenerateDbPartitionsForReadAction _maxPartitionCount. 
*/ - public _maxPartitionCount?: "maxPartitionCount"; - /** * Creates a new GenerateDbPartitionsForReadAction instance using the specified properties. * @param [properties] Properties to set @@ -26022,9 +25935,6 @@ export namespace google { /** GenerateDbPartitionsForQueryAction desiredBytesPerPartition. */ public desiredBytesPerPartition?: (number|Long|string|null); - /** GenerateDbPartitionsForQueryAction _desiredBytesPerPartition. */ - public _desiredBytesPerPartition?: "desiredBytesPerPartition"; - /** * Creates a new GenerateDbPartitionsForQueryAction instance using the specified properties. * @param [properties] Properties to set @@ -26107,10 +26017,10 @@ export namespace google { interface IBatchPartition { /** BatchPartition partition */ - partition?: (Uint8Array|string|null); + partition?: (Uint8Array|Buffer|string|null); /** BatchPartition partitionToken */ - partitionToken?: (Uint8Array|string|null); + partitionToken?: (Uint8Array|Buffer|string|null); /** BatchPartition table */ table?: (string|null); @@ -26129,10 +26039,10 @@ export namespace google { constructor(properties?: google.spanner.executor.v1.IBatchPartition); /** BatchPartition partition. */ - public partition: (Uint8Array|string); + public partition: (Uint8Array|Buffer|string); /** BatchPartition partitionToken. */ - public partitionToken: (Uint8Array|string); + public partitionToken: (Uint8Array|Buffer|string); /** BatchPartition table. */ public table?: (string|null); @@ -26140,12 +26050,6 @@ export namespace google { /** BatchPartition index. */ public index?: (string|null); - /** BatchPartition _table. */ - public _table?: "table"; - - /** BatchPartition _index. */ - public _index?: "index"; - /** * Creates a new BatchPartition instance using the specified properties. * @param [properties] Properties to set @@ -26382,21 +26286,6 @@ export namespace google { /** ExecuteChangeStreamQuery cloudDatabaseRole. 
*/ public cloudDatabaseRole?: (string|null); - /** ExecuteChangeStreamQuery _endTime. */ - public _endTime?: "endTime"; - - /** ExecuteChangeStreamQuery _partitionToken. */ - public _partitionToken?: "partitionToken"; - - /** ExecuteChangeStreamQuery _heartbeatMilliseconds. */ - public _heartbeatMilliseconds?: "heartbeatMilliseconds"; - - /** ExecuteChangeStreamQuery _deadlineSeconds. */ - public _deadlineSeconds?: "deadlineSeconds"; - - /** ExecuteChangeStreamQuery _cloudDatabaseRole. */ - public _cloudDatabaseRole?: "cloudDatabaseRole"; - /** * Creates a new ExecuteChangeStreamQuery instance using the specified properties. * @param [properties] Properties to set @@ -26494,7 +26383,7 @@ export namespace google { transactionRestarted?: (boolean|null); /** SpannerActionOutcome batchTxnId */ - batchTxnId?: (Uint8Array|string|null); + batchTxnId?: (Uint8Array|Buffer|string|null); /** SpannerActionOutcome dbPartition */ dbPartition?: (google.spanner.executor.v1.IBatchPartition[]|null); @@ -26534,7 +26423,7 @@ export namespace google { public transactionRestarted?: (boolean|null); /** SpannerActionOutcome batchTxnId. */ - public batchTxnId?: (Uint8Array|string|null); + public batchTxnId?: (Uint8Array|Buffer|string|null); /** SpannerActionOutcome dbPartition. */ public dbPartition: google.spanner.executor.v1.IBatchPartition[]; @@ -26548,27 +26437,6 @@ export namespace google { /** SpannerActionOutcome changeStreamRecords. */ public changeStreamRecords: google.spanner.executor.v1.IChangeStreamRecord[]; - /** SpannerActionOutcome _status. */ - public _status?: "status"; - - /** SpannerActionOutcome _commitTime. */ - public _commitTime?: "commitTime"; - - /** SpannerActionOutcome _readResult. */ - public _readResult?: "readResult"; - - /** SpannerActionOutcome _queryResult. */ - public _queryResult?: "queryResult"; - - /** SpannerActionOutcome _transactionRestarted. */ - public _transactionRestarted?: "transactionRestarted"; - - /** SpannerActionOutcome _batchTxnId. 
*/ - public _batchTxnId?: "batchTxnId"; - - /** SpannerActionOutcome _adminResult. */ - public _adminResult?: "adminResult"; - /** * Creates a new SpannerActionOutcome instance using the specified properties. * @param [properties] Properties to set @@ -27368,15 +27236,6 @@ export namespace google { /** ReadResult rowType. */ public rowType?: (google.spanner.v1.IStructType|null); - /** ReadResult _index. */ - public _index?: "index"; - - /** ReadResult _requestIndex. */ - public _requestIndex?: "requestIndex"; - - /** ReadResult _rowType. */ - public _rowType?: "rowType"; - /** * Creates a new ReadResult instance using the specified properties. * @param [properties] Properties to set @@ -27480,9 +27339,6 @@ export namespace google { /** QueryResult rowType. */ public rowType?: (google.spanner.v1.IStructType|null); - /** QueryResult _rowType. */ - public _rowType?: "rowType"; - /** * Creates a new QueryResult instance using the specified properties. * @param [properties] Properties to set @@ -30349,13 +30205,13 @@ export namespace google { paramTypes?: ({ [k: string]: google.spanner.v1.IType }|null); /** ExecuteSqlRequest resumeToken */ - resumeToken?: (Uint8Array|string|null); + resumeToken?: (Uint8Array|Buffer|string|null); /** ExecuteSqlRequest queryMode */ queryMode?: (google.spanner.v1.ExecuteSqlRequest.QueryMode|keyof typeof google.spanner.v1.ExecuteSqlRequest.QueryMode|null); /** ExecuteSqlRequest partitionToken */ - partitionToken?: (Uint8Array|string|null); + partitionToken?: (Uint8Array|Buffer|string|null); /** ExecuteSqlRequest seqno */ seqno?: (number|Long|string|null); @@ -30401,13 +30257,13 @@ export namespace google { public paramTypes: { [k: string]: google.spanner.v1.IType }; /** ExecuteSqlRequest resumeToken. */ - public resumeToken: (Uint8Array|string); + public resumeToken: (Uint8Array|Buffer|string); /** ExecuteSqlRequest queryMode. 
*/ public queryMode: (google.spanner.v1.ExecuteSqlRequest.QueryMode|keyof typeof google.spanner.v1.ExecuteSqlRequest.QueryMode); /** ExecuteSqlRequest partitionToken. */ - public partitionToken: (Uint8Array|string); + public partitionToken: (Uint8Array|Buffer|string); /** ExecuteSqlRequest seqno. */ public seqno: (number|Long|string); @@ -31335,7 +31191,7 @@ export namespace google { interface IPartition { /** Partition partitionToken */ - partitionToken?: (Uint8Array|string|null); + partitionToken?: (Uint8Array|Buffer|string|null); } /** Represents a Partition. */ @@ -31348,7 +31204,7 @@ export namespace google { constructor(properties?: google.spanner.v1.IPartition); /** Partition partitionToken. */ - public partitionToken: (Uint8Array|string); + public partitionToken: (Uint8Array|Buffer|string); /** * Creates a new Partition instance using the specified properties. @@ -31556,10 +31412,10 @@ export namespace google { limit?: (number|Long|string|null); /** ReadRequest resumeToken */ - resumeToken?: (Uint8Array|string|null); + resumeToken?: (Uint8Array|Buffer|string|null); /** ReadRequest partitionToken */ - partitionToken?: (Uint8Array|string|null); + partitionToken?: (Uint8Array|Buffer|string|null); /** ReadRequest requestOptions */ requestOptions?: (google.spanner.v1.IRequestOptions|null); @@ -31608,10 +31464,10 @@ export namespace google { public limit: (number|Long|string); /** ReadRequest resumeToken. */ - public resumeToken: (Uint8Array|string); + public resumeToken: (Uint8Array|Buffer|string); /** ReadRequest partitionToken. */ - public partitionToken: (Uint8Array|string); + public partitionToken: (Uint8Array|Buffer|string); /** ReadRequest requestOptions. 
*/ public requestOptions?: (google.spanner.v1.IRequestOptions|null); @@ -31845,7 +31701,7 @@ export namespace google { session?: (string|null); /** CommitRequest transactionId */ - transactionId?: (Uint8Array|string|null); + transactionId?: (Uint8Array|Buffer|string|null); /** CommitRequest singleUseTransaction */ singleUseTransaction?: (google.spanner.v1.ITransactionOptions|null); @@ -31879,7 +31735,7 @@ export namespace google { public session: string; /** CommitRequest transactionId. */ - public transactionId?: (Uint8Array|string|null); + public transactionId?: (Uint8Array|Buffer|string|null); /** CommitRequest singleUseTransaction. */ public singleUseTransaction?: (google.spanner.v1.ITransactionOptions|null); @@ -31987,7 +31843,7 @@ export namespace google { session?: (string|null); /** RollbackRequest transactionId */ - transactionId?: (Uint8Array|string|null); + transactionId?: (Uint8Array|Buffer|string|null); } /** Represents a RollbackRequest. */ @@ -32003,7 +31859,7 @@ export namespace google { public session: string; /** RollbackRequest transactionId. */ - public transactionId: (Uint8Array|string); + public transactionId: (Uint8Array|Buffer|string); /** * Creates a new RollbackRequest instance using the specified properties. @@ -32752,7 +32608,7 @@ export namespace google { readLockMode?: (google.spanner.v1.TransactionOptions.ReadWrite.ReadLockMode|keyof typeof google.spanner.v1.TransactionOptions.ReadWrite.ReadLockMode|null); /** ReadWrite multiplexedSessionPreviousTransactionId */ - multiplexedSessionPreviousTransactionId?: (Uint8Array|string|null); + multiplexedSessionPreviousTransactionId?: (Uint8Array|Buffer|string|null); } /** Represents a ReadWrite. */ @@ -32768,7 +32624,7 @@ export namespace google { public readLockMode: (google.spanner.v1.TransactionOptions.ReadWrite.ReadLockMode|keyof typeof google.spanner.v1.TransactionOptions.ReadWrite.ReadLockMode); /** ReadWrite multiplexedSessionPreviousTransactionId. 
*/ - public multiplexedSessionPreviousTransactionId: (Uint8Array|string); + public multiplexedSessionPreviousTransactionId: (Uint8Array|Buffer|string); /** * Creates a new ReadWrite instance using the specified properties. @@ -33091,7 +32947,7 @@ export namespace google { interface ITransaction { /** Transaction id */ - id?: (Uint8Array|string|null); + id?: (Uint8Array|Buffer|string|null); /** Transaction readTimestamp */ readTimestamp?: (google.protobuf.ITimestamp|null); @@ -33110,7 +32966,7 @@ export namespace google { constructor(properties?: google.spanner.v1.ITransaction); /** Transaction id. */ - public id: (Uint8Array|string); + public id: (Uint8Array|Buffer|string); /** Transaction readTimestamp. */ public readTimestamp?: (google.protobuf.ITimestamp|null); @@ -33203,7 +33059,7 @@ export namespace google { singleUse?: (google.spanner.v1.ITransactionOptions|null); /** TransactionSelector id */ - id?: (Uint8Array|string|null); + id?: (Uint8Array|Buffer|string|null); /** TransactionSelector begin */ begin?: (google.spanner.v1.ITransactionOptions|null); @@ -33222,7 +33078,7 @@ export namespace google { public singleUse?: (google.spanner.v1.ITransactionOptions|null); /** TransactionSelector id. */ - public id?: (Uint8Array|string|null); + public id?: (Uint8Array|Buffer|string|null); /** TransactionSelector begin. */ public begin?: (google.spanner.v1.ITransactionOptions|null); @@ -33312,7 +33168,7 @@ export namespace google { interface IMultiplexedSessionPrecommitToken { /** MultiplexedSessionPrecommitToken precommitToken */ - precommitToken?: (Uint8Array|string|null); + precommitToken?: (Uint8Array|Buffer|string|null); /** MultiplexedSessionPrecommitToken seqNum */ seqNum?: (number|null); @@ -33328,7 +33184,7 @@ export namespace google { constructor(properties?: google.spanner.v1.IMultiplexedSessionPrecommitToken); /** MultiplexedSessionPrecommitToken precommitToken. 
*/ - public precommitToken: (Uint8Array|string); + public precommitToken: (Uint8Array|Buffer|string); /** MultiplexedSessionPrecommitToken seqNum. */ public seqNum: number; @@ -34108,13 +33964,16 @@ export namespace google { chunkedValue?: (boolean|null); /** PartialResultSet resumeToken */ - resumeToken?: (Uint8Array|string|null); + resumeToken?: (Uint8Array|Buffer|string|null); /** PartialResultSet stats */ stats?: (google.spanner.v1.IResultSetStats|null); /** PartialResultSet precommitToken */ precommitToken?: (google.spanner.v1.IMultiplexedSessionPrecommitToken|null); + + /** PartialResultSet last */ + last?: (boolean|null); } /** Represents a PartialResultSet. */ @@ -34136,7 +33995,7 @@ export namespace google { public chunkedValue: boolean; /** PartialResultSet resumeToken. */ - public resumeToken: (Uint8Array|string); + public resumeToken: (Uint8Array|Buffer|string); /** PartialResultSet stats. */ public stats?: (google.spanner.v1.IResultSetStats|null); @@ -34144,6 +34003,9 @@ export namespace google { /** PartialResultSet precommitToken. */ public precommitToken?: (google.spanner.v1.IMultiplexedSessionPrecommitToken|null); + /** PartialResultSet last. */ + public last: boolean; + /** * Creates a new PartialResultSet instance using the specified properties. * @param [properties] Properties to set @@ -38965,7 +38827,7 @@ export namespace google { auditConfigs?: (google.iam.v1.IAuditConfig[]|null); /** Policy etag */ - etag?: (Uint8Array|string|null); + etag?: (Uint8Array|Buffer|string|null); } /** Represents a Policy. */ @@ -38987,7 +38849,7 @@ export namespace google { public auditConfigs: google.iam.v1.IAuditConfig[]; /** Policy etag. */ - public etag: (Uint8Array|string); + public etag: (Uint8Array|Buffer|string); /** * Creates a new Policy instance using the specified properties. 
diff --git a/protos/protos.js b/protos/protos.js index 6f610619b..225e78fc9 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -49870,12 +49870,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * ReadAction _index. - * @member {"index"|undefined} _index - * @memberof google.spanner.executor.v1.ReadAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ReadAction.prototype, "_index", { get: $util.oneOfGetter($oneOfFields = ["index"]), set: $util.oneOfSetter($oneOfFields) @@ -50693,12 +50688,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * DmlAction _autocommitIfSupported. - * @member {"autocommitIfSupported"|undefined} _autocommitIfSupported - * @memberof google.spanner.executor.v1.DmlAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(DmlAction.prototype, "_autocommitIfSupported", { get: $util.oneOfGetter($oneOfFields = ["autocommitIfSupported"]), set: $util.oneOfSetter($oneOfFields) @@ -51272,12 +51262,7 @@ set: $util.oneOfSetter($oneOfFields) }); - /** - * Value _arrayType. - * @member {"arrayType"|undefined} _arrayType - * @memberof google.spanner.executor.v1.Value - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(Value.prototype, "_arrayType", { get: $util.oneOfGetter($oneOfFields = ["arrayType"]), set: $util.oneOfSetter($oneOfFields) @@ -51768,12 +51753,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * KeyRange _type. 
- * @member {"type"|undefined} _type - * @memberof google.spanner.executor.v1.KeyRange - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(KeyRange.prototype, "_type", { get: $util.oneOfGetter($oneOfFields = ["type"]), set: $util.oneOfSetter($oneOfFields) @@ -54017,12 +53997,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * PartitionedUpdateAction _options. - * @member {"options"|undefined} _options - * @memberof google.spanner.executor.v1.PartitionedUpdateAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(PartitionedUpdateAction.prototype, "_options", { get: $util.oneOfGetter($oneOfFields = ["options"]), set: $util.oneOfSetter($oneOfFields) @@ -54272,23 +54247,13 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * ExecutePartitionedUpdateOptions _rpcPriority. - * @member {"rpcPriority"|undefined} _rpcPriority - * @memberof google.spanner.executor.v1.PartitionedUpdateAction.ExecutePartitionedUpdateOptions - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ExecutePartitionedUpdateOptions.prototype, "_rpcPriority", { get: $util.oneOfGetter($oneOfFields = ["rpcPriority"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ExecutePartitionedUpdateOptions _tag. - * @member {"tag"|undefined} _tag - * @memberof google.spanner.executor.v1.PartitionedUpdateAction.ExecutePartitionedUpdateOptions - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ExecutePartitionedUpdateOptions.prototype, "_tag", { get: $util.oneOfGetter($oneOfFields = ["tag"]), set: $util.oneOfSetter($oneOfFields) @@ -54584,23 +54549,13 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * StartTransactionAction _concurrency. 
- * @member {"concurrency"|undefined} _concurrency - * @memberof google.spanner.executor.v1.StartTransactionAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(StartTransactionAction.prototype, "_concurrency", { get: $util.oneOfGetter($oneOfFields = ["concurrency"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * StartTransactionAction _executionOptions. - * @member {"executionOptions"|undefined} _executionOptions - * @memberof google.spanner.executor.v1.StartTransactionAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(StartTransactionAction.prototype, "_executionOptions", { get: $util.oneOfGetter($oneOfFields = ["executionOptions"]), set: $util.oneOfSetter($oneOfFields) @@ -57849,12 +57804,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * UpdateUserInstanceConfigAction _displayName. - * @member {"displayName"|undefined} _displayName - * @memberof google.spanner.executor.v1.UpdateUserInstanceConfigAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(UpdateUserInstanceConfigAction.prototype, "_displayName", { get: $util.oneOfGetter($oneOfFields = ["displayName"]), set: $util.oneOfSetter($oneOfFields) @@ -58627,23 +58577,13 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * ListCloudInstanceConfigsAction _pageSize. - * @member {"pageSize"|undefined} _pageSize - * @memberof google.spanner.executor.v1.ListCloudInstanceConfigsAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ListCloudInstanceConfigsAction.prototype, "_pageSize", { get: $util.oneOfGetter($oneOfFields = ["pageSize"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ListCloudInstanceConfigsAction _pageToken. 
- * @member {"pageToken"|undefined} _pageToken - * @memberof google.spanner.executor.v1.ListCloudInstanceConfigsAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ListCloudInstanceConfigsAction.prototype, "_pageToken", { get: $util.oneOfGetter($oneOfFields = ["pageToken"]), set: $util.oneOfSetter($oneOfFields) @@ -58949,34 +58889,19 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * CreateCloudInstanceAction _nodeCount. - * @member {"nodeCount"|undefined} _nodeCount - * @memberof google.spanner.executor.v1.CreateCloudInstanceAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(CreateCloudInstanceAction.prototype, "_nodeCount", { get: $util.oneOfGetter($oneOfFields = ["nodeCount"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * CreateCloudInstanceAction _processingUnits. - * @member {"processingUnits"|undefined} _processingUnits - * @memberof google.spanner.executor.v1.CreateCloudInstanceAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(CreateCloudInstanceAction.prototype, "_processingUnits", { get: $util.oneOfGetter($oneOfFields = ["processingUnits"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * CreateCloudInstanceAction _autoscalingConfig. - * @member {"autoscalingConfig"|undefined} _autoscalingConfig - * @memberof google.spanner.executor.v1.CreateCloudInstanceAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(CreateCloudInstanceAction.prototype, "_autoscalingConfig", { get: $util.oneOfGetter($oneOfFields = ["autoscalingConfig"]), set: $util.oneOfSetter($oneOfFields) @@ -59384,45 +59309,25 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * UpdateCloudInstanceAction _displayName. 
- * @member {"displayName"|undefined} _displayName - * @memberof google.spanner.executor.v1.UpdateCloudInstanceAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(UpdateCloudInstanceAction.prototype, "_displayName", { get: $util.oneOfGetter($oneOfFields = ["displayName"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * UpdateCloudInstanceAction _nodeCount. - * @member {"nodeCount"|undefined} _nodeCount - * @memberof google.spanner.executor.v1.UpdateCloudInstanceAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(UpdateCloudInstanceAction.prototype, "_nodeCount", { get: $util.oneOfGetter($oneOfFields = ["nodeCount"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * UpdateCloudInstanceAction _processingUnits. - * @member {"processingUnits"|undefined} _processingUnits - * @memberof google.spanner.executor.v1.UpdateCloudInstanceAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(UpdateCloudInstanceAction.prototype, "_processingUnits", { get: $util.oneOfGetter($oneOfFields = ["processingUnits"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * UpdateCloudInstanceAction _autoscalingConfig. - * @member {"autoscalingConfig"|undefined} _autoscalingConfig - * @memberof google.spanner.executor.v1.UpdateCloudInstanceAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(UpdateCloudInstanceAction.prototype, "_autoscalingConfig", { get: $util.oneOfGetter($oneOfFields = ["autoscalingConfig"]), set: $util.oneOfSetter($oneOfFields) @@ -60063,23 +59968,13 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * CreateCloudDatabaseAction _dialect. 
- * @member {"dialect"|undefined} _dialect - * @memberof google.spanner.executor.v1.CreateCloudDatabaseAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(CreateCloudDatabaseAction.prototype, "_dialect", { get: $util.oneOfGetter($oneOfFields = ["dialect"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * CreateCloudDatabaseAction _protoDescriptors. - * @member {"protoDescriptors"|undefined} _protoDescriptors - * @memberof google.spanner.executor.v1.CreateCloudDatabaseAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(CreateCloudDatabaseAction.prototype, "_protoDescriptors", { get: $util.oneOfGetter($oneOfFields = ["protoDescriptors"]), set: $util.oneOfSetter($oneOfFields) @@ -60457,12 +60352,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * UpdateCloudDatabaseDdlAction _protoDescriptors. - * @member {"protoDescriptors"|undefined} _protoDescriptors - * @memberof google.spanner.executor.v1.UpdateCloudDatabaseDdlAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(UpdateCloudDatabaseDdlAction.prototype, "_protoDescriptors", { get: $util.oneOfGetter($oneOfFields = ["protoDescriptors"]), set: $util.oneOfSetter($oneOfFields) @@ -61308,12 +61198,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * ChangeQuorumCloudDatabaseAction _databaseUri. - * @member {"databaseUri"|undefined} _databaseUri - * @memberof google.spanner.executor.v1.ChangeQuorumCloudDatabaseAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ChangeQuorumCloudDatabaseAction.prototype, "_databaseUri", { get: $util.oneOfGetter($oneOfFields = ["databaseUri"]), set: $util.oneOfSetter($oneOfFields) @@ -61863,34 +61748,19 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * ListCloudInstancesAction _filter. 
- * @member {"filter"|undefined} _filter - * @memberof google.spanner.executor.v1.ListCloudInstancesAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ListCloudInstancesAction.prototype, "_filter", { get: $util.oneOfGetter($oneOfFields = ["filter"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ListCloudInstancesAction _pageSize. - * @member {"pageSize"|undefined} _pageSize - * @memberof google.spanner.executor.v1.ListCloudInstancesAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ListCloudInstancesAction.prototype, "_pageSize", { get: $util.oneOfGetter($oneOfFields = ["pageSize"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ListCloudInstancesAction _pageToken. - * @member {"pageToken"|undefined} _pageToken - * @memberof google.spanner.executor.v1.ListCloudInstancesAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ListCloudInstancesAction.prototype, "_pageToken", { get: $util.oneOfGetter($oneOfFields = ["pageToken"]), set: $util.oneOfSetter($oneOfFields) @@ -63318,12 +63188,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * CreateCloudBackupAction _versionTime. - * @member {"versionTime"|undefined} _versionTime - * @memberof google.spanner.executor.v1.CreateCloudBackupAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(CreateCloudBackupAction.prototype, "_versionTime", { get: $util.oneOfGetter($oneOfFields = ["versionTime"]), set: $util.oneOfSetter($oneOfFields) @@ -66483,23 +66348,13 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * GenerateDbPartitionsForReadAction _desiredBytesPerPartition. 
- * @member {"desiredBytesPerPartition"|undefined} _desiredBytesPerPartition - * @memberof google.spanner.executor.v1.GenerateDbPartitionsForReadAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(GenerateDbPartitionsForReadAction.prototype, "_desiredBytesPerPartition", { get: $util.oneOfGetter($oneOfFields = ["desiredBytesPerPartition"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * GenerateDbPartitionsForReadAction _maxPartitionCount. - * @member {"maxPartitionCount"|undefined} _maxPartitionCount - * @memberof google.spanner.executor.v1.GenerateDbPartitionsForReadAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(GenerateDbPartitionsForReadAction.prototype, "_maxPartitionCount", { get: $util.oneOfGetter($oneOfFields = ["maxPartitionCount"]), set: $util.oneOfSetter($oneOfFields) @@ -66819,12 +66674,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * GenerateDbPartitionsForQueryAction _desiredBytesPerPartition. - * @member {"desiredBytesPerPartition"|undefined} _desiredBytesPerPartition - * @memberof google.spanner.executor.v1.GenerateDbPartitionsForQueryAction - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(GenerateDbPartitionsForQueryAction.prototype, "_desiredBytesPerPartition", { get: $util.oneOfGetter($oneOfFields = ["desiredBytesPerPartition"]), set: $util.oneOfSetter($oneOfFields) @@ -67099,23 +66949,13 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * BatchPartition _table. - * @member {"table"|undefined} _table - * @memberof google.spanner.executor.v1.BatchPartition - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(BatchPartition.prototype, "_table", { get: $util.oneOfGetter($oneOfFields = ["table"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * BatchPartition _index. 
- * @member {"index"|undefined} _index - * @memberof google.spanner.executor.v1.BatchPartition - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(BatchPartition.prototype, "_index", { get: $util.oneOfGetter($oneOfFields = ["index"]), set: $util.oneOfSetter($oneOfFields) @@ -67673,56 +67513,31 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * ExecuteChangeStreamQuery _endTime. - * @member {"endTime"|undefined} _endTime - * @memberof google.spanner.executor.v1.ExecuteChangeStreamQuery - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ExecuteChangeStreamQuery.prototype, "_endTime", { get: $util.oneOfGetter($oneOfFields = ["endTime"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ExecuteChangeStreamQuery _partitionToken. - * @member {"partitionToken"|undefined} _partitionToken - * @memberof google.spanner.executor.v1.ExecuteChangeStreamQuery - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ExecuteChangeStreamQuery.prototype, "_partitionToken", { get: $util.oneOfGetter($oneOfFields = ["partitionToken"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ExecuteChangeStreamQuery _heartbeatMilliseconds. - * @member {"heartbeatMilliseconds"|undefined} _heartbeatMilliseconds - * @memberof google.spanner.executor.v1.ExecuteChangeStreamQuery - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ExecuteChangeStreamQuery.prototype, "_heartbeatMilliseconds", { get: $util.oneOfGetter($oneOfFields = ["heartbeatMilliseconds"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ExecuteChangeStreamQuery _deadlineSeconds. 
- * @member {"deadlineSeconds"|undefined} _deadlineSeconds - * @memberof google.spanner.executor.v1.ExecuteChangeStreamQuery - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ExecuteChangeStreamQuery.prototype, "_deadlineSeconds", { get: $util.oneOfGetter($oneOfFields = ["deadlineSeconds"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ExecuteChangeStreamQuery _cloudDatabaseRole. - * @member {"cloudDatabaseRole"|undefined} _cloudDatabaseRole - * @memberof google.spanner.executor.v1.ExecuteChangeStreamQuery - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ExecuteChangeStreamQuery.prototype, "_cloudDatabaseRole", { get: $util.oneOfGetter($oneOfFields = ["cloudDatabaseRole"]), set: $util.oneOfSetter($oneOfFields) @@ -68177,78 +67992,43 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * SpannerActionOutcome _status. - * @member {"status"|undefined} _status - * @memberof google.spanner.executor.v1.SpannerActionOutcome - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(SpannerActionOutcome.prototype, "_status", { get: $util.oneOfGetter($oneOfFields = ["status"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * SpannerActionOutcome _commitTime. - * @member {"commitTime"|undefined} _commitTime - * @memberof google.spanner.executor.v1.SpannerActionOutcome - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(SpannerActionOutcome.prototype, "_commitTime", { get: $util.oneOfGetter($oneOfFields = ["commitTime"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * SpannerActionOutcome _readResult. 
- * @member {"readResult"|undefined} _readResult - * @memberof google.spanner.executor.v1.SpannerActionOutcome - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(SpannerActionOutcome.prototype, "_readResult", { get: $util.oneOfGetter($oneOfFields = ["readResult"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * SpannerActionOutcome _queryResult. - * @member {"queryResult"|undefined} _queryResult - * @memberof google.spanner.executor.v1.SpannerActionOutcome - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(SpannerActionOutcome.prototype, "_queryResult", { get: $util.oneOfGetter($oneOfFields = ["queryResult"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * SpannerActionOutcome _transactionRestarted. - * @member {"transactionRestarted"|undefined} _transactionRestarted - * @memberof google.spanner.executor.v1.SpannerActionOutcome - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(SpannerActionOutcome.prototype, "_transactionRestarted", { get: $util.oneOfGetter($oneOfFields = ["transactionRestarted"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * SpannerActionOutcome _batchTxnId. - * @member {"batchTxnId"|undefined} _batchTxnId - * @memberof google.spanner.executor.v1.SpannerActionOutcome - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(SpannerActionOutcome.prototype, "_batchTxnId", { get: $util.oneOfGetter($oneOfFields = ["batchTxnId"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * SpannerActionOutcome _adminResult. 
- * @member {"adminResult"|undefined} _adminResult - * @memberof google.spanner.executor.v1.SpannerActionOutcome - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(SpannerActionOutcome.prototype, "_adminResult", { get: $util.oneOfGetter($oneOfFields = ["adminResult"]), set: $util.oneOfSetter($oneOfFields) @@ -70569,34 +70349,19 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * ReadResult _index. - * @member {"index"|undefined} _index - * @memberof google.spanner.executor.v1.ReadResult - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ReadResult.prototype, "_index", { get: $util.oneOfGetter($oneOfFields = ["index"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ReadResult _requestIndex. - * @member {"requestIndex"|undefined} _requestIndex - * @memberof google.spanner.executor.v1.ReadResult - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ReadResult.prototype, "_requestIndex", { get: $util.oneOfGetter($oneOfFields = ["requestIndex"]), set: $util.oneOfSetter($oneOfFields) }); - /** - * ReadResult _rowType. - * @member {"rowType"|undefined} _rowType - * @memberof google.spanner.executor.v1.ReadResult - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(ReadResult.prototype, "_rowType", { get: $util.oneOfGetter($oneOfFields = ["rowType"]), set: $util.oneOfSetter($oneOfFields) @@ -70916,12 +70681,7 @@ // OneOf field names bound to virtual getters and setters var $oneOfFields; - /** - * QueryResult _rowType. 
- * @member {"rowType"|undefined} _rowType - * @memberof google.spanner.executor.v1.QueryResult - * @instance - */ + // Virtual OneOf for proto3 optional field Object.defineProperty(QueryResult.prototype, "_rowType", { get: $util.oneOfGetter($oneOfFields = ["rowType"]), set: $util.oneOfSetter($oneOfFields) @@ -87748,6 +87508,7 @@ * @property {Uint8Array|null} [resumeToken] PartialResultSet resumeToken * @property {google.spanner.v1.IResultSetStats|null} [stats] PartialResultSet stats * @property {google.spanner.v1.IMultiplexedSessionPrecommitToken|null} [precommitToken] PartialResultSet precommitToken + * @property {boolean|null} [last] PartialResultSet last */ /** @@ -87814,6 +87575,14 @@ */ PartialResultSet.prototype.precommitToken = null; + /** + * PartialResultSet last. + * @member {boolean} last + * @memberof google.spanner.v1.PartialResultSet + * @instance + */ + PartialResultSet.prototype.last = false; + /** * Creates a new PartialResultSet instance using the specified properties. * @function create @@ -87851,6 +87620,8 @@ $root.google.spanner.v1.ResultSetStats.encode(message.stats, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); if (message.precommitToken != null && Object.hasOwnProperty.call(message, "precommitToken")) $root.google.spanner.v1.MultiplexedSessionPrecommitToken.encode(message.precommitToken, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.last != null && Object.hasOwnProperty.call(message, "last")) + writer.uint32(/* id 9, wireType 0 =*/72).bool(message.last); return writer; }; @@ -87913,6 +87684,10 @@ message.precommitToken = $root.google.spanner.v1.MultiplexedSessionPrecommitToken.decode(reader, reader.uint32()); break; } + case 9: { + message.last = reader.bool(); + break; + } default: reader.skipType(tag & 7); break; @@ -87978,6 +87753,9 @@ if (error) return "precommitToken." 
+ error; } + if (message.last != null && message.hasOwnProperty("last")) + if (typeof message.last !== "boolean") + return "last: boolean expected"; return null; }; @@ -88025,6 +87803,8 @@ throw TypeError(".google.spanner.v1.PartialResultSet.precommitToken: object expected"); message.precommitToken = $root.google.spanner.v1.MultiplexedSessionPrecommitToken.fromObject(object.precommitToken); } + if (object.last != null) + message.last = Boolean(object.last); return message; }; @@ -88055,6 +87835,7 @@ } object.stats = null; object.precommitToken = null; + object.last = false; } if (message.metadata != null && message.hasOwnProperty("metadata")) object.metadata = $root.google.spanner.v1.ResultSetMetadata.toObject(message.metadata, options); @@ -88071,6 +87852,8 @@ object.stats = $root.google.spanner.v1.ResultSetStats.toObject(message.stats, options); if (message.precommitToken != null && message.hasOwnProperty("precommitToken")) object.precommitToken = $root.google.spanner.v1.MultiplexedSessionPrecommitToken.toObject(message.precommitToken, options); + if (message.last != null && message.hasOwnProperty("last")) + object.last = message.last; return object; }; diff --git a/protos/protos.json b/protos/protos.json index 1c3d1771c..f12164254 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -7673,8 +7673,7 @@ "php_namespace": "Google\\Cloud\\Spanner\\V1", "ruby_package": "Google::Cloud::Spanner::V1", "(google.api.resource_definition).type": "spanner.googleapis.com/Database", - "(google.api.resource_definition).pattern": "projects/{project}/instances/{instance}/databases/{database}", - "cc_enable_arenas": true + "(google.api.resource_definition).pattern": "projects/{project}/instances/{instance}/databases/{database}" }, "nested": { "Spanner": { @@ -9155,6 +9154,13 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "last": { + "type": "bool", + "id": 9, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } }, diff --git 
a/samples/add-and-drop-new-database-role.js b/samples/add-and-drop-new-database-role.js index 100c2c55b..ef361c2ee 100644 --- a/samples/add-and-drop-new-database-role.js +++ b/samples/add-and-drop-new-database-role.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_add_and_drop_database_role] /** @@ -54,7 +54,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -75,7 +75,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); diff --git a/samples/archived/add-and-drop-new-database-role.js b/samples/archived/add-and-drop-new-database-role.js index 7423430af..8b3e11c20 100644 --- a/samples/archived/add-and-drop-new-database-role.js +++ b/samples/archived/add-and-drop-new-database-role.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_add_and_drop_database_role] /** diff --git a/samples/archived/backups-copy.js b/samples/archived/backups-copy.js index 9636be972..26754b958 100644 --- a/samples/archived/backups-copy.js +++ b/samples/archived/backups-copy.js @@ -22,7 +22,7 @@ function main( instanceId = 'my-instance', backupId = 'my-backup', sourceBackupPath = 'projects/my-project-id/instances/my-source-instance/backups/my-source-backup', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_copy_backup] /** @@ -48,7 +48,7 @@ function main( // Expire copy backup 14 days in the future const expireTime = Spanner.timestamp( - Date.now() + 1000 * 60 * 60 * 24 * 14 + Date.now() + 1000 * 60 * 60 * 24 * 14, ).toStruct(); // Copy the source backup @@ -59,13 +59,13 @@ function main( backupId, { expireTime: expireTime, - } + }, ); 
console.log( `Waiting for backup copy ${ instance.backup(backupId).formattedName_ - } to complete...` + } to complete...`, ); await operation.promise(); @@ -78,7 +78,7 @@ function main( `${copyBackupInfo.sizeBytes} bytes was created at ` + `${new PreciseDate(copyBackupInfo.createTime).toISOString()} ` + 'with version time ' + - `${new PreciseDate(copyBackupInfo.versionTime).toISOString()}` + `${new PreciseDate(copyBackupInfo.versionTime).toISOString()}`, ); } else { console.error('ERROR: Copy of backup is not ready.'); diff --git a/samples/archived/backups-create-with-encryption-key.js b/samples/archived/backups-create-with-encryption-key.js index 96433971e..f0e0c8b04 100644 --- a/samples/archived/backups-create-with-encryption-key.js +++ b/samples/archived/backups-create-with-encryption-key.js @@ -20,7 +20,7 @@ async function createBackupWithEncryptionKey( databaseId, backupId, projectId, - keyName + keyName, ) { // [START spanner_create_backup_with_encryption_key] // Imports the Google Cloud client library and precise date library @@ -75,7 +75,7 @@ async function createBackupWithEncryptionKey( `Backup ${backupInfo.name} of size ` + `${backupInfo.sizeBytes} bytes was created at ` + `${new PreciseDate(backupInfo.createTime).toISOString()} ` + - `using encryption key ${backupInfo.encryptionInfo.kmsKeyVersion}` + `using encryption key ${backupInfo.encryptionInfo.kmsKeyVersion}`, ); } else { console.error('ERROR: Backup is not ready.'); diff --git a/samples/archived/backups-create.js b/samples/archived/backups-create.js index 6a8af90e4..1524bcfe9 100644 --- a/samples/archived/backups-create.js +++ b/samples/archived/backups-create.js @@ -20,7 +20,7 @@ async function createBackup( databaseId, backupId, projectId, - versionTime + versionTime, ) { // [START spanner_create_backup] // Imports the Google Cloud client library and precise date library @@ -71,7 +71,7 @@ async function createBackup( `${backupInfo.sizeBytes} bytes was created at ` + `${new 
PreciseDate(backupInfo.createTime).toISOString()} ` + 'for version of database at ' + - `${new PreciseDate(backupInfo.versionTime).toISOString()}` + `${new PreciseDate(backupInfo.versionTime).toISOString()}`, ); } else { console.error('ERROR: Backup is not ready.'); diff --git a/samples/archived/backups-get-database-operations.js b/samples/archived/backups-get-database-operations.js index d2c00ca54..c2cdcda72 100644 --- a/samples/archived/backups-get-database-operations.js +++ b/samples/archived/backups-get-database-operations.js @@ -44,11 +44,11 @@ async function getDatabaseOperations(instanceId, projectId) { databaseOperations.forEach(databaseOperation => { const metadata = protos.google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata.decode( - databaseOperation.metadata.value + databaseOperation.metadata.value, ); console.log( `Database ${metadata.name} restored from backup is ` + - `${metadata.progress.progressPercent}% optimized.` + `${metadata.progress.progressPercent}% optimized.`, ); }); } catch (err) { diff --git a/samples/archived/backups-get-operations.js b/samples/archived/backups-get-operations.js index 9369b3e74..314305929 100644 --- a/samples/archived/backups-get-operations.js +++ b/samples/archived/backups-get-operations.js @@ -19,7 +19,7 @@ async function getBackupOperations( instanceId, databaseId, backupId, - projectId + projectId, ) { // [START spanner_list_backup_operations] // Imports the Google Cloud client library @@ -52,11 +52,11 @@ async function getBackupOperations( backupOperations.forEach(backupOperation => { const metadata = protos.google.spanner.admin.database.v1.CreateBackupMetadata.decode( - backupOperation.metadata.value + backupOperation.metadata.value, ); console.log( `Backup ${metadata.name} on database ${metadata.database} is ` + - `${metadata.progress.progressPercent}% complete.` + `${metadata.progress.progressPercent}% complete.`, ); }); } catch (err) { @@ -67,7 +67,7 @@ async function getBackupOperations( try { 
console.log( '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) ' + - `AND (metadata.source_backup:${backupId})` + `AND (metadata.source_backup:${backupId})`, ); const [backupOperations] = await instance.getBackupOperations({ filter: @@ -78,11 +78,11 @@ async function getBackupOperations( backupOperations.forEach(backupOperation => { const metadata = protos.google.spanner.admin.database.v1.CopyBackupMetadata.decode( - backupOperation.metadata.value + backupOperation.metadata.value, ); console.log( `Backup ${metadata.name} copied from source backup ${metadata.sourceBackup} is ` + - `${metadata.progress.progressPercent}% complete.` + `${metadata.progress.progressPercent}% complete.`, ); }); } catch (err) { diff --git a/samples/archived/backups-restore-with-encryption-key.js b/samples/archived/backups-restore-with-encryption-key.js index c9ebb4003..ef4170b76 100644 --- a/samples/archived/backups-restore-with-encryption-key.js +++ b/samples/archived/backups-restore-with-encryption-key.js @@ -20,7 +20,7 @@ async function restoreBackupWithEncryptionKey( databaseId, backupId, projectId, - keyName + keyName, ) { // [START spanner_restore_backup_with_encryption_key] // Imports the Google Cloud client library and precise date library @@ -47,7 +47,7 @@ async function restoreBackupWithEncryptionKey( // Restore the database console.log( - `Restoring database ${database.formattedName_} from backup ${backupId}.` + `Restoring database ${database.formattedName_} from backup ${backupId}.`, ); const [, restoreOperation] = await database.restore( `projects/${projectId}/instances/${instanceId}/backups/${backupId}`, @@ -56,7 +56,7 @@ async function restoreBackupWithEncryptionKey( encryptionType: 'CUSTOMER_MANAGED_ENCRYPTION', kmsKeyName: keyName, }, - } + }, ); // Wait for restore to complete @@ -69,7 +69,7 @@ async function restoreBackupWithEncryptionKey( console.log( `Database ${restoreInfo.backupInfo.sourceDatabase} was restored ` + `to 
${databaseId} from backup ${restoreInfo.backupInfo.backup} ` + - `using encryption key ${data.metadata.encryptionConfig.kmsKeyName}.` + `using encryption key ${data.metadata.encryptionConfig.kmsKeyName}.`, ); // [END spanner_restore_backup_with_encryption_key] } diff --git a/samples/archived/backups-restore.js b/samples/archived/backups-restore.js index 467c7049e..ea0ead2fb 100644 --- a/samples/archived/backups-restore.js +++ b/samples/archived/backups-restore.js @@ -40,10 +40,10 @@ async function restoreBackup(instanceId, databaseId, backupId, projectId) { // Restore the database console.log( - `Restoring database ${database.formattedName_} from backup ${backupId}.` + `Restoring database ${database.formattedName_} from backup ${backupId}.`, ); const [, restoreOperation] = await database.restore( - `projects/${projectId}/instances/${instanceId}/backups/${backupId}` + `projects/${projectId}/instances/${instanceId}/backups/${backupId}`, ); // Wait for restore to complete @@ -56,7 +56,7 @@ async function restoreBackup(instanceId, databaseId, backupId, projectId) { `Database ${restoreInfo.backupInfo.sourceDatabase} was restored ` + `to ${databaseId} from backup ${restoreInfo.backupInfo.backup} ` + 'with version time ' + - `${new PreciseDate(restoreInfo.backupInfo.versionTime).toISOString()}.` + `${new PreciseDate(restoreInfo.backupInfo.versionTime).toISOString()}.`, ); // [END spanner_restore_backup] } diff --git a/samples/archived/backups-update.js b/samples/archived/backups-update.js index 639513821..ca37c156a 100644 --- a/samples/archived/backups-update.js +++ b/samples/archived/backups-update.js @@ -48,7 +48,7 @@ async function updateBackup(instanceId, backupId, projectId) { currentExpireTime < maxExpireTime ? 
currentExpireTime : maxExpireTime; const newExpireTime = new PreciseDate(min(wantExpireTime, maxExpireTime)); console.log( - `Backup ${backupId} current expire time: ${currentExpireTime.toISOString()}` + `Backup ${backupId} current expire time: ${currentExpireTime.toISOString()}`, ); console.log(`Updating expire time to ${newExpireTime.toISOString()}`); await backup.updateExpireTime(newExpireTime); diff --git a/samples/archived/backups.js b/samples/archived/backups.js index 29aeb6117..42a5200ab 100644 --- a/samples/archived/backups.js +++ b/samples/archived/backups.js @@ -42,8 +42,8 @@ require('yargs') opts.databaseName, opts.backupName, opts.projectId, - Date.parse(opts.versionTime) - ) + Date.parse(opts.versionTime), + ), ) .command( 'createBackupWithEncryptionKey ', @@ -55,8 +55,8 @@ require('yargs') opts.databaseName, opts.backupName, opts.projectId, - opts.keyName - ) + opts.keyName, + ), ) .command( 'cancelBackup ', @@ -67,8 +67,8 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'getBackups ', @@ -79,8 +79,8 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'getBackupOperations ', @@ -91,20 +91,20 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'getDatabaseOperations ', 'Lists all database operations in the instance.', {}, - opts => getDatabaseOperations(opts.instanceName, opts.projectId) + opts => getDatabaseOperations(opts.instanceName, opts.projectId), ) .command( 'updateBackup ', 'Updates the expire time of a backup.', {}, - opts => updateBackup(opts.instanceName, opts.backupName, opts.projectId) + opts => updateBackup(opts.instanceName, opts.backupName, opts.projectId), ) .command( 'restoreBackup ', @@ -115,8 +115,8 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + 
opts.projectId, + ), ) .command( 'restoreBackupWithEncryptionKey ', @@ -128,8 +128,8 @@ require('yargs') opts.databaseName, opts.backupName, opts.projectId, - opts.keyName - ) + opts.keyName, + ), ) .command( 'deleteBackup ', @@ -140,11 +140,11 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 createBackup "my-instance" "my-database" "my-backup" "my-project-id"' + 'node $0 createBackup "my-instance" "my-database" "my-backup" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/archived/database-create-with-default-leader.js b/samples/archived/database-create-with-default-leader.js index 7f804451c..8d1113f9e 100644 --- a/samples/archived/database-create-with-default-leader.js +++ b/samples/archived/database-create-with-default-leader.js @@ -75,7 +75,7 @@ function main(instanceId, databaseId, defaultLeader, projectId) { console.log(`Waiting for creation of ${database.id} to complete...`); await operation.promise(); console.log( - `Created database ${databaseId} with default leader ${defaultLeader}.` + `Created database ${databaseId} with default leader ${defaultLeader}.`, ); } createDatabaseWithDefaultLeader(); diff --git a/samples/archived/database-create-with-encryption-key.js b/samples/archived/database-create-with-encryption-key.js index 01d677978..a8e3a77b5 100644 --- a/samples/archived/database-create-with-encryption-key.js +++ b/samples/archived/database-create-with-encryption-key.js @@ -18,7 +18,7 @@ async function createDatabaseWithEncryptionKey( instanceId, databaseId, projectId, - keyName + keyName, ) { // [START spanner_create_database_with_encryption_key] // Imports the Google Cloud client library @@ -50,7 +50,7 @@ async function createDatabaseWithEncryptionKey( // Creates a database const [database, operation] = await instance.createDatabase( databaseId, - request + request, ); console.log(`Waiting for operation on ${database.id} to 
complete...`); @@ -62,7 +62,7 @@ async function createDatabaseWithEncryptionKey( const [data] = await database.get(); console.log( - `Database encrypted with key ${data.metadata.encryptionConfig.kmsKeyName}.` + `Database encrypted with key ${data.metadata.encryptionConfig.kmsKeyName}.`, ); // [END spanner_create_database_with_encryption_key] } diff --git a/samples/archived/database-create-with-version-retention-period.js b/samples/archived/database-create-with-version-retention-period.js index 33cfd5d5e..bbe9d8814 100644 --- a/samples/archived/database-create-with-version-retention-period.js +++ b/samples/archived/database-create-with-version-retention-period.js @@ -18,7 +18,7 @@ async function createDatabaseWithVersionRetentionPeriod( instanceId, databaseId, - projectId + projectId, ) { // [START spanner_create_database_with_version_retention_period] // Imports the Google Cloud client library @@ -56,10 +56,10 @@ async function createDatabaseWithVersionRetentionPeriod( const [data] = await database.get(); console.log( - `Version retention period: ${data.metadata.versionRetentionPeriod}` + `Version retention period: ${data.metadata.versionRetentionPeriod}`, ); const earliestVersionTime = Spanner.timestamp( - data.metadata.earliestVersionTime + data.metadata.earliestVersionTime, ); console.log(`Earliest version time: ${earliestVersionTime}`); } catch (err) { diff --git a/samples/archived/database-update-default-leader.js b/samples/archived/database-update-default-leader.js index 62fd80d18..80174add7 100644 --- a/samples/archived/database-update-default-leader.js +++ b/samples/archived/database-update-default-leader.js @@ -50,7 +50,7 @@ function main(instanceId, databaseId, defaultLeader, projectId) { console.log(`Waiting for updating of ${database.id} to complete...`); await operation.promise(); console.log( - `Updated database ${databaseId} with default leader ${defaultLeader}.` + `Updated database ${databaseId} with default leader ${defaultLeader}.`, ); } 
updateDatabaseWithDefaultLeader(); diff --git a/samples/archived/database-update.js b/samples/archived/database-update.js index 583eda219..f188d920e 100644 --- a/samples/archived/database-update.js +++ b/samples/archived/database-update.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_update_database] /** @@ -50,7 +50,7 @@ function main( enableDropProtection: true, }); console.log( - `Waiting for update operation for ${database.id} to complete...` + `Waiting for update operation for ${database.id} to complete...`, ); await operation.promise(); console.log(`Updated database ${database.id}.`); diff --git a/samples/archived/datatypes.js b/samples/archived/datatypes.js index c3d8acdac..7f55d6f4a 100644 --- a/samples/archived/datatypes.js +++ b/samples/archived/datatypes.js @@ -194,7 +194,7 @@ async function queryWithArray(instanceId, databaseId, projectId) { console.log( `VenueId: ${json.VenueId}, VenueName: ${ json.VenueName - }, AvailableDate: ${JSON.stringify(availableDate).substring(1, 11)}` + }, AvailableDate: ${JSON.stringify(availableDate).substring(1, 11)}`, ); }); } catch (err) { @@ -252,7 +252,7 @@ async function queryWithBool(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` OutdoorVenue: ${json.OutdoorVenue}` + ` OutdoorVenue: ${json.OutdoorVenue}`, ); }); } catch (err) { @@ -366,7 +366,7 @@ async function queryWithDate(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` LastContactDate: ${JSON.stringify(date).substring(1, 11)}` + ` LastContactDate: ${JSON.stringify(date).substring(1, 11)}`, ); }); } catch (err) { @@ -424,7 +424,7 @@ async function queryWithFloat(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: 
${json.VenueId}, VenueName: ${json.VenueName},` + - ` PopularityScore: ${json.PopularityScore}` + ` PopularityScore: ${json.PopularityScore}`, ); }); } catch (err) { @@ -482,7 +482,7 @@ async function queryWithInt(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` Capacity: ${json.Capacity}` + ` Capacity: ${json.Capacity}`, ); }); } catch (err) { @@ -595,7 +595,7 @@ async function queryWithTimestamp(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` LastUpdateTime: ${json.LastUpdateTime}` + ` LastUpdateTime: ${json.LastUpdateTime}`, ); }); } catch (err) { @@ -622,70 +622,73 @@ require('yargs') 'Creates sample "Venues" table containing example datatype columns in a Cloud Spanner database.', {}, opts => - createVenuesTable(opts.instanceName, opts.databaseName, opts.projectId) + createVenuesTable(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'insertData ', 'Inserts new rows of data into an sample "Venues" Cloud Spanner table.', {}, - opts => insertData(opts.instanceName, opts.databaseName, opts.projectId) + opts => insertData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithArray ', "Query data from the sample 'Venues' table with an ARRAY datatype.", {}, - opts => queryWithArray(opts.instanceName, opts.databaseName, opts.projectId) + opts => + queryWithArray(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithBool ', "Query data from the sample 'Venues' table with a BOOL datatype.", {}, - opts => queryWithBool(opts.instanceName, opts.databaseName, opts.projectId) + opts => queryWithBool(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithBytes ', "Query data from the sample 'Venues' table with a BYTES datatype.", {}, - opts => queryWithBytes(opts.instanceName, opts.databaseName, opts.projectId) 
+ opts => + queryWithBytes(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithDate ', "Query data from the sample 'Venues' table with a DATE datatype.", {}, - opts => queryWithDate(opts.instanceName, opts.databaseName, opts.projectId) + opts => queryWithDate(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithFloat ', "Query data from the sample 'Venues' table with a FLOAT64 datatype.", {}, - opts => queryWithFloat(opts.instanceName, opts.databaseName, opts.projectId) + opts => + queryWithFloat(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithInt ', "Query data from the sample 'Venues' table with a INT64 datatype.", {}, - opts => queryWithInt(opts.instanceName, opts.databaseName, opts.projectId) + opts => queryWithInt(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithString ', "Query data from the sample 'Venues' table with a STRING datatype.", {}, opts => - queryWithString(opts.instanceName, opts.databaseName, opts.projectId) + queryWithString(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithTimestamp ', "Query data from the sample 'Venues' table with a TIMESTAMP datatype.", {}, opts => - queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'addNumericColumn ', 'Adds a "Revenue" column to sample "Venues" table in a Cloud Spanner database.', {}, opts => - addNumericColumn(opts.instanceName, opts.databaseName, opts.projectId) + addNumericColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateWithNumericData ', @@ -695,8 +698,8 @@ require('yargs') updateWithNumericData( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'queryWithNumericParameter ', @@ -706,21 +709,21 @@ require('yargs') queryWithNumericParameter( opts.instanceName, opts.databaseName, - 
opts.projectId - ) + opts.projectId, + ), ) .command( 'addJsonColumn ', 'Adds a "VenueDetails" column to sample "Venues" table in a Cloud Spanner database.', {}, - opts => addJsonColumn(opts.instanceName, opts.databaseName, opts.projectId) + opts => addJsonColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateWithJsonData ', 'Updates rows to include "VenueDetails" in sample "Venues" Cloud Spanner table.', {}, opts => - updateWithJsonData(opts.instanceName, opts.databaseName, opts.projectId) + updateWithJsonData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithJsonParameter ', @@ -730,11 +733,11 @@ require('yargs') queryWithJsonParameter( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 createVenuesTable "my-instance" "my-database" "my-project-id"' + 'node $0 createVenuesTable "my-instance" "my-database" "my-project-id"', ) .example('node $0 insertData "my-instance" "my-database" "my-project-id"') .example('node $0 queryWithArray "my-instance" "my-database" "my-project-id"') @@ -744,19 +747,19 @@ require('yargs') .example('node $0 queryWithFloat "my-instance" "my-database" "my-project-id"') .example('node $0 queryWithInt "my-instance" "my-database" "my-project-id"') .example( - 'node $0 queryWithString "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithString "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 addNumericColumn "my-instance" "my-database" "my-project-id"' + 'node $0 addNumericColumn "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 updateWithNumericData "my-instance" "my-database" "my-project-id"' + 'node $0 updateWithNumericData "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryWithNumericParameter 
"my-instance" "my-database" "my-project-id"' + 'node $0 queryWithNumericParameter "my-instance" "my-database" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/archived/enable-fine-grained-access.js b/samples/archived/enable-fine-grained-access.js index fd7f38efd..35bf80e59 100644 --- a/samples/archived/enable-fine-grained-access.js +++ b/samples/archived/enable-fine-grained-access.js @@ -24,7 +24,7 @@ function main( projectId = 'my-project-id', iamMember = 'user:alice@example.com', databaseRole = 'parent', - title = 'condition title' + title = 'condition title', ) { // [START spanner_enable_fine_grained_access] /** diff --git a/samples/archived/get-database-roles.js b/samples/archived/get-database-roles.js index 99a4451e2..f550a608d 100644 --- a/samples/archived/get-database-roles.js +++ b/samples/archived/get-database-roles.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_list_database_roles] /** diff --git a/samples/archived/get-instance-config.js b/samples/archived/get-instance-config.js index b769e41c9..0fcdfb93a 100644 --- a/samples/archived/get-instance-config.js +++ b/samples/archived/get-instance-config.js @@ -43,7 +43,7 @@ function main(projectId) { `Available leader options for instance config ${instanceConfig.name} ('${ instanceConfig.displayName }'): - ${instanceConfig.leaderOptions.join()}` + ${instanceConfig.leaderOptions.join()}`, ); } getInstanceConfig(); diff --git a/samples/archived/index-create-storing.js b/samples/archived/index-create-storing.js index 72924b874..a17d3082f 100644 --- a/samples/archived/index-create-storing.js +++ b/samples/archived/index-create-storing.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_create_storing_index] /** diff --git 
a/samples/archived/index-create.js b/samples/archived/index-create.js index b220e8991..77066cdeb 100644 --- a/samples/archived/index-create.js +++ b/samples/archived/index-create.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_create_index] /** diff --git a/samples/archived/indexing.js b/samples/archived/indexing.js index 375bcd6bd..2a4e84877 100644 --- a/samples/archived/indexing.js +++ b/samples/archived/indexing.js @@ -106,7 +106,7 @@ async function queryDataWithIndex( databaseId, startTitle, endTitle, - projectId + projectId, ) { // [START spanner_query_data_with_index] // Imports the Google Cloud client library @@ -150,7 +150,7 @@ async function queryDataWithIndex( ? json.MarketingBudget : null; // This value is nullable console.log( - `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}` + `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}`, ); }); } catch (err) { @@ -274,14 +274,14 @@ require('yargs') 'createIndex ', 'Creates a new index in an example Cloud Spanner table.', {}, - opts => createIndex(opts.instanceName, opts.databaseName, opts.projectId) + opts => createIndex(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'createStoringIndex ', 'Creates a new value-storing index in an example Cloud Spanner table.', {}, opts => - createStoringIndex(opts.instanceName, opts.databaseName, opts.projectId) + createStoringIndex(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryIndex ', @@ -305,15 +305,15 @@ require('yargs') opts.databaseName, opts.startTitle, opts.endTitle, - opts.projectId - ) + opts.projectId, + ), ) .command( 'readIndex ', 'Reads data from an example Cloud Spanner table using an existing index.', {}, opts => - readDataWithIndex(opts.instanceName, opts.databaseName, opts.projectId) + 
readDataWithIndex(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'readStoringIndex ', @@ -323,17 +323,17 @@ require('yargs') readDataWithStoringIndex( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .example('node $0 createIndex "my-instance" "my-database" "my-project-id"') .example( - 'node $0 createStoringIndex "my-instance" "my-database" "my-project-id"' + 'node $0 createStoringIndex "my-instance" "my-database" "my-project-id"', ) .example('node $0 queryIndex "my-instance" "my-database" "my-project-id"') .example('node $0 readIndex "my-instance" "my-database" "my-project-id"') .example( - 'node $0 readStoringIndex "my-instance" "my-database" "my-project-id"' + 'node $0 readStoringIndex "my-instance" "my-database" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/archived/instance-config-create.js b/samples/archived/instance-config-create.js index 7ae6cccff..0969abb96 100644 --- a/samples/archived/instance-config-create.js +++ b/samples/archived/instance-config-create.js @@ -22,7 +22,7 @@ function main( instanceConfigId = 'custom-my-instance-config', baseInstanceConfigId = 'my-base-instance-config', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_create_instance_config] @@ -51,11 +51,11 @@ function main( displayName: instanceConfigId, baseConfig: baseInstanceConfig.name, replicas: baseInstanceConfig.replicas.concat( - baseInstanceConfig.optionalReplicas[0] + baseInstanceConfig.optionalReplicas[0], ), }); console.log( - `Waiting for create operation for ${instanceConfig.id} to complete...` + `Waiting for create operation for ${instanceConfig.id} to complete...`, ); await operation.promise(); console.log(`Created instance config ${instanceConfigId}.`); @@ -64,7 +64,7 @@ function main( 'ERROR: Creating instance config ', instanceConfigId, ' failed with error message ', - err + err, ); } } diff --git a/samples/archived/instance-config-delete.js 
b/samples/archived/instance-config-delete.js index d28bec969..f7719ba1d 100644 --- a/samples/archived/instance-config-delete.js +++ b/samples/archived/instance-config-delete.js @@ -21,7 +21,7 @@ function main( instanceConfigId = 'custom-my-instance-config', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_delete_instance_config] @@ -51,7 +51,7 @@ function main( console.error( 'Error: Instance config ', instanceConfigId, - ' still exists' + ' still exists', ); } else { console.log(`Deleted instance config ${instanceConfigId}.\n`); @@ -61,7 +61,7 @@ function main( 'ERROR: Deleting instance config ', instanceConfigId, ' failed with error message ', - err + err, ); } } diff --git a/samples/archived/instance-config-get-operations.js b/samples/archived/instance-config-get-operations.js index b62e0a439..a6d31604d 100644 --- a/samples/archived/instance-config-get-operations.js +++ b/samples/archived/instance-config-get-operations.js @@ -38,7 +38,7 @@ function main(projectId = 'my-project-id') { // Lists the instance config operations. 
try { console.log( - `Getting list of instance config operations on project ${projectId}...\n` + `Getting list of instance config operations on project ${projectId}...\n`, ); const [instanceConfigOperations] = await spanner.getInstanceConfigOperations({ @@ -46,19 +46,19 @@ function main(projectId = 'my-project-id') { '(metadata.@type=type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata)', }); console.log( - `Available instance config operations for project ${projectId}:` + `Available instance config operations for project ${projectId}:`, ); instanceConfigOperations.forEach(instanceConfigOperation => { const metadata = instanceConfigOperation.metadata; const instanceConfig = protos.google.spanner.admin.instance.v1.CreateInstanceConfigMetadata.decode( - instanceConfigOperation.metadata.value + instanceConfigOperation.metadata.value, ).instanceConfig; console.log( `Instance config operation for ${instanceConfig.name} of type` + ` ${metadata.type_url} has status ${ instanceConfigOperation.done ? 
'done' : 'running' - }.` + }.`, ); }); } catch (err) { diff --git a/samples/archived/instance-config-update.js b/samples/archived/instance-config-update.js index acb7dc237..474ed8bac 100644 --- a/samples/archived/instance-config-update.js +++ b/samples/archived/instance-config-update.js @@ -21,7 +21,7 @@ function main( instanceConfigId = 'custom-my-instance-config', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_update_instance_config] @@ -52,7 +52,7 @@ function main( }, }); console.log( - `Waiting for update operation for ${instanceConfig.id} to complete...` + `Waiting for update operation for ${instanceConfig.id} to complete...`, ); await operation.promise(); console.log(`Updated instance config ${instanceConfigId}.`); @@ -61,7 +61,7 @@ function main( 'ERROR: Updating instance config ', instanceConfigId, ' failed with error message ', - err + err, ); } } diff --git a/samples/archived/instance-with-processing-units.js b/samples/archived/instance-with-processing-units.js index 8d47e87d3..bb77a41b8 100644 --- a/samples/archived/instance-with-processing-units.js +++ b/samples/archived/instance-with-processing-units.js @@ -55,7 +55,7 @@ async function createInstanceWithProcessingUnits(instanceId, projectId) { }); console.log( `Instance ${instanceId} has ${metadata.processingUnits} ` + - 'processing units.' 
+ 'processing units.', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/archived/instance.js b/samples/archived/instance.js index cd9281488..42f6de350 100644 --- a/samples/archived/instance.js +++ b/samples/archived/instance.js @@ -66,17 +66,18 @@ require('yargs') 'createInstance ', 'Creates an example instance in a Cloud Spanner instance.', {}, - opts => createInstance(opts.instanceName, opts.projectId) + opts => createInstance(opts.instanceName, opts.projectId), ) .example('node $0 createInstance "my-instance" "my-project-id"') .command( 'createInstanceWithProcessingUnits ', 'Creates an example instance in a Cloud Spanner instance with processing units.', {}, - opts => createInstanceWithProcessingUnits(opts.instanceName, opts.projectId) + opts => + createInstanceWithProcessingUnits(opts.instanceName, opts.projectId), ) .example( - 'node $0 createInstanceWithProcessingUnits "my-instance" "my-project-id"' + 'node $0 createInstanceWithProcessingUnits "my-instance" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/archived/json-add-column.js b/samples/archived/json-add-column.js index 0eed31d0f..4d02b408a 100644 --- a/samples/archived/json-add-column.js +++ b/samples/archived/json-add-column.js @@ -45,7 +45,7 @@ async function addJsonColumn(instanceId, databaseId, projectId) { await operation.promise(); console.log( - `Added VenueDetails column to Venues table in database ${databaseId}.` + `Added VenueDetails column to Venues table in database ${databaseId}.`, ); // [END spanner_add_json_column] } diff --git a/samples/archived/list-instance-configs.js b/samples/archived/list-instance-configs.js index 7171acf80..351d44217 100644 --- a/samples/archived/list-instance-configs.js +++ b/samples/archived/list-instance-configs.js @@ -45,7 +45,7 @@ function main(projectId) { `Available leader options for instance config ${ instanceConfig.name } ('${instanceConfig.displayName}'): - ${instanceConfig.leaderOptions.join()}` + 
${instanceConfig.leaderOptions.join()}`, ); }); } diff --git a/samples/archived/numeric-add-column.js b/samples/archived/numeric-add-column.js index 4ab81a0ea..462307912 100644 --- a/samples/archived/numeric-add-column.js +++ b/samples/archived/numeric-add-column.js @@ -45,7 +45,7 @@ async function addNumericColumn(instanceId, databaseId, projectId) { await operation.promise(); console.log( - `Added Revenue column to Venues table in database ${databaseId}.` + `Added Revenue column to Venues table in database ${databaseId}.`, ); // [END spanner_add_numeric_column] } diff --git a/samples/archived/pg-add-column.js b/samples/archived/pg-add-column.js index a64ef4872..881e0a7b3 100644 --- a/samples/archived/pg-add-column.js +++ b/samples/archived/pg-add-column.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_add_column] /** @@ -53,7 +53,7 @@ function main( await operation.promise(); console.log( - `Added MarketingBudget column to Albums table in database ${databaseId}.` + `Added MarketingBudget column to Albums table in database ${databaseId}.`, ); } pgAddColumn(); diff --git a/samples/archived/pg-database-create.js b/samples/archived/pg-database-create.js index 4eb5abc7d..70d15f6a0 100644 --- a/samples/archived/pg-database-create.js +++ b/samples/archived/pg-database-create.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_create_database] /** @@ -52,14 +52,14 @@ function main( // DDL statements. We need to execute these separately after the database has been created. 
const [database, operationCreate] = await instance.createDatabase( databaseId, - request + request, ); console.log(`Waiting for operation on ${database.id} to complete...`); await operationCreate.promise(); await database.getMetadata(); console.log( - `Created database ${databaseId} on instance ${instanceId} with dialect ${database.metadata.databaseDialect}.` + `Created database ${databaseId} on instance ${instanceId} with dialect ${database.metadata.databaseDialect}.`, ); // Create a couple of tables using a separate request. We must use PostgreSQL style DDL as the diff --git a/samples/archived/pg-index-create-storing.js b/samples/archived/pg-index-create-storing.js index 878745ab3..b4fb1a4d1 100644 --- a/samples/archived/pg-index-create-storing.js +++ b/samples/archived/pg-index-create-storing.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_create_storing_index] /** diff --git a/samples/archived/pg-interleaving.js b/samples/archived/pg-interleaving.js index c61a594b6..36effb54b 100644 --- a/samples/archived/pg-interleaving.js +++ b/samples/archived/pg-interleaving.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_interleaved_table] /** @@ -66,7 +66,7 @@ function main( await operation.promise(); console.log( - `Created an interleaved table hierarchy in database ${databaseId} using PostgreSQL dialect.` + `Created an interleaved table hierarchy in database ${databaseId} using PostgreSQL dialect.`, ); } pgInterleaving(); diff --git a/samples/archived/pg-jsonb-add-column.js b/samples/archived/pg-jsonb-add-column.js index 8b7ea79b7..4db5663f0 100644 --- a/samples/archived/pg-jsonb-add-column.js +++ b/samples/archived/pg-jsonb-add-column.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', 
databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_jsonb_add_column] /** @@ -51,7 +51,7 @@ function main( console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); console.log( - `Added jsonb column to table venues to database ${databaseId}.` + `Added jsonb column to table venues to database ${databaseId}.`, ); } pgJsonbAddColumn(); diff --git a/samples/archived/pg-sequence-alter.js b/samples/archived/pg-sequence-alter.js index a0c41e9d5..53dae3ad4 100644 --- a/samples/archived/pg-sequence-alter.js +++ b/samples/archived/pg-sequence-alter.js @@ -49,7 +49,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', ); } catch (err) { console.error('ERROR:', err); @@ -68,7 +68,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).customerid.value - }` + }`, ); }); diff --git a/samples/archived/pg-sequence-create.js b/samples/archived/pg-sequence-create.js index 45030bf91..3bdc48fac 100644 --- a/samples/archived/pg-sequence-create.js +++ b/samples/archived/pg-sequence-create.js @@ -53,7 +53,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value' + 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value', ); } catch (err) { console.error('ERROR:', err); @@ -72,7 +72,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).customerid.value - }` + }`, ); }); diff 
--git a/samples/archived/pg-sequence-drop.js b/samples/archived/pg-sequence-drop.js index 624c80a66..cf17e9a82 100644 --- a/samples/archived/pg-sequence-drop.js +++ b/samples/archived/pg-sequence-drop.js @@ -53,7 +53,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/archived/schema.js b/samples/archived/schema.js index 8c746c2e6..540f98ce1 100644 --- a/samples/archived/schema.js +++ b/samples/archived/schema.js @@ -57,7 +57,7 @@ async function createDatabase(instanceId, databaseId, projectId) { // Creates a database const [database, operation] = await instance.createDatabase( databaseId, - request + request, ); console.log(`Waiting for operation on ${database.id} to complete...`); @@ -149,7 +149,7 @@ async function queryDataWithNewColumn(instanceId, databaseId, projectId) { json.AlbumId }, MarketingBudget: ${ json.MarketingBudget ? 
json.MarketingBudget : null - }` + }`, ); }); } catch (err) { @@ -174,7 +174,8 @@ require('yargs') 'createDatabase ', 'Creates an example database with two tables in a Cloud Spanner instance.', {}, - opts => createDatabase(opts.instanceName, opts.databaseName, opts.projectId) + opts => + createDatabase(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'createDatabaseWithEncryptionKey ', @@ -185,14 +186,14 @@ require('yargs') opts.instanceName, opts.databaseName, opts.projectId, - opts.keyName - ) + opts.keyName, + ), ) .command( 'addColumn ', 'Adds an example MarketingBudget column to an example Cloud Spanner table.', {}, - opts => addColumn(opts.instanceName, opts.databaseName, opts.projectId) + opts => addColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryNewColumn ', @@ -202,8 +203,8 @@ require('yargs') queryDataWithNewColumn( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'createDatabaseWithVersionRetentionPeriod ', @@ -213,17 +214,17 @@ require('yargs') createDatabaseWithVersionRetentionPeriod( opts.instanceName, opts.databaseId, - opts.projectId - ) + opts.projectId, + ), ) .example('node $0 createDatabase "my-instance" "my-database" "my-project-id"') .example( - 'node $0 createDatabaseWithEncryptionKey "my-instance" "my-database" "my-project-id" "key-name"' + 'node $0 createDatabaseWithEncryptionKey "my-instance" "my-database" "my-project-id" "key-name"', ) .example('node $0 addColumn "my-instance" "my-database" "my-project-id"') .example('node $0 queryNewColumn "my-instance" "my-database" "my-project-id"') .example( - 'node $0 createDatabaseWithVersionRetentionPeriod "my-instance" "my-database-id" "my-project-id"' + 'node $0 createDatabaseWithVersionRetentionPeriod "my-instance" "my-database-id" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/archived/sequence-alter.js b/samples/archived/sequence-alter.js index 7b5f363ae..14ad00eb6 100644 
--- a/samples/archived/sequence-alter.js +++ b/samples/archived/sequence-alter.js @@ -51,7 +51,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', ); } catch (err) { console.error('ERROR:', err); @@ -70,7 +70,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).CustomerId.value - }` + }`, ); }); diff --git a/samples/archived/sequence-create.js b/samples/archived/sequence-create.js index de95647e6..04a337339 100644 --- a/samples/archived/sequence-create.js +++ b/samples/archived/sequence-create.js @@ -53,7 +53,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value.' + 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value.', ); } catch (err) { console.error('ERROR:', err); @@ -72,7 +72,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).CustomerId.value - }` + }`, ); }); diff --git a/samples/archived/sequence-drop.js b/samples/archived/sequence-drop.js index 915eb8eea..913da8299 100644 --- a/samples/archived/sequence-drop.js +++ b/samples/archived/sequence-drop.js @@ -53,7 +53,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
+ 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/archived/table-create-with-foreign-key-delete-cascade.js b/samples/archived/table-create-with-foreign-key-delete-cascade.js index a41faf7f5..f48fdfd61 100644 --- a/samples/archived/table-create-with-foreign-key-delete-cascade.js +++ b/samples/archived/table-create-with-foreign-key-delete-cascade.js @@ -59,7 +59,7 @@ function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId' + 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId', ); } createTableWithForeignKeyDeleteCascade(); diff --git a/samples/archived/table-drop-foreign-key-constraint-delete-cascade.js b/samples/archived/table-drop-foreign-key-constraint-delete-cascade.js index def4292c2..8a83b2564 100644 --- a/samples/archived/table-drop-foreign-key-constraint-delete-cascade.js +++ b/samples/archived/table-drop-foreign-key-constraint-delete-cascade.js @@ -50,7 +50,7 @@ function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName' + 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName', ); } dropForeignKeyConstraintDeleteCascade(); diff --git a/samples/archived/timestamp.js b/samples/archived/timestamp.js index c5f13ad9d..e183e2e9a 100644 --- a/samples/archived/timestamp.js +++ b/samples/archived/timestamp.js @@ -156,7 +156,7 @@ async function queryTableWithTimestamp(instanceId, databaseId, projectId) { rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, VenueId: ${json.VenueId}, EventDate: ${json.EventDate}, Revenue: ${json.Revenue}, LastUpdateTime: ${json.LastUpdateTime}` + `SingerId: ${json.SingerId}, VenueId: ${json.VenueId}, EventDate: ${json.EventDate}, Revenue: 
${json.Revenue}, LastUpdateTime: ${json.LastUpdateTime}`, ); }); } catch (err) { @@ -203,7 +203,7 @@ async function addTimestampColumn(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Added LastUpdateTime as a commit timestamp column in Albums table.' + 'Added LastUpdateTime as a commit timestamp column in Albums table.', ); } catch (err) { console.error('ERROR:', err); @@ -331,7 +331,7 @@ async function queryWithTimestamp(instanceId, databaseId, projectId) { json.AlbumId }, MarketingBudget: ${ json.MarketingBudget ? json.MarketingBudget : null - }, LastUpdateTime: ${json.LastUpdateTime}` + }, LastUpdateTime: ${json.LastUpdateTime}`, ); }); } catch (err) { @@ -353,15 +353,15 @@ require('yargs') createTableWithTimestamp( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'insertWithTimestamp ', 'Inserts new rows of data including commit timestamps into an example Cloud Spanner table.', {}, opts => - insertWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + insertWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryTableWithTimestamp ', @@ -371,22 +371,22 @@ require('yargs') queryTableWithTimestamp( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'addTimestampColumn ', 'Adds a example commit timestamp column to an existing example Cloud Spanner table.', {}, opts => - addTimestampColumn(opts.instanceName, opts.databaseName, opts.projectId) + addTimestampColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateWithTimestamp ', 'Modifies existing rows of data in an example Cloud Spanner table with a commit timestamp column..', {}, opts => - updateWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + updateWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithTimestamp ', @@ -394,25 +394,25 @@ require('yargs') column 
(LastUpdateTime) added by addTimestampColumn.`, {}, opts => - queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .example( - 'node $0 createTableWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 createTableWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 insertWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 insertWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryTableWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 queryTableWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 addTimestampColumn "my-instance" "my-database" "my-project-id"' + 'node $0 addTimestampColumn "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 updateWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 updateWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/backups-cancel.js b/samples/backups-cancel.js index cae7a0cde..a02bcf95d 100644 --- a/samples/backups-cancel.js +++ b/samples/backups-cancel.js @@ -42,8 +42,8 @@ async function cancelBackup(instanceId, databaseId, backupId, projectId) { `Creating backup of database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}.` + databaseId, + )}.`, ); // Expire backup one day in the future @@ -55,7 +55,7 @@ async function cancelBackup(instanceId, databaseId, backupId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), expireTime: Spanner.timestamp(expireTime).toStruct(), name: databaseAdminClient.backupPath(projectId, instanceId, backupId), diff 
--git a/samples/backups-copy-with-multiple-kms-keys.js b/samples/backups-copy-with-multiple-kms-keys.js index f1e2fb52c..ddcc07580 100644 --- a/samples/backups-copy-with-multiple-kms-keys.js +++ b/samples/backups-copy-with-multiple-kms-keys.js @@ -23,7 +23,7 @@ function main( backupId = 'my-backup', sourceBackupPath = 'projects/my-project-id/instances/my-source-instance/backups/my-source-backup', projectId = 'my-project-id', - kmsKeyNames = 'key1,key2' + kmsKeyNames = 'key1,key2', ) { // [START spanner_copy_backup_with_MR_CMEK] /** @@ -52,7 +52,7 @@ function main( async function spannerCopyBackupWithMultipleKmsKeys() { // Expire copy backup 14 days in the future const expireTime = Spanner.timestamp( - Date.now() + 1000 * 60 * 60 * 24 * 14 + Date.now() + 1000 * 60 * 60 * 24 * 14, ).toStruct(); // Copy the source backup @@ -70,8 +70,8 @@ function main( `Waiting for backup copy ${databaseAdminClient.backupPath( projectId, instanceId, - backupId - )} to complete...` + backupId, + )} to complete...`, ); await operation.promise(); @@ -86,7 +86,7 @@ function main( `${copyBackup.sizeBytes} bytes was created at ` + `${new PreciseDate(copyBackup.createTime).toISOString()} ` + 'with version time ' + - `${new PreciseDate(copyBackup.versionTime).toISOString()}` + `${new PreciseDate(copyBackup.versionTime).toISOString()}`, ); } else { console.error('ERROR: Copy of backup is not ready.'); diff --git a/samples/backups-copy.js b/samples/backups-copy.js index 9ff142fcd..d830d179a 100644 --- a/samples/backups-copy.js +++ b/samples/backups-copy.js @@ -22,7 +22,7 @@ function main( instanceId = 'my-instance', backupId = 'my-backup', sourceBackupPath = 'projects/my-project-id/instances/my-source-instance/backups/my-source-backup', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_copy_backup] /** @@ -48,7 +48,7 @@ function main( async function spannerCopyBackup() { // Expire copy backup 14 days in the future const expireTime = Spanner.timestamp( - 
Date.now() + 1000 * 60 * 60 * 24 * 14 + Date.now() + 1000 * 60 * 60 * 24 * 14, ).toStruct(); // Copy the source backup @@ -65,8 +65,8 @@ function main( `Waiting for backup copy ${databaseAdminClient.backupPath( projectId, instanceId, - backupId - )} to complete...` + backupId, + )} to complete...`, ); await operation.promise(); @@ -81,7 +81,7 @@ function main( `${copyBackup.sizeBytes} bytes was created at ` + `${new PreciseDate(copyBackup.createTime).toISOString()} ` + 'with version time ' + - `${new PreciseDate(copyBackup.versionTime).toISOString()}` + `${new PreciseDate(copyBackup.versionTime).toISOString()}`, ); } else { console.error('ERROR: Copy of backup is not ready.'); diff --git a/samples/backups-create-with-encryption-key.js b/samples/backups-create-with-encryption-key.js index 008a2b6ea..510f514e3 100644 --- a/samples/backups-create-with-encryption-key.js +++ b/samples/backups-create-with-encryption-key.js @@ -20,7 +20,7 @@ async function createBackupWithEncryptionKey( databaseId, backupId, projectId, - keyName + keyName, ) { // [START spanner_create_backup_with_encryption_key] @@ -52,8 +52,8 @@ async function createBackupWithEncryptionKey( `Creating backup of database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}.` + databaseId, + )}.`, ); // Expire backup 14 days in the future @@ -67,7 +67,7 @@ async function createBackupWithEncryptionKey( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), expireTime: Spanner.timestamp(expireTime).toStruct(), name: databaseAdminClient.backupPath(projectId, instanceId, backupId), @@ -82,8 +82,8 @@ async function createBackupWithEncryptionKey( `Waiting for backup ${databaseAdminClient.backupPath( projectId, instanceId, - backupId - )} to complete...` + backupId, + )} to complete...`, ); await operation.promise(); @@ -96,7 +96,7 @@ async function createBackupWithEncryptionKey( `Backup ${backupInfo.name} of size ` + `${backupInfo.sizeBytes} bytes 
was created at ` + `${new PreciseDate(backupInfo.createTime).toISOString()} ` + - `using encryption key ${backupInfo.encryptionInfo.kmsKeyVersion}` + `using encryption key ${backupInfo.encryptionInfo.kmsKeyVersion}`, ); } else { console.error('ERROR: Backup is not ready.'); diff --git a/samples/backups-create-with-multiple-kms-keys.js b/samples/backups-create-with-multiple-kms-keys.js index f59263c13..d65f81461 100644 --- a/samples/backups-create-with-multiple-kms-keys.js +++ b/samples/backups-create-with-multiple-kms-keys.js @@ -20,7 +20,7 @@ function main( databaseId = 'my-database', backupId = 'my-backup', projectId = 'my-project-id', - kmsKeyNames = 'key1,key2' + kmsKeyNames = 'key1,key2', ) { // [START spanner_create_backup_with_MR_CMEK] /** @@ -52,8 +52,8 @@ function main( `Creating backup of database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}.` + databaseId, + )}.`, ); // Expire backup 14 days in the future @@ -67,7 +67,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), expireTime: Spanner.timestamp(expireTime).toStruct(), name: databaseAdminClient.backupPath(projectId, instanceId, backupId), @@ -82,8 +82,8 @@ function main( `Waiting for backup ${databaseAdminClient.backupPath( projectId, instanceId, - backupId - )} to complete...` + backupId, + )} to complete...`, ); await operation.promise(); @@ -101,7 +101,7 @@ function main( `Backup ${backupInfo.name} of size ` + `${backupInfo.sizeBytes} bytes was created at ` + `${new PreciseDate(backupInfo.createTime).toISOString()} ` + - `using encryption key ${kmsKeyVersions}` + `using encryption key ${kmsKeyVersions}`, ); } else { console.error('ERROR: Backup is not ready.'); diff --git a/samples/backups-create.js b/samples/backups-create.js index 6af1578cc..443b8bc07 100644 --- a/samples/backups-create.js +++ b/samples/backups-create.js @@ -20,7 +20,7 @@ async function createBackup( databaseId, backupId, projectId, - 
versionTime + versionTime, ) { // [START spanner_create_backup] @@ -51,8 +51,8 @@ async function createBackup( `Creating backup of database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}.` + databaseId, + )}.`, ); // Expire backup 14 days in the future @@ -66,7 +66,7 @@ async function createBackup( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), expireTime: Spanner.timestamp(expireTime).toStruct(), versionTime: Spanner.timestamp(versionTime).toStruct(), @@ -78,8 +78,8 @@ async function createBackup( `Waiting for backup ${databaseAdminClient.backupPath( projectId, instanceId, - backupId - )} to complete...` + backupId, + )} to complete...`, ); await operation.promise(); @@ -93,7 +93,7 @@ async function createBackup( `${backupInfo.sizeBytes} bytes was created at ` + `${new PreciseDate(backupInfo.createTime).toISOString()} ` + 'for version of database at ' + - `${new PreciseDate(backupInfo.versionTime).toISOString()}` + `${new PreciseDate(backupInfo.versionTime).toISOString()}`, ); } else { console.error('ERROR: Backup is not ready.'); diff --git a/samples/backups-get-database-operations.js b/samples/backups-get-database-operations.js index b4be6ffc0..756e8965a 100644 --- a/samples/backups-get-database-operations.js +++ b/samples/backups-get-database-operations.js @@ -47,11 +47,11 @@ async function getDatabaseOperations(instanceId, projectId) { databaseOperations.forEach(databaseOperation => { const metadata = protos.google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata.decode( - databaseOperation.metadata.value + databaseOperation.metadata.value, ); console.log( `Database ${metadata.name} restored from backup is ` + - `${metadata.progress.progressPercent}% optimized.` + `${metadata.progress.progressPercent}% optimized.`, ); }); } catch (err) { diff --git a/samples/backups-get-operations.js b/samples/backups-get-operations.js index ae6b2f8a4..1bb904927 100644 --- 
a/samples/backups-get-operations.js +++ b/samples/backups-get-operations.js @@ -19,7 +19,7 @@ async function getBackupOperations( instanceId, databaseId, backupId, - projectId + projectId, ) { // [START spanner_list_backup_operations] @@ -54,11 +54,11 @@ async function getBackupOperations( backupOperations.forEach(backupOperation => { const metadata = protos.google.spanner.admin.database.v1.CreateBackupMetadata.decode( - backupOperation.metadata.value + backupOperation.metadata.value, ); console.log( `Backup ${metadata.name} on database ${metadata.database} is ` + - `${metadata.progress.progressPercent}% complete.` + `${metadata.progress.progressPercent}% complete.`, ); }); } catch (err) { @@ -69,7 +69,7 @@ async function getBackupOperations( try { console.log( '(metadata.@type:type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) ' + - `AND (metadata.source_backup:${backupId})` + `AND (metadata.source_backup:${backupId})`, ); const [backupOperations] = await databaseAdminClient.listBackupOperations({ parent: databaseAdminClient.instancePath(projectId, instanceId), @@ -81,11 +81,11 @@ async function getBackupOperations( backupOperations.forEach(backupOperation => { const metadata = protos.google.spanner.admin.database.v1.CopyBackupMetadata.decode( - backupOperation.metadata.value + backupOperation.metadata.value, ); console.log( `Backup ${metadata.name} copied from source backup ${metadata.sourceBackup} is ` + - `${metadata.progress.progressPercent}% complete.` + `${metadata.progress.progressPercent}% complete.`, ); }); } catch (err) { diff --git a/samples/backups-restore-with-encryption-key.js b/samples/backups-restore-with-encryption-key.js index 7a2323246..7a644bb4b 100644 --- a/samples/backups-restore-with-encryption-key.js +++ b/samples/backups-restore-with-encryption-key.js @@ -20,7 +20,7 @@ async function restoreBackupWithEncryptionKey( databaseId, backupId, projectId, - keyName + keyName, ) { // [START 
spanner_restore_backup_with_encryption_key] @@ -50,8 +50,8 @@ async function restoreBackupWithEncryptionKey( `Restoring database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )} from backup ${backupId}.` + databaseId, + )} from backup ${backupId}.`, ); const [restoreOperation] = await databaseAdminClient.restoreDatabase({ parent: databaseAdminClient.instancePath(projectId, instanceId), @@ -74,7 +74,7 @@ async function restoreBackupWithEncryptionKey( console.log( `Database ${metadata.restoreInfo.backupInfo.sourceDatabase} was restored ` + `to ${databaseId} from backup ${metadata.restoreInfo.backupInfo.backup} ` + - `using encryption key ${metadata.encryptionConfig.kmsKeyName}.` + `using encryption key ${metadata.encryptionConfig.kmsKeyName}.`, ); // [END spanner_restore_backup_with_encryption_key] } diff --git a/samples/backups-restore-with-multiple-kms-keys.js b/samples/backups-restore-with-multiple-kms-keys.js index 1bb4c5d6b..3783b5bae 100644 --- a/samples/backups-restore-with-multiple-kms-keys.js +++ b/samples/backups-restore-with-multiple-kms-keys.js @@ -20,7 +20,7 @@ function main( databaseId = 'my-database', backupId = 'my-backup', projectId = 'my-project', - kmsKeyNames = 'key1,key2' + kmsKeyNames = 'key1,key2', ) { // [START spanner_restore_backup_with_MR_CMEK] /** @@ -51,8 +51,8 @@ function main( `Restoring database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )} from backup ${backupId}.` + databaseId, + )} from backup ${backupId}.`, ); const [restoreOperation] = await databaseAdminClient.restoreDatabase({ parent: databaseAdminClient.instancePath(projectId, instanceId), @@ -75,7 +75,7 @@ function main( console.log( `Database ${metadata.restoreInfo.backupInfo.sourceDatabase} was restored ` + `to ${databaseId} from backup ${metadata.restoreInfo.backupInfo.backup} ` + - `using encryption key ${metadata.encryptionConfig.kmsKeyNames}.` + `using encryption key ${metadata.encryptionConfig.kmsKeyNames}.`, ); 
} restoreBackupWithMultipleKmsKeys(); diff --git a/samples/backups-restore.js b/samples/backups-restore.js index 182513479..4c5e0f63b 100644 --- a/samples/backups-restore.js +++ b/samples/backups-restore.js @@ -42,8 +42,8 @@ async function restoreBackup(instanceId, databaseId, backupId, projectId) { `Restoring database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )} from backup ${backupId}.` + databaseId, + )} from backup ${backupId}.`, ); const [restoreOperation] = await databaseAdminClient.restoreDatabase({ parent: databaseAdminClient.instancePath(projectId, instanceId), @@ -64,8 +64,8 @@ async function restoreBackup(instanceId, databaseId, backupId, projectId) { `to ${databaseId} from backup ${metadata.restoreInfo.backupInfo.backup} ` + 'with version time ' + `${new PreciseDate( - metadata.restoreInfo.backupInfo.versionTime - ).toISOString()}.` + metadata.restoreInfo.backupInfo.versionTime, + ).toISOString()}.`, ); // [END spanner_restore_backup] } diff --git a/samples/backups-update.js b/samples/backups-update.js index 3bb540d4f..fa9b52489 100644 --- a/samples/backups-update.js +++ b/samples/backups-update.js @@ -54,13 +54,13 @@ async function updateBackup(instanceId, backupId, projectId) { const newExpireTime = new PreciseDate(min(wantExpireTime, maxExpireTime)); console.log( `Backup ${backupId} current expire time: ${Spanner.timestamp( - currentExpireTime - ).toISOString()}` + currentExpireTime, + ).toISOString()}`, ); console.log( `Updating expire time to ${Spanner.timestamp( - newExpireTime - ).toISOString()}` + newExpireTime, + ).toISOString()}`, ); await databaseAdminClient.updateBackup({ diff --git a/samples/backups.js b/samples/backups.js index efec54cde..acb959253 100644 --- a/samples/backups.js +++ b/samples/backups.js @@ -42,11 +42,11 @@ require('yargs') opts.databaseName, opts.backupName, opts.projectId, - Date.parse(opts.versionTime) - ) + Date.parse(opts.versionTime), + ), ) .example( - 'node $0 createBackup 
"my-instance" "my-database" "my-backup" "my-project-id" "my-version-time"' + 'node $0 createBackup "my-instance" "my-database" "my-backup" "my-project-id" "my-version-time"', ) .command( 'createBackupWithEncryptionKey ', @@ -58,11 +58,11 @@ require('yargs') opts.databaseName, opts.backupName, opts.projectId, - opts.keyName - ) + opts.keyName, + ), ) .example( - 'node $0 createBackupWithEncryptionKey "my-instance" "my-database" "my-backup" "my-project-id" "my-key-name"' + 'node $0 createBackupWithEncryptionKey "my-instance" "my-database" "my-backup" "my-project-id" "my-key-name"', ) .command( 'cancelBackup ', @@ -73,11 +73,11 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 cancelBackup "my-instance" "my-database" "my-backup" "my-project-id"' + 'node $0 cancelBackup "my-instance" "my-database" "my-backup" "my-project-id"', ) .command( 'getBackups ', @@ -88,11 +88,11 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 getBackups "my-instance" "my-database" "my-backup" "my-project-id"' + 'node $0 getBackups "my-instance" "my-database" "my-backup" "my-project-id"', ) .command( 'getBackupOperations ', @@ -103,24 +103,24 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 getBackupOperations "my-instance" "my-database" "my-backup" "my-project-id"' + 'node $0 getBackupOperations "my-instance" "my-database" "my-backup" "my-project-id"', ) .command( 'getDatabaseOperations ', 'Lists all database operations in the instance.', {}, - opts => getDatabaseOperations(opts.instanceName, opts.projectId) + opts => getDatabaseOperations(opts.instanceName, opts.projectId), ) .example('node $0 getDatabaseOperations "my-instance" "my-project-id"') .command( 'updateBackup ', 'Updates the expire time of a backup.', {}, - 
opts => updateBackup(opts.instanceName, opts.backupName, opts.projectId) + opts => updateBackup(opts.instanceName, opts.backupName, opts.projectId), ) .example('node $0 updateBackup "my-instance" "my-backup" "my-project-id"') .command( @@ -132,11 +132,11 @@ require('yargs') opts.instanceName, opts.databaseName, opts.backupName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 restoreBackup "my-instance" "my-database" "my-backup" "my-project-id"' + 'node $0 restoreBackup "my-instance" "my-database" "my-backup" "my-project-id"', ) .command( 'restoreBackupWithEncryptionKey ', @@ -148,17 +148,17 @@ require('yargs') opts.databaseName, opts.backupName, opts.projectId, - opts.keyName - ) + opts.keyName, + ), ) .example( - 'node $0 restoreBackupWithEncryptionKey "my-instance" "my-database" "my-backup" "my-project-id" "my-key-name"' + 'node $0 restoreBackupWithEncryptionKey "my-instance" "my-database" "my-backup" "my-project-id" "my-key-name"', ) .command( 'deleteBackup ', 'Deletes a backup.', {}, - opts => deleteBackup(opts.instanceName, opts.backupName, opts.projectId) + opts => deleteBackup(opts.instanceName, opts.backupName, opts.projectId), ) .example('node $0 deleteBackup "my-instance" "my-backup" "my-project-id"') .wrap(120) diff --git a/samples/batch-write.js b/samples/batch-write.js index 8f77557f4..ce7c5dd3f 100644 --- a/samples/batch-write.js +++ b/samples/batch-write.js @@ -22,7 +22,7 @@ async function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_batch_write_at_least_once] @@ -95,14 +95,14 @@ async function main( `Mutation group indexes ${ response.indexes }, have been applied with commit timestamp ${Spanner.timestamp( - response.commitTimestamp - ).toJSON()}` + response.commitTimestamp, + ).toJSON()}`, ); } // Mutation groups that fail to commit trigger a response with a non-zero status code. 
else { console.log( - `Mutation group indexes ${response.indexes}, could not be applied with error code ${response.status.code}, and error message ${response.status.message}` + `Mutation group indexes ${response.indexes}, could not be applied with error code ${response.status.code}, and error message ${response.status.message}`, ); } }) diff --git a/samples/batch.js b/samples/batch.js index 7faa9cb08..c47a2fc52 100644 --- a/samples/batch.js +++ b/samples/batch.js @@ -17,7 +17,7 @@ async function createAndExecuteQueryPartitions( instanceId, databaseId, - projectId + projectId, ) { // [START spanner_batch_client] // Imports the Google Cloud client library @@ -58,13 +58,13 @@ async function createAndExecuteQueryPartitions( transaction.execute(partition).then(results => { const rows = results[0].map(row => row.toJSON()); row_count += rows.length; - }) + }), ); }); Promise.all(promises) .then(() => { console.log( - `Successfully received ${row_count} from executed partitions.` + `Successfully received ${row_count} from executed partitions.`, ); transaction.close(); }) @@ -79,7 +79,7 @@ async function executePartition( databaseId, identifier, partition, - projectId + projectId, ) { // [START spanner_batch_execute_partitions] // Imports the Google Cloud client library @@ -119,8 +119,8 @@ require('yargs') createAndExecuteQueryPartitions( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'execute-partition ', @@ -132,14 +132,14 @@ require('yargs') opts.databaseName, JSON.parse(opts.identifier), JSON.parse(opts.partition), - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 create-and-execute-query-partitions "my-instance" "my-database" "my-project-id"' + 'node $0 create-and-execute-query-partitions "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 execute-partition "my-instance" "my-database" "{}" "{}" "my-project-id"' + 'node $0 execute-partition "my-instance" "my-database" "{}" "{}" 
"my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/create-full-backup-schedule.js b/samples/create-full-backup-schedule.js index 4188b2be3..3e66845bd 100644 --- a/samples/create-full-backup-schedule.js +++ b/samples/create-full-backup-schedule.js @@ -23,7 +23,7 @@ function main( projectId = 'my-project-id', instanceId = 'my-instance-id', databaseId = 'my-database-id', - scheduleId = 'my-schedule-id' + scheduleId = 'my-schedule-id', ) { async function createFullBackupSchedule() { // [START spanner_create_full_backup_schedule] diff --git a/samples/create-incremental-backup-schedule.js b/samples/create-incremental-backup-schedule.js index fc0bc88d8..64bfdf1da 100644 --- a/samples/create-incremental-backup-schedule.js +++ b/samples/create-incremental-backup-schedule.js @@ -23,7 +23,7 @@ function main( projectId = 'my-project-id', instanceId = 'my-instance-id', databaseId = 'my-database-id', - scheduleId = 'my-schedule-id' + scheduleId = 'my-schedule-id', ) { async function createIncrementalBackupSchedule() { // [START spanner_create_incremental_backup_schedule] diff --git a/samples/create-instance-without-default-backup-schedules.js b/samples/create-instance-without-default-backup-schedules.js index b464f4df6..bf8ef3c7c 100644 --- a/samples/create-instance-without-default-backup-schedules.js +++ b/samples/create-instance-without-default-backup-schedules.js @@ -41,7 +41,7 @@ function main(instanceId, projectId) { instance: { config: instanceAdminClient.instanceConfigPath( projectId, - 'regional-me-central2' + 'regional-me-central2', ), nodeCount: 1, displayName: 'Display name for the instance.', @@ -57,7 +57,7 @@ function main(instanceId, projectId) { await operation.promise(); console.log( - `Created instance ${instanceId} without default backup schedules.` + `Created instance ${instanceId} without default backup schedules.`, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/crud.js b/samples/crud.js index 41af3ecab..3ea486e92 
100644 --- a/samples/crud.js +++ b/samples/crud.js @@ -216,7 +216,7 @@ async function queryData(instanceId, databaseId, projectId) { rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}` + `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}`, ); }); } catch (err) { @@ -265,7 +265,7 @@ async function readData(instanceId, databaseId, projectId) { rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}` + `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}`, ); }); } catch (err) { @@ -323,7 +323,7 @@ async function readStaleData(instanceId, databaseId, projectId) { const title = json.AlbumTitle; const budget = json.MarketingBudget ? json.MarketingBudget : ''; console.log( - `SingerId: ${id}, AlbumId: ${album}, AlbumTitle: ${title}, MarketingBudget: ${budget}` + `SingerId: ${id}, AlbumId: ${album}, AlbumTitle: ${title}, MarketingBudget: ${budget}`, ); }); } catch (err) { @@ -343,43 +343,44 @@ require('yargs') 'update ', 'Modifies existing rows of data in an example Cloud Spanner table.', {}, - opts => updateData(opts.instanceName, opts.databaseName, opts.projectId) + opts => updateData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'query ', 'Executes a read-only SQL query against an example Cloud Spanner table.', {}, - opts => queryData(opts.instanceName, opts.databaseName, opts.projectId) + opts => queryData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'insert ', 'Inserts new rows of data into an example Cloud Spanner table.', {}, - opts => insertData(opts.instanceName, opts.databaseName, opts.projectId) + opts => insertData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'delete ', 'Deletes rows from an example Cloud Spanner table.', {}, - opts => 
deleteData(opts.instanceName, opts.databaseName, opts.projectId) + opts => deleteData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'read ', 'Reads data in an example Cloud Spanner table.', {}, - opts => readData(opts.instanceName, opts.databaseName, opts.projectId) + opts => readData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'read-stale ', 'Reads stale data in an example Cloud Spanner table.', {}, - opts => readStaleData(opts.instanceName, opts.databaseName, opts.projectId) + opts => readStaleData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'getCommitStats ', 'Updates rows in example Cloud Spanner table and reads CommitStats.', {}, - opts => getCommitStats(opts.instanceName, opts.databaseName, opts.projectId) + opts => + getCommitStats(opts.instanceName, opts.databaseName, opts.projectId), ) .example('node $0 update "my-instance" "my-database" "my-project-id"') .example('node $0 query "my-instance" "my-database" "my-project-id"') diff --git a/samples/database-add-split-points.js b/samples/database-add-split-points.js index fa73f84ab..996ea1ab0 100644 --- a/samples/database-add-split-points.js +++ b/samples/database-add-split-points.js @@ -22,7 +22,7 @@ function main( instanceId = 'my-instance-id', databaseId = 'my-database-id', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { async function addSplitPoints() { // [START spanner_database_add_split_points] @@ -62,7 +62,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -76,7 +76,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), splitPoints: [ { diff --git a/samples/database-create-with-default-leader.js b/samples/database-create-with-default-leader.js index 8f70e16b6..c02bf08df 100644 --- a/samples/database-create-with-default-leader.js +++ 
b/samples/database-create-with-default-leader.js @@ -47,8 +47,8 @@ function main(instanceId, databaseId, defaultLeader, projectId) { `Creating database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}.` + databaseId, + )}.`, ); const createSingersTableStatement = ` CREATE TABLE Singers ( @@ -84,7 +84,7 @@ function main(instanceId, databaseId, defaultLeader, projectId) { console.log(`Waiting for creation of ${databaseId} to complete...`); await operation.promise(); console.log( - `Created database ${databaseId} with default leader ${defaultLeader}.` + `Created database ${databaseId} with default leader ${defaultLeader}.`, ); } createDatabaseWithDefaultLeader(); diff --git a/samples/database-create-with-encryption-key.js b/samples/database-create-with-encryption-key.js index 047db39ba..3b697858d 100644 --- a/samples/database-create-with-encryption-key.js +++ b/samples/database-create-with-encryption-key.js @@ -18,7 +18,7 @@ async function createDatabaseWithEncryptionKey( instanceId, databaseId, projectId, - keyName + keyName, ) { // [START spanner_create_database_with_encryption_key] @@ -63,7 +63,7 @@ async function createDatabaseWithEncryptionKey( }); console.log( - `Database encrypted with key ${metadata.encryptionConfig.kmsKeyName}.` + `Database encrypted with key ${metadata.encryptionConfig.kmsKeyName}.`, ); // [END spanner_create_database_with_encryption_key] } diff --git a/samples/database-create-with-multiple-kms-keys.js b/samples/database-create-with-multiple-kms-keys.js index 760a6ad67..87e9984ac 100644 --- a/samples/database-create-with-multiple-kms-keys.js +++ b/samples/database-create-with-multiple-kms-keys.js @@ -18,7 +18,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', projectId = 'my-project', - kmsKeyNames = 'key1,key2,key3' + kmsKeyNames = 'key1,key2,key3', ) { // [START spanner_create_database_with_MR_CMEK] /** @@ -63,7 +63,7 @@ function main( }); console.log( - `Database encrypted with keys 
${metadata.encryptionConfig.kmsKeyNames}.` + `Database encrypted with keys ${metadata.encryptionConfig.kmsKeyNames}.`, ); } createDatabaseWithMultipleKmsKeys(); diff --git a/samples/database-create-with-version-retention-period.js b/samples/database-create-with-version-retention-period.js index bbae86fd0..0bb8988c3 100644 --- a/samples/database-create-with-version-retention-period.js +++ b/samples/database-create-with-version-retention-period.js @@ -18,7 +18,7 @@ async function createDatabaseWithVersionRetentionPeriod( instanceId, databaseId, - projectId + projectId, ) { // [START spanner_create_database_with_version_retention_period] @@ -45,8 +45,8 @@ async function createDatabaseWithVersionRetentionPeriod( console.log( `Creating database ${databaseAdminClient.instancePath( projectId, - instanceId - )}.` + instanceId, + )}.`, ); const versionRetentionStatement = ` ALTER DATABASE \`${databaseId}\` diff --git a/samples/database-get-ddl.js b/samples/database-get-ddl.js index 2b2bd9608..6e40ad3a3 100644 --- a/samples/database-get-ddl.js +++ b/samples/database-get-ddl.js @@ -44,7 +44,7 @@ function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), }); @@ -52,8 +52,8 @@ function main(instanceId, databaseId, projectId) { `Retrieved database DDL for ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}:` + databaseId, + )}:`, ); ddlStatements.statements.forEach(element => { console.log(element); diff --git a/samples/database-get-default-leader.js b/samples/database-get-default-leader.js index fe11365a9..77f6ec625 100644 --- a/samples/database-get-default-leader.js +++ b/samples/database-get-default-leader.js @@ -51,11 +51,11 @@ function main(instanceId, databaseId, projectId) { if (rows.length > 0) { const option = rows[0]; console.log( - `The ${option.OPTION_NAME} for ${databaseId} is ${option.OPTION_VALUE}` + `The ${option.OPTION_NAME} for ${databaseId} is 
${option.OPTION_VALUE}`, ); } else { console.log( - `Database ${databaseId} does not have a value for option 'default_leader'` + `Database ${databaseId} does not have a value for option 'default_leader'`, ); } } diff --git a/samples/database-update-default-leader.js b/samples/database-update-default-leader.js index 7a5c928a0..60edace6a 100644 --- a/samples/database-update-default-leader.js +++ b/samples/database-update-default-leader.js @@ -44,8 +44,8 @@ function main(instanceId, databaseId, defaultLeader, projectId) { `Updating database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}.` + databaseId, + )}.`, ); const setDefaultLeaderStatement = ` ALTER DATABASE \`${databaseId}\` @@ -54,7 +54,7 @@ function main(instanceId, databaseId, defaultLeader, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: [setDefaultLeaderStatement], }); @@ -62,7 +62,7 @@ function main(instanceId, databaseId, defaultLeader, projectId) { console.log(`Waiting for updating of ${databaseId} to complete...`); await operation.promise(); console.log( - `Updated database ${databaseId} with default leader ${defaultLeader}.` + `Updated database ${databaseId} with default leader ${defaultLeader}.`, ); } updateDatabaseWithDefaultLeader(); diff --git a/samples/database-update.js b/samples/database-update.js index 60331e1a5..d1d8fbe11 100644 --- a/samples/database-update.js +++ b/samples/database-update.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_update_database] /** @@ -48,15 +48,15 @@ function main( `Updating database ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}.` + databaseId, + )}.`, ); const [operation] = await databaseAdminClient.updateDatabase({ database: { name: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + 
databaseId, ), enableDropProtection: true, }, @@ -66,7 +66,7 @@ function main( }), }); console.log( - `Waiting for update operation for ${databaseId} to complete...` + `Waiting for update operation for ${databaseId} to complete...`, ); await operation.promise(); console.log(`Updated database ${databaseId}.`); diff --git a/samples/datatypes.js b/samples/datatypes.js index 4e9d89b37..fb7db8b52 100644 --- a/samples/datatypes.js +++ b/samples/datatypes.js @@ -53,7 +53,7 @@ async function createVenuesTable(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -200,7 +200,7 @@ async function queryWithArray(instanceId, databaseId, projectId) { console.log( `VenueId: ${json.VenueId}, VenueName: ${ json.VenueName - }, AvailableDate: ${JSON.stringify(availableDate).substring(1, 11)}` + }, AvailableDate: ${JSON.stringify(availableDate).substring(1, 11)}`, ); }); } catch (err) { @@ -258,7 +258,7 @@ async function queryWithBool(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` OutdoorVenue: ${json.OutdoorVenue}` + ` OutdoorVenue: ${json.OutdoorVenue}`, ); }); } catch (err) { @@ -372,7 +372,7 @@ async function queryWithDate(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` LastContactDate: ${JSON.stringify(date).substring(1, 11)}` + ` LastContactDate: ${JSON.stringify(date).substring(1, 11)}`, ); }); } catch (err) { @@ -430,7 +430,7 @@ async function queryWithFloat(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` PopularityScore: ${json.PopularityScore}` + ` PopularityScore: ${json.PopularityScore}`, ); }); } catch (err) { @@ -488,7 +488,7 @@ async function queryWithInt(instanceId, databaseId, projectId) 
{ const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` Capacity: ${json.Capacity}` + ` Capacity: ${json.Capacity}`, ); }); } catch (err) { @@ -601,7 +601,7 @@ async function queryWithTimestamp(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, VenueName: ${json.VenueName},` + - ` LastUpdateTime: ${json.LastUpdateTime}` + ` LastUpdateTime: ${json.LastUpdateTime}`, ); }); } catch (err) { @@ -627,70 +627,73 @@ require('yargs') 'Creates sample "Venues" table containing example datatype columns in a Cloud Spanner database.', {}, opts => - createVenuesTable(opts.instanceName, opts.databaseName, opts.projectId) + createVenuesTable(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'insertData ', 'Inserts new rows of data into an sample "Venues" Cloud Spanner table.', {}, - opts => insertData(opts.instanceName, opts.databaseName, opts.projectId) + opts => insertData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithArray ', "Query data from the sample 'Venues' table with an ARRAY datatype.", {}, - opts => queryWithArray(opts.instanceName, opts.databaseName, opts.projectId) + opts => + queryWithArray(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithBool ', "Query data from the sample 'Venues' table with a BOOL datatype.", {}, - opts => queryWithBool(opts.instanceName, opts.databaseName, opts.projectId) + opts => queryWithBool(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithBytes ', "Query data from the sample 'Venues' table with a BYTES datatype.", {}, - opts => queryWithBytes(opts.instanceName, opts.databaseName, opts.projectId) + opts => + queryWithBytes(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithDate ', "Query data from the sample 'Venues' table with a DATE datatype.", {}, - opts => queryWithDate(opts.instanceName, opts.databaseName, 
opts.projectId) + opts => queryWithDate(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithFloat ', "Query data from the sample 'Venues' table with a FLOAT64 datatype.", {}, - opts => queryWithFloat(opts.instanceName, opts.databaseName, opts.projectId) + opts => + queryWithFloat(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithInt ', "Query data from the sample 'Venues' table with a INT64 datatype.", {}, - opts => queryWithInt(opts.instanceName, opts.databaseName, opts.projectId) + opts => queryWithInt(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithString ', "Query data from the sample 'Venues' table with a STRING datatype.", {}, opts => - queryWithString(opts.instanceName, opts.databaseName, opts.projectId) + queryWithString(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithTimestamp ', "Query data from the sample 'Venues' table with a TIMESTAMP datatype.", {}, opts => - queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'addNumericColumn ', 'Adds a "Revenue" column to sample "Venues" table in a Cloud Spanner database.', {}, opts => - addNumericColumn(opts.instanceName, opts.databaseName, opts.projectId) + addNumericColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateWithNumericData ', @@ -700,8 +703,8 @@ require('yargs') updateWithNumericData( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'queryWithNumericParameter ', @@ -711,21 +714,21 @@ require('yargs') queryWithNumericParameter( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'addJsonColumn ', 'Adds a "VenueDetails" column to sample "Venues" table in a Cloud Spanner database.', {}, - opts => addJsonColumn(opts.instanceName, opts.databaseName, opts.projectId) + opts => 
addJsonColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateWithJsonData ', 'Updates rows to include "VenueDetails" in sample "Venues" Cloud Spanner table.', {}, opts => - updateWithJsonData(opts.instanceName, opts.databaseName, opts.projectId) + updateWithJsonData(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithJsonParameter ', @@ -735,11 +738,11 @@ require('yargs') queryWithJsonParameter( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 createVenuesTable "my-instance" "my-database" "my-project-id"' + 'node $0 createVenuesTable "my-instance" "my-database" "my-project-id"', ) .example('node $0 insertData "my-instance" "my-database" "my-project-id"') .example('node $0 queryWithArray "my-instance" "my-database" "my-project-id"') @@ -749,19 +752,19 @@ require('yargs') .example('node $0 queryWithFloat "my-instance" "my-database" "my-project-id"') .example('node $0 queryWithInt "my-instance" "my-database" "my-project-id"') .example( - 'node $0 queryWithString "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithString "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 addNumericColumn "my-instance" "my-database" "my-project-id"' + 'node $0 addNumericColumn "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 updateWithNumericData "my-instance" "my-database" "my-project-id"' + 'node $0 updateWithNumericData "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryWithNumericParameter "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithNumericParameter "my-instance" "my-database" "my-project-id"', ) .example('node $0 addJsonColumn "my-instance" "my-database" "my-project-id"') .wrap(120) diff --git 
a/samples/delete-backup-schedule.js b/samples/delete-backup-schedule.js index 4b48db26a..bdc04dbc3 100644 --- a/samples/delete-backup-schedule.js +++ b/samples/delete-backup-schedule.js @@ -23,7 +23,7 @@ function main( projectId = 'my-project-id', instanceId = 'my-instance-id', databaseId = 'my-database-id', - scheduleId = 'my-schedule-id' + scheduleId = 'my-schedule-id', ) { async function deleteBackupSchedule() { // [START spanner_delete_backup_schedule] @@ -49,7 +49,7 @@ function main( projectId, instanceId, databaseId, - scheduleId + scheduleId, ), }); console.log('Deleted backup schedule'); diff --git a/samples/directed-reads.js b/samples/directed-reads.js index 83e202477..233ef9401 100644 --- a/samples/directed-reads.js +++ b/samples/directed-reads.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_directed_read] // Imports the Google Cloud Spanner client library @@ -92,11 +92,11 @@ function main( rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}` + `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}`, ); }); console.log( - 'Successfully executed read-only transaction with directedReadOptions' + 'Successfully executed read-only transaction with directedReadOptions', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/dml-returning-delete.js b/samples/dml-returning-delete.js index a1d754eb7..2c68ffe31 100644 --- a/samples/dml-returning-delete.js +++ b/samples/dml-returning-delete.js @@ -52,7 +52,7 @@ async function main(instanceId, databaseId, projectId) { const rowCount = Math.floor(stats[stats.rowCount]); console.log( - `Successfully deleted ${rowCount} record from the Singers table.` + `Successfully deleted ${rowCount} record from the Singers table.`, ); rows.forEach(row => { 
console.log(row.toJSON().FullName); diff --git a/samples/dml-returning-insert.js b/samples/dml-returning-insert.js index 272c98594..48370d79f 100644 --- a/samples/dml-returning-insert.js +++ b/samples/dml-returning-insert.js @@ -57,7 +57,7 @@ async function main(instanceId, databaseId, projectId) { const rowCount = Math.floor(stats[stats.rowCount]); console.log( - `Successfully inserted ${rowCount} record into the Singers table.` + `Successfully inserted ${rowCount} record into the Singers table.`, ); rows.forEach(row => { console.log(row.toJSON().FullName); diff --git a/samples/dml-returning-update.js b/samples/dml-returning-update.js index 51cb8d2cf..790ff72e5 100644 --- a/samples/dml-returning-update.js +++ b/samples/dml-returning-update.js @@ -52,7 +52,7 @@ async function main(instanceId, databaseId, projectId) { const rowCount = Math.floor(stats[stats.rowCount]); console.log( - `Successfully updated ${rowCount} record into the Albums table.` + `Successfully updated ${rowCount} record into the Albums table.`, ); rows.forEach(row => { console.log(row.toJSON().MarketingBudget); diff --git a/samples/dml.js b/samples/dml.js index dface9327..2b7ded982 100644 --- a/samples/dml.js +++ b/samples/dml.js @@ -53,7 +53,7 @@ function insertUsingDml(instanceId, databaseId, projectId) { }); console.log( - `Successfully inserted ${rowCount} record into the Singers table.` + `Successfully inserted ${rowCount} record into the Singers table.`, ); await transaction.commit(); @@ -385,7 +385,7 @@ async function queryDataWithParameter(instanceId, databaseId, projectId) { rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, FirstName: ${json.FirstName}, LastName: ${json.LastName}` + `SingerId: ${json.SingerId}, FirstName: ${json.FirstName}, LastName: ${json.LastName}`, ); }); } catch (err) { @@ -448,7 +448,7 @@ function writeWithTransactionUsingDml(instanceId, databaseId, projectId) { // Makes sure the second album's budget is large enough if 
(secondBudget < transferAmount) { throw new Error( - `The second album's budget (${secondBudget}) is less than the transfer amount (${transferAmount}).` + `The second album's budget (${secondBudget}) is less than the transfer amount (${transferAmount}).`, ); } }), @@ -488,7 +488,7 @@ function writeWithTransactionUsingDml(instanceId, databaseId, projectId) { params: { Budget: secondBudget, }, - }) + }), ); }) .then(() => { @@ -497,7 +497,7 @@ function writeWithTransactionUsingDml(instanceId, databaseId, projectId) { }) .then(() => { console.log( - `Successfully executed read-write transaction using DML to transfer ${transferAmount} from Album 2 to Album 1.` + `Successfully executed read-write transaction using DML to transfer ${transferAmount} from Album 2 to Album 1.`, ); }) .then(() => { @@ -616,7 +616,7 @@ async function updateUsingBatchDml(instanceId, databaseId, projectId) { const [rowCounts] = await transaction.batchUpdate(dmlStatements); await transaction.commit(); console.log( - `Successfully executed ${rowCounts.length} SQL statements using Batch DML.` + `Successfully executed ${rowCounts.length} SQL statements using Batch DML.`, ); }); } catch (err) { @@ -632,7 +632,7 @@ async function updateUsingBatchDml(instanceId, databaseId, projectId) { async function insertWithCustomTimeoutAndRetrySettings( instanceId, databaseId, - projectId + projectId, ) { // [START spanner_set_custom_timeout_and_retry] // Imports the Google Cloud client library @@ -703,19 +703,22 @@ require('yargs') 'insertUsingDml ', 'Inserts one record using DML into an example Cloud Spanner table.', {}, - opts => insertUsingDml(opts.instanceName, opts.databaseName, opts.projectId) + opts => + insertUsingDml(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateUsingDml ', 'Updates one record using DML.', {}, - opts => updateUsingDml(opts.instanceName, opts.databaseName, opts.projectId) + opts => + updateUsingDml(opts.instanceName, opts.databaseName, opts.projectId), ) 
.command( 'deleteUsingDml ', 'Deletes one record using DML.', {}, - opts => deleteUsingDml(opts.instanceName, opts.databaseName, opts.projectId) + opts => + deleteUsingDml(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateUsingDmlWithTimestamp ', @@ -725,15 +728,19 @@ require('yargs') updateUsingDmlWithTimestamp( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'writeAndReadUsingDml ', 'Inserts and reads one record using DML.', {}, opts => - writeAndReadUsingDml(opts.instanceName, opts.databaseName, opts.projectId) + writeAndReadUsingDml( + opts.instanceName, + opts.databaseName, + opts.projectId, + ), ) .command( 'updateUsingDmlWithStruct ', @@ -743,14 +750,14 @@ require('yargs') updateUsingDmlWithStruct( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'writeUsingDml ', 'Inserts multiple records using DML.', {}, - opts => writeUsingDml(opts.instanceName, opts.databaseName, opts.projectId) + opts => writeUsingDml(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithParameter ', @@ -760,8 +767,8 @@ require('yargs') queryDataWithParameter( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'writeWithTransactionUsingDml ', @@ -771,8 +778,8 @@ require('yargs') writeWithTransactionUsingDml( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'updateUsingPartitionedDml ', @@ -782,8 +789,8 @@ require('yargs') updateUsingPartitionedDml( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'deleteUsingPartitionedDml ', @@ -793,15 +800,15 @@ require('yargs') deleteUsingPartitionedDml( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'updateUsingBatchDml ', 'Insert and Update records using Batch DML.', {}, opts => - updateUsingBatchDml(opts.instanceName, 
opts.databaseName, opts.projectId) + updateUsingBatchDml(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'insertWithCustomTimeoutAndRetrySettings ', @@ -811,39 +818,39 @@ require('yargs') insertWithCustomTimeoutAndRetrySettings( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .example('node $0 insertUsingDml "my-instance" "my-database" "my-project-id"') .example('node $0 updateUsingDml "my-instance" "my-database" "my-project-id"') .example('node $0 deleteUsingDml "my-instance" "my-database" "my-project-id"') .example( - 'node $0 updateUsingDmlWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 updateUsingDmlWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 writeAndReadUsingDml "my-instance" "my-database" "my-project-id"' + 'node $0 writeAndReadUsingDml "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 updateUsingDmlWithStruct "my-instance" "my-database" "my-project-id"' + 'node $0 updateUsingDmlWithStruct "my-instance" "my-database" "my-project-id"', ) .example('node $0 writeUsingDml "my-instance" "my-database" "my-project-id"') .example( - 'node $0 queryWithParameter "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithParameter "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 writeWithTransactionUsingDml "my-instance" "my-database" "my-project-id"' + 'node $0 writeWithTransactionUsingDml "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 updateUsingPartitionedDml "my-instance" "my-database" "my-project-id"' + 'node $0 updateUsingPartitionedDml "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 deleteUsingPartitionedDml "my-instance" "my-database" "my-project-id"' + 'node $0 deleteUsingPartitionedDml "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 updateUsingBatchDml "my-instance" "my-database" "my-project-id"' + 'node $0 updateUsingBatchDml 
"my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 insertWithCustomTimeoutAndRetrySettings "my-instance" "my-database" "my-project-id"' + 'node $0 insertWithCustomTimeoutAndRetrySettings "my-instance" "my-database" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/enable-fine-grained-access.js b/samples/enable-fine-grained-access.js index 0b333f4d1..217668268 100644 --- a/samples/enable-fine-grained-access.js +++ b/samples/enable-fine-grained-access.js @@ -24,7 +24,7 @@ function main( projectId = 'my-project-id', iamMember = 'user:alice@example.com', databaseRole = 'parent', - title = 'condition title' + title = 'condition title', ) { // [START spanner_enable_fine_grained_access] /** @@ -52,7 +52,7 @@ function main( resource: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), options: (protos.google.iam.v1.GetPolicyOptions = { requestedPolicyVersion: 3, @@ -75,7 +75,7 @@ function main( resource: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), policy: policy, }); @@ -85,7 +85,7 @@ function main( resource: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), options: (protos.google.iam.v1.GetPolicyOptions = { requestedPolicyVersion: 3, diff --git a/samples/get-backup-schedule.js b/samples/get-backup-schedule.js index 1ad5dced0..3671021c6 100644 --- a/samples/get-backup-schedule.js +++ b/samples/get-backup-schedule.js @@ -23,7 +23,7 @@ function main( projectId = 'my-project-id', instanceId = 'my-instance-id', databaseId = 'my-database-id', - scheduleId = 'my-schedule-id' + scheduleId = 'my-schedule-id', ) { async function getBackupSchedule() { // [START spanner_get_backup_schedule] @@ -49,7 +49,7 @@ function main( projectId, instanceId, databaseId, - scheduleId + scheduleId, ), }); console.log('Backup schedule:', response); diff --git a/samples/get-commit-stats.js b/samples/get-commit-stats.js index 26a963b2b..ada5916a4 
100644 --- a/samples/get-commit-stats.js +++ b/samples/get-commit-stats.js @@ -45,10 +45,10 @@ async function getCommitStats(instanceId, databaseId, projectId) { {SingerId: '1', AlbumId: '1', MarketingBudget: '200000'}, {SingerId: '2', AlbumId: '2', MarketingBudget: '400000'}, ], - {returnCommitStats: true} + {returnCommitStats: true}, ); console.log( - `Updated data with ${response.commitStats.mutationCount} mutations.` + `Updated data with ${response.commitStats.mutationCount} mutations.`, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/get-database-roles.js b/samples/get-database-roles.js index a150a6f85..e1dd53874 100644 --- a/samples/get-database-roles.js +++ b/samples/get-database-roles.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_list_database_roles] /** @@ -47,15 +47,15 @@ function main( parent: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), }); console.log( `Roles for Database: ${databaseAdminClient.databasePath( projectId, instanceId, - databaseId - )}` + databaseId, + )}`, ); databaseRoles.forEach(role => { console.log(`Role: ${role.name}`); diff --git a/samples/get-instance-config.js b/samples/get-instance-config.js index 01a4d4952..2fa37f55d 100644 --- a/samples/get-instance-config.js +++ b/samples/get-instance-config.js @@ -48,7 +48,7 @@ function main(projectId) { `Available leader options for instance config ${instanceConfig.name} ('${ instanceConfig.displayName }'): - ${instanceConfig.leaderOptions.join()}` + ${instanceConfig.leaderOptions.join()}`, ); } getInstanceConfig(); diff --git a/samples/index-create-storing.js b/samples/index-create-storing.js index ec360a274..ecb1a22c9 100644 --- a/samples/index-create-storing.js +++ b/samples/index-create-storing.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 
'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_create_storing_index] /** @@ -52,7 +52,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); diff --git a/samples/index-create.js b/samples/index-create.js index f0cc17163..c298a11ca 100644 --- a/samples/index-create.js +++ b/samples/index-create.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_create_index] /** @@ -50,7 +50,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); diff --git a/samples/index-query-data.js b/samples/index-query-data.js index 87b92df6a..9fb516773 100644 --- a/samples/index-query-data.js +++ b/samples/index-query-data.js @@ -23,7 +23,7 @@ function main( databaseId = 'my-database', projectId = 'my-project-id', startTitle = 'Ardvark', - endTitle = 'Goo' + endTitle = 'Goo', ) { // [START spanner_query_data_with_index] /** @@ -68,7 +68,7 @@ function main( ? 
json.MarketingBudget : null; // This value is nullable console.log( - `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}` + `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}`, ); }); } catch (err) { diff --git a/samples/index-read-data-with-storing.js b/samples/index-read-data-with-storing.js index 8d644912e..3d1f6d0f6 100644 --- a/samples/index-read-data-with-storing.js +++ b/samples/index-read-data-with-storing.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_read_data_with_storing_index] /** diff --git a/samples/index-read-data.js b/samples/index-read-data.js index ec5dedc6a..e81ac98db 100644 --- a/samples/index-read-data.js +++ b/samples/index-read-data.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_read_data_with_index] /** diff --git a/samples/indexing.js b/samples/indexing.js index 6ede50e8d..111c2cb0a 100644 --- a/samples/indexing.js +++ b/samples/indexing.js @@ -41,7 +41,7 @@ async function createIndex(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -94,7 +94,7 @@ async function createStoringIndex(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -118,7 +118,7 @@ async function queryDataWithIndex( databaseId, startTitle, endTitle, - projectId + projectId, ) { // [START spanner_query_data_with_index] // Imports the Google Cloud client library @@ -162,7 +162,7 @@ async function queryDataWithIndex( ? 
json.MarketingBudget : null; // This value is nullable console.log( - `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}` + `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}`, ); }); } catch (err) { @@ -286,14 +286,14 @@ require('yargs') 'createIndex ', 'Creates a new index in an example Cloud Spanner table.', {}, - opts => createIndex(opts.instanceName, opts.databaseName, opts.projectId) + opts => createIndex(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'createStoringIndex ', 'Creates a new value-storing index in an example Cloud Spanner table.', {}, opts => - createStoringIndex(opts.instanceName, opts.databaseName, opts.projectId) + createStoringIndex(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryIndex ', @@ -317,15 +317,15 @@ require('yargs') opts.databaseName, opts.startTitle, opts.endTitle, - opts.projectId - ) + opts.projectId, + ), ) .command( 'readIndex ', 'Reads data from an example Cloud Spanner table using an existing index.', {}, opts => - readDataWithIndex(opts.instanceName, opts.databaseName, opts.projectId) + readDataWithIndex(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'readStoringIndex ', @@ -335,17 +335,17 @@ require('yargs') readDataWithStoringIndex( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .example('node $0 createIndex "my-instance" "my-database" "my-project-id"') .example( - 'node $0 createStoringIndex "my-instance" "my-database" "my-project-id"' + 'node $0 createStoringIndex "my-instance" "my-database" "my-project-id"', ) .example('node $0 queryIndex "my-instance" "my-database" "my-project-id"') .example('node $0 readIndex "my-instance" "my-database" "my-project-id"') .example( - 'node $0 readStoringIndex "my-instance" "my-database" "my-project-id"' + 'node $0 readStoringIndex "my-instance" "my-database" "my-project-id"', ) .wrap(120) 
.recommendCommands() diff --git a/samples/instance-config-create.js b/samples/instance-config-create.js index 95686b0d7..139f7e406 100644 --- a/samples/instance-config-create.js +++ b/samples/instance-config-create.js @@ -22,7 +22,7 @@ function main( instanceConfigId = 'custom-my-instance-config', baseInstanceConfigId = 'my-base-instance-config', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_create_instance_config] @@ -48,15 +48,15 @@ function main( const [baseInstanceConfig] = await instanceAdminClient.getInstanceConfig({ name: instanceAdminClient.instanceConfigPath( projectId, - baseInstanceConfigId + baseInstanceConfigId, ), }); try { console.log( `Creating instance config ${instanceAdminClient.instanceConfigPath( projectId, - instanceConfigId - )}.` + instanceConfigId, + )}.`, ); const [operation] = await instanceAdminClient.createInstanceConfig({ instanceConfigId: instanceConfigId, @@ -64,20 +64,20 @@ function main( instanceConfig: { name: instanceAdminClient.instanceConfigPath( projectId, - instanceConfigId + instanceConfigId, ), baseConfig: instanceAdminClient.instanceConfigPath( projectId, - baseInstanceConfigId + baseInstanceConfigId, ), displayName: instanceConfigId, replicas: baseInstanceConfig.replicas.concat( - baseInstanceConfig.optionalReplicas[0] + baseInstanceConfig.optionalReplicas[0], ), }, }); console.log( - `Waiting for create operation for ${instanceConfigId} to complete...` + `Waiting for create operation for ${instanceConfigId} to complete...`, ); await operation.promise(); console.log(`Created instance config ${instanceConfigId}.`); @@ -86,7 +86,7 @@ function main( 'ERROR: Creating instance config ', instanceConfigId, ' failed with error message ', - err + err, ); } } diff --git a/samples/instance-config-delete.js b/samples/instance-config-delete.js index b5de388b0..6a879450a 100644 --- a/samples/instance-config-delete.js +++ b/samples/instance-config-delete.js @@ -21,7 +21,7 @@ function main( 
instanceConfigId = 'custom-my-instance-config', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_delete_instance_config] /** @@ -49,7 +49,7 @@ function main( await instanceAdminClient.deleteInstanceConfig({ name: instanceAdminClient.instanceConfigPath( projectId, - instanceConfigId + instanceConfigId, ), }); console.log(`Deleted instance config ${instanceConfigId}.\n`); @@ -58,7 +58,7 @@ function main( 'ERROR: Deleting instance config ', instanceConfigId, ' failed with error message ', - err + err, ); } } diff --git a/samples/instance-config-get-operations.js b/samples/instance-config-get-operations.js index 4737ff40e..9d8d98eba 100644 --- a/samples/instance-config-get-operations.js +++ b/samples/instance-config-get-operations.js @@ -41,7 +41,7 @@ function main(projectId = 'my-project-id') { // Lists the instance config operations. try { console.log( - `Getting list of instance config operations on project ${projectId}...\n` + `Getting list of instance config operations on project ${projectId}...\n`, ); const [instanceConfigOperations] = await instanceAdminClient.listInstanceConfigOperations({ @@ -51,19 +51,19 @@ function main(projectId = 'my-project-id') { '(metadata.@type=type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata)', }); console.log( - `Available instance config operations for project ${projectId}:` + `Available instance config operations for project ${projectId}:`, ); instanceConfigOperations.forEach(instanceConfigOperation => { const metadata = instanceConfigOperation.metadata; const instanceConfig = protos.google.spanner.admin.instance.v1.CreateInstanceConfigMetadata.decode( - instanceConfigOperation.metadata.value + instanceConfigOperation.metadata.value, ).instanceConfig; console.log( `Instance config operation for ${instanceConfig.name} of type` + ` ${metadata.type_url} has status ${ instanceConfigOperation.done ? 
'done' : 'running' - }.` + }.`, ); }); } catch (err) { diff --git a/samples/instance-config-update.js b/samples/instance-config-update.js index 59b8bae46..7bacc7e0b 100644 --- a/samples/instance-config-update.js +++ b/samples/instance-config-update.js @@ -21,7 +21,7 @@ function main( instanceConfigId = 'custom-my-instance-config', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_update_instance_config] @@ -47,14 +47,14 @@ function main( console.log( `Updating instance config ${instanceAdminClient.instanceConfigPath( projectId, - instanceConfigId - )}.` + instanceConfigId, + )}.`, ); const [operation] = await instanceAdminClient.updateInstanceConfig({ instanceConfig: { name: instanceAdminClient.instanceConfigPath( projectId, - instanceConfigId + instanceConfigId, ), displayName: 'updated custom instance config', labels: { @@ -69,7 +69,7 @@ function main( }), }); console.log( - `Waiting for update operation for ${instanceConfigId} to complete...` + `Waiting for update operation for ${instanceConfigId} to complete...`, ); await operation.promise(); console.log(`Updated instance config ${instanceConfigId}.`); @@ -78,7 +78,7 @@ function main( 'ERROR: Updating instance config ', instanceConfigId, ' failed with error message ', - err + err, ); } } diff --git a/samples/instance-partition-create.js b/samples/instance-partition-create.js index b7536fe93..3af48fb5a 100644 --- a/samples/instance-partition-create.js +++ b/samples/instance-partition-create.js @@ -22,7 +22,7 @@ function main( instanceId = 'my-instance', instancePartitionId = 'my-instance-partition', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { async function createInstancePartition() { // [START spanner_create_instance_partition] @@ -50,8 +50,8 @@ function main( `Creating instance partition ${instanceAdminClient.instancePartitionPath( projectId, instanceId, - instancePartitionId - )}.` + instancePartitionId, + )}.`, ); const [operation] = await 
instanceAdminClient.createInstancePartition({ instancePartitionId: instancePartitionId, @@ -64,7 +64,7 @@ function main( }); console.log( - `Waiting for operation on ${instancePartitionId} to complete...` + `Waiting for operation on ${instancePartitionId} to complete...`, ); await operation.promise(); diff --git a/samples/instance-update.js b/samples/instance-update.js index 8db5ca0ca..3dea1fae4 100644 --- a/samples/instance-update.js +++ b/samples/instance-update.js @@ -44,8 +44,8 @@ function main(instanceId, projectId) { console.log( `Updating instance ${instanceAdminClient.instancePath( projectId, - instanceId - )}.` + instanceId, + )}.`, ); const [operation] = await instanceAdminClient.updateInstance({ instance: { @@ -71,7 +71,7 @@ function main(instanceId, projectId) { }); console.log( `Instance ${instanceId} has been updated with the ${metadata.edition} ` + - 'edition.' + 'edition.', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/instance-with-asymmetric-autoscaling-config.js b/samples/instance-with-asymmetric-autoscaling-config.js index 5637db4f3..b51e41867 100644 --- a/samples/instance-with-asymmetric-autoscaling-config.js +++ b/samples/instance-with-asymmetric-autoscaling-config.js @@ -47,7 +47,7 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { { minNodes: 1, maxNodes: 2, - } + }, ), // highPriorityCpuUtilizationPercent and storageUtilizationPercent are both // percentages and must lie between 0 and 100. @@ -56,7 +56,7 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { { highPriorityCpuUtilizationPercent: 65, storageUtilizationPercent: 95, - } + }, ), // The read-only replicas listed in the asymmetric autoscaling options scale independently // from other replicas. 
@@ -67,9 +67,9 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { protos.google.spanner.admin.instance.v1.ReplicaSelection.create( { location: 'europe-west1', - } + }, ), - } + }, ), protos.google.spanner.admin.instance.v1.AutoscalingConfig.AsymmetricAutoscalingOption.create( { @@ -77,9 +77,9 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { protos.google.spanner.admin.instance.v1.ReplicaSelection.create( { location: 'europe-west4', - } + }, ), - } + }, ), protos.google.spanner.admin.instance.v1.AutoscalingConfig.AsymmetricAutoscalingOption.create( { @@ -87,9 +87,9 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { protos.google.spanner.admin.instance.v1.ReplicaSelection.create( { location: 'asia-east1', - } + }, ), - } + }, ), ], }); @@ -101,8 +101,8 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { console.log( `Creating instance ${instanceAdminClient.instancePath( projectId, - instanceId - )}.` + instanceId, + )}.`, ); const [operation] = await instanceAdminClient.createInstance({ instanceId: instanceId, @@ -110,7 +110,7 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { instance: { config: instanceAdminClient.instanceConfigPath( projectId, - 'nam-eur-asia3' + 'nam-eur-asia3', ), displayName: 'Display name for the instance.', autoscalingConfig: autoscalingConfig, @@ -153,11 +153,11 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { .map(option => option.replicaSelection && option.replicaSelection.location ? 
option.replicaSelection.location - : 'N/A' + : 'N/A', ) .join(', ') : 'None' - }` + }`, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/instance-with-autoscaling-config.js b/samples/instance-with-autoscaling-config.js index 5ebd1160b..3eb332d21 100644 --- a/samples/instance-with-autoscaling-config.js +++ b/samples/instance-with-autoscaling-config.js @@ -47,7 +47,7 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { { minNodes: 1, maxNodes: 2, - } + }, ), // highPriorityCpuUtilizationPercent and storageUtilizationPercent are both // percentages and must lie between 0 and 100. @@ -56,7 +56,7 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { { highPriorityCpuUtilizationPercent: 65, storageUtilizationPercent: 95, - } + }, ), }); @@ -67,8 +67,8 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { console.log( `Creating instance ${instanceAdminClient.instancePath( projectId, - instanceId - )}.` + instanceId, + )}.`, ); const [operation] = await instanceAdminClient.createInstance({ instanceId: instanceId, @@ -76,7 +76,7 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { instance: { config: instanceAdminClient.instanceConfigPath( projectId, - 'regional-us-central1' + 'regional-us-central1', ), displayName: 'Display name for the instance.', autoscalingConfig: autoscalingConfig, @@ -109,7 +109,7 @@ function main(instanceId = 'my-instance', projectId = 'my-project-id') { '\n' + `High priority cpu utilization percent: ${metadata.autoscalingConfig.autoscalingTargets.highPriorityCpuUtilizationPercent}.` + '\n' + - `Storage utilization percent: ${metadata.autoscalingConfig.autoscalingTargets.storageUtilizationPercent}.` + `Storage utilization percent: ${metadata.autoscalingConfig.autoscalingTargets.storageUtilizationPercent}.`, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/instance-with-processing-units.js 
b/samples/instance-with-processing-units.js index 1c4e295c5..d4a33416d 100644 --- a/samples/instance-with-processing-units.js +++ b/samples/instance-with-processing-units.js @@ -39,15 +39,15 @@ async function createInstanceWithProcessingUnits(instanceId, projectId) { console.log( `Creating instance ${instanceAdminClient.instancePath( projectId, - instanceId - )}.` + instanceId, + )}.`, ); const [operation] = await instanceAdminClient.createInstance({ instanceId: instanceId, instance: { config: instanceAdminClient.instanceConfigPath( projectId, - 'regional-us-central1' + 'regional-us-central1', ), displayName: 'Display name for the instance.', processingUnits: 500, @@ -67,7 +67,7 @@ async function createInstanceWithProcessingUnits(instanceId, projectId) { }); console.log( `Instance ${instanceId} has ${metadata.processingUnits} ` + - 'processing units.' + 'processing units.', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/instance.js b/samples/instance.js index cad7b2bee..e43ee3ea6 100644 --- a/samples/instance.js +++ b/samples/instance.js @@ -39,8 +39,8 @@ async function createInstance(instanceId, projectId) { console.log( `Creating instance ${instanceAdminClient.instancePath( projectId, - instanceId - )}.` + instanceId, + )}.`, ); const [operation] = await instanceAdminClient.createInstance({ instanceId: instanceId, @@ -48,7 +48,7 @@ async function createInstance(instanceId, projectId) { instance: { config: instanceAdminClient.instanceConfigPath( projectId, - 'regional-us-central1' + 'regional-us-central1', ), nodeCount: 1, displayName: 'Display name for the instance.', @@ -81,17 +81,18 @@ require('yargs') 'createInstance ', 'Creates an example instance in a Cloud Spanner instance using Instance Admin Client.', {}, - opts => createInstance(opts.instanceName, opts.projectId) + opts => createInstance(opts.instanceName, opts.projectId), ) .example('node $0 createInstance "my-instance" "my-project-id"') .command( 
'createInstanceWithProcessingUnits ', 'Creates an example instance in a Cloud Spanner instance with processing units.', {}, - opts => createInstanceWithProcessingUnits(opts.instanceName, opts.projectId) + opts => + createInstanceWithProcessingUnits(opts.instanceName, opts.projectId), ) .example( - 'node $0 createInstanceWithProcessingUnits "my-instance" "my-project-id"' + 'node $0 createInstanceWithProcessingUnits "my-instance" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/json-add-column.js b/samples/json-add-column.js index 1ce0331b1..5013fb82e 100644 --- a/samples/json-add-column.js +++ b/samples/json-add-column.js @@ -41,7 +41,7 @@ async function addJsonColumn(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -51,7 +51,7 @@ async function addJsonColumn(instanceId, databaseId, projectId) { await operation.promise(); console.log( - `Added VenueDetails column to Venues table in database ${databaseId}.` + `Added VenueDetails column to Venues table in database ${databaseId}.`, ); // [END spanner_add_json_column] } diff --git a/samples/json-query-parameter.js b/samples/json-query-parameter.js index 69b368105..c68a46d0b 100644 --- a/samples/json-query-parameter.js +++ b/samples/json-query-parameter.js @@ -60,8 +60,8 @@ async function queryWithJsonParameter(instanceId, databaseId, projectId) { const json = row.toJSON(); console.log( `VenueId: ${json.VenueId}, Details: ${JSON.stringify( - json.VenueDetails - )}` + json.VenueDetails, + )}`, ); }); } catch (err) { diff --git a/samples/list-backup-schedules.js b/samples/list-backup-schedules.js index 5e972de28..325781f5a 100644 --- a/samples/list-backup-schedules.js +++ b/samples/list-backup-schedules.js @@ -22,7 +22,7 @@ function main( projectId = 'my-project-id', instanceId = 'my-instance-id', - databaseId = 'my-database-id' + databaseId = 'my-database-id', ) { async function 
listBackupSchedules() { // [START spanner_list_backup_schedules] diff --git a/samples/list-instance-configs.js b/samples/list-instance-configs.js index 27017f38e..58e9283ff 100644 --- a/samples/list-instance-configs.js +++ b/samples/list-instance-configs.js @@ -49,7 +49,7 @@ function main(projectId) { `Available leader options for instance config ${ instanceConfig.name } ('${instanceConfig.displayName}'): - ${instanceConfig.leaderOptions.join()}` + ${instanceConfig.leaderOptions.join()}`, ); }); } diff --git a/samples/max-commit-delay.js b/samples/max-commit-delay.js index cab871ee7..56419c6b3 100644 --- a/samples/max-commit-delay.js +++ b/samples/max-commit-delay.js @@ -20,7 +20,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_set_max_commit_delay] // Imports the Google Cloud client library. @@ -58,7 +58,7 @@ function main( }); console.log( - `Successfully inserted ${rowCount} record into the Singers table.` + `Successfully inserted ${rowCount} record into the Singers table.`, ); await transaction.commit({ diff --git a/samples/numeric-add-column.js b/samples/numeric-add-column.js index 85a280e1d..78a72579e 100644 --- a/samples/numeric-add-column.js +++ b/samples/numeric-add-column.js @@ -41,7 +41,7 @@ async function addNumericColumn(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -51,7 +51,7 @@ async function addNumericColumn(instanceId, databaseId, projectId) { await operation.promise(); console.log( - `Added Revenue column to Venues table in database ${databaseId}.` + `Added Revenue column to Venues table in database ${databaseId}.`, ); // [END spanner_add_numeric_column] } diff --git a/samples/observability-traces-otlp.js b/samples/observability-traces-otlp.js index bc448f527..321923d37 100644 --- a/samples/observability-traces-otlp.js +++ 
b/samples/observability-traces-otlp.js @@ -23,7 +23,7 @@ async function main( projectId = 'my-project-id', instanceId = 'my-instance-id', - databaseId = 'my-project-id' + databaseId = 'my-project-id', ) { // [START spanner_opentelemetry_traces_otlp_usage] const {NodeTracerProvider} = require('@opentelemetry/sdk-trace-node'); diff --git a/samples/observability-traces.js b/samples/observability-traces.js index 6f0c199da..5314f74f1 100644 --- a/samples/observability-traces.js +++ b/samples/observability-traces.js @@ -23,7 +23,7 @@ async function main( projectId = 'my-project-id', instanceId = 'my-instance-id', - databaseId = 'my-project-id' + databaseId = 'my-project-id', ) { // [START spanner_opentelemetry_traces_cloudtrace_usage] diff --git a/samples/package.json b/samples/package.json index 1ee2fa4dc..a0dcc8655 100644 --- a/samples/package.json +++ b/samples/package.json @@ -8,16 +8,16 @@ "*.js" ], "engines": { - "node": ">=14.0.0" + "node": ">=18" }, "scripts": { "test-with-archived": "mocha system-test --timeout 1600000", "test": "mocha system-test/spanner.test.js --timeout 1600000" }, "dependencies": { - "@google-cloud/kms": "^4.0.0", - "@google-cloud/precise-date": "^4.0.0", - "@google-cloud/spanner": "^7.21.0", + "@google-cloud/kms": "^5.0.0", + "@google-cloud/precise-date": "^5.0.0", + "@google-cloud/spanner": "^8.0.0", "protobufjs": "^7.0.0", "yargs": "^17.0.0" }, @@ -35,4 +35,4 @@ "mocha": "^9.0.0", "p-limit": "^3.0.1" } -} +} \ No newline at end of file diff --git a/samples/pg-add-column.js b/samples/pg-add-column.js index dc8daceb4..752c727af 100644 --- a/samples/pg-add-column.js +++ b/samples/pg-add-column.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_add_column] /** @@ -49,7 +49,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, 
}); @@ -59,7 +59,7 @@ function main( await operation.promise(); console.log( - `Added MarketingBudget column to Albums table in database ${databaseId}.` + `Added MarketingBudget column to Albums table in database ${databaseId}.`, ); } pgAddColumn(); diff --git a/samples/pg-case-sensitivity.js b/samples/pg-case-sensitivity.js index 8c2376b93..e4ad4db77 100644 --- a/samples/pg-case-sensitivity.js +++ b/samples/pg-case-sensitivity.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_case_sensitivity] /** @@ -65,7 +65,7 @@ function main( const [operation] = await database.updateSchema(statements); await operation.promise(); console.log( - `Created table with case sensitive names in database ${databaseId} using PostgreSQL dialect.` + `Created table with case sensitive names in database ${databaseId} using PostgreSQL dialect.`, ); // Mutations: Column names in mutations are always case-insensitive, regardless whether the @@ -97,7 +97,7 @@ function main( // a result set must therefore use all lower-case letters. Location and Time were double-quoted // during creation, and retain their mixed case when returned in a result set. console.log( - `concertid: ${json.concertid}, Location: ${json.Location}, Time: ${json.Time}` + `concertid: ${json.concertid}, Location: ${json.Location}, Time: ${json.Time}`, ); }); } catch (err) { @@ -118,7 +118,7 @@ function main( const json = row.toJSON(); // The aliases are double-quoted and therefore retains their mixed case. 
console.log( - `concertid: ${json.ConcertId}, Location: ${json.venue}, Time: ${json.Time}` + `concertid: ${json.ConcertId}, Location: ${json.venue}, Time: ${json.Time}`, ); }); } catch (err) { diff --git a/samples/pg-database-create.js b/samples/pg-database-create.js index 7f59d4a72..2e9952717 100644 --- a/samples/pg-database-create.js +++ b/samples/pg-database-create.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_create_database] /** @@ -57,7 +57,7 @@ function main( name: databaseAdminClient.databasePath(projectId, instanceId, databaseId), }); console.log( - `Created database ${databaseId} on instance ${instanceId} with dialect ${metadata.databaseDialect}.` + `Created database ${databaseId} on instance ${instanceId} with dialect ${metadata.databaseDialect}.`, ); // Create a couple of tables using a separate request. We must use PostgreSQL style DDL as the @@ -82,7 +82,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: [statements], }); diff --git a/samples/pg-datatypes-casting.js b/samples/pg-datatypes-casting.js index 70c5d0900..112b7a02e 100644 --- a/samples/pg-datatypes-casting.js +++ b/samples/pg-datatypes-casting.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_cast_data_type] /** @@ -64,7 +64,7 @@ function main( `Bytes: ${json.bytes.valueOf()} \n` + `Float: ${json.float} \n` + `Bool: ${json.bool} \n` + - `Timestamp: ${Spanner.timestamp(json.timestamp)}` + `Timestamp: ${Spanner.timestamp(json.timestamp)}`, ); }); } catch (err) { diff --git a/samples/pg-dml-batch.js b/samples/pg-dml-batch.js index cf4a407ea..4e4a49f34 100644 --- a/samples/pg-dml-batch.js +++ b/samples/pg-dml-batch.js @@ -21,7 +21,7 @@ function 
main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_batch_dml] /** @@ -83,7 +83,7 @@ function main( const [rowCounts] = await transaction.batchUpdate(dmlStatements); await transaction.commit(); console.log( - `Successfully executed ${rowCounts.length} postgreSQL statements using Batch DML.` + `Successfully executed ${rowCounts.length} postgreSQL statements using Batch DML.`, ); }); } catch (err) { diff --git a/samples/pg-dml-getting-started-update.js b/samples/pg-dml-getting-started-update.js index cfdb25fc2..bf89a59ef 100644 --- a/samples/pg-dml-getting-started-update.js +++ b/samples/pg-dml-getting-started-update.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_dml_getting_started_update] /** @@ -58,7 +58,7 @@ function main( }); console.log( - `Successfully updated ${rowCount} record in the Singers table.` + `Successfully updated ${rowCount} record in the Singers table.`, ); await transaction.commit(); diff --git a/samples/pg-dml-partitioned.js b/samples/pg-dml-partitioned.js index 33e4b3e9b..05294ba9c 100644 --- a/samples/pg-dml-partitioned.js +++ b/samples/pg-dml-partitioned.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_partitioned_dml] /** diff --git a/samples/pg-dml-returning-delete.js b/samples/pg-dml-returning-delete.js index 3d7cfcac8..ecfda0b4f 100644 --- a/samples/pg-dml-returning-delete.js +++ b/samples/pg-dml-returning-delete.js @@ -52,7 +52,7 @@ async function main(instanceId, databaseId, projectId) { const rowCount = Math.floor(stats[stats.rowCount]); console.log( - `Successfully deleted ${rowCount} record from the Singers table.` + `Successfully deleted ${rowCount} record 
from the Singers table.`, ); rows.forEach(row => { console.log(row.toJSON().fullname); diff --git a/samples/pg-dml-returning-insert.js b/samples/pg-dml-returning-insert.js index cf32d7b01..eeeb65351 100644 --- a/samples/pg-dml-returning-insert.js +++ b/samples/pg-dml-returning-insert.js @@ -57,7 +57,7 @@ async function main(instanceId, databaseId, projectId) { const rowCount = Math.floor(stats[stats.rowCount]); console.log( - `Successfully inserted ${rowCount} record into the Singers table.` + `Successfully inserted ${rowCount} record into the Singers table.`, ); rows.forEach(row => { console.log(row.toJSON().fullname); diff --git a/samples/pg-dml-returning-update.js b/samples/pg-dml-returning-update.js index 98ad85c4e..5de9458fb 100644 --- a/samples/pg-dml-returning-update.js +++ b/samples/pg-dml-returning-update.js @@ -57,7 +57,7 @@ async function main(instanceId, databaseId, projectId) { const rowCount = Math.floor(stats[stats.rowCount]); console.log( - `Successfully updated ${rowCount} record into the Singers table.` + `Successfully updated ${rowCount} record into the Singers table.`, ); rows.forEach(row => { console.log(row.toJSON().fullname); diff --git a/samples/pg-dml-with-parameter.js b/samples/pg-dml-with-parameter.js index eaf5200d7..078142567 100644 --- a/samples/pg-dml-with-parameter.js +++ b/samples/pg-dml-with-parameter.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_dml_with_parameters] /** @@ -66,7 +66,7 @@ function main( }); await transaction.commit(); console.log( - `Successfully executed ${rowCounts} postgreSQL statements using DML.` + `Successfully executed ${rowCounts} postgreSQL statements using DML.`, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/pg-functions.js b/samples/pg-functions.js index 1824b3312..b4fe11644 100644 --- a/samples/pg-functions.js +++ b/samples/pg-functions.js @@ 
-21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_functions] /** diff --git a/samples/pg-index-create-storing.js b/samples/pg-index-create-storing.js index c7130cd26..467db6251 100644 --- a/samples/pg-index-create-storing.js +++ b/samples/pg-index-create-storing.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_create_storing_index] /** @@ -52,7 +52,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); diff --git a/samples/pg-interleaving.js b/samples/pg-interleaving.js index efda66436..54abd4125 100644 --- a/samples/pg-interleaving.js +++ b/samples/pg-interleaving.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_interleaved_table] /** @@ -63,7 +63,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: statements, }); @@ -72,7 +72,7 @@ function main( await operation.promise(); console.log( - `Created an interleaved table hierarchy in database ${databaseId} using PostgreSQL dialect.` + `Created an interleaved table hierarchy in database ${databaseId} using PostgreSQL dialect.`, ); } pgInterleaving(); diff --git a/samples/pg-jsonb-add-column.js b/samples/pg-jsonb-add-column.js index 358ffa6be..daf8387bd 100644 --- a/samples/pg-jsonb-add-column.js +++ b/samples/pg-jsonb-add-column.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_jsonb_add_column] /** @@ -50,14 +50,14 @@ function 
main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); console.log( - `Added jsonb column to table venues to database ${databaseId}.` + `Added jsonb column to table venues to database ${databaseId}.`, ); } pgJsonbAddColumn(); diff --git a/samples/pg-jsonb-query-parameter.js b/samples/pg-jsonb-query-parameter.js index c744ad9bf..2d8111a46 100644 --- a/samples/pg-jsonb-query-parameter.js +++ b/samples/pg-jsonb-query-parameter.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_jsonb_query_parameter] /** @@ -63,8 +63,8 @@ function main( rows.forEach(row => { console.log( `VenueId: ${row.venueid}, Details: ${JSON.stringify( - row.venuedetails - )}` + row.venuedetails, + )}`, ); }); } finally { diff --git a/samples/pg-jsonb-update-data.js b/samples/pg-jsonb-update-data.js index e6c64bd3f..87bc8c3af 100644 --- a/samples/pg-jsonb-update-data.js +++ b/samples/pg-jsonb-update-data.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_jsonb_update_data] /** diff --git a/samples/pg-numeric-data-type.js b/samples/pg-numeric-data-type.js index 61eaaa09a..67da93873 100644 --- a/samples/pg-numeric-data-type.js +++ b/samples/pg-numeric-data-type.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_numeric_data_type] /** @@ -63,7 +63,7 @@ function main( const revenues1 = Spanner.pgNumeric('97372.3863'); const revenues2 = Spanner.pgNumeric(7629); const revenues3 = Spanner.pgNumeric( - 
'1234567890.000012387293137871837817783828271273962412698378219372373072321997201370913293722379069869126846496978479842178917827474178248943891738912692839263826722738362982366832623281' + '1234567890.000012387293137871837817783828271273962412698378219372373072321997201370913293722379069869126846496978479842178917827474178248943891738912692839263826722738362982366832623281', ); const revenues4 = Spanner.pgNumeric('1.23e-7'); const revenues5 = Spanner.pgNumeric('NaN'); diff --git a/samples/pg-ordering-nulls.js b/samples/pg-ordering-nulls.js index bb1a9b699..b59f9dfcb 100644 --- a/samples/pg-ordering-nulls.js +++ b/samples/pg-ordering-nulls.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_order_nulls] /** diff --git a/samples/pg-query-parameter.js b/samples/pg-query-parameter.js index a495e363a..98005c61b 100644 --- a/samples/pg-query-parameter.js +++ b/samples/pg-query-parameter.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_query_parameter] /** @@ -67,7 +67,7 @@ function main( rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.singerid}, FirstName: ${json.firstname}, LastName: ${json.lastname}` + `SingerId: ${json.singerid}, FirstName: ${json.firstname}, LastName: ${json.lastname}`, ); }); } catch (err) { diff --git a/samples/pg-schema-information.js b/samples/pg-schema-information.js index 954b5153f..d5514548b 100644 --- a/samples/pg-schema-information.js +++ b/samples/pg-schema-information.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_postgresql_information_schema] /** @@ -63,7 +63,7 @@ function main( const json = row.toJSON(); 
console.log( `Table: ${json.table_schema}.${json.table_name} ` + - `(User defined type: ${json.user_defined_type_catalog}.${json.user_defined_type_schema}.${json.user_defined_type_name})` + `(User defined type: ${json.user_defined_type_catalog}.${json.user_defined_type_schema}.${json.user_defined_type_name})`, ); }); } catch (err) { diff --git a/samples/pg-sequence-alter.js b/samples/pg-sequence-alter.js index 6a89339e5..a0229d45c 100644 --- a/samples/pg-sequence-alter.js +++ b/samples/pg-sequence-alter.js @@ -46,7 +46,7 @@ async function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -55,7 +55,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', ); } catch (err) { console.error('ERROR:', err); @@ -79,7 +79,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).customerid.value - }` + }`, ); }); diff --git a/samples/pg-sequence-create.js b/samples/pg-sequence-create.js index 7bbe34888..2267169a6 100644 --- a/samples/pg-sequence-create.js +++ b/samples/pg-sequence-create.js @@ -50,7 +50,7 @@ async function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -59,7 +59,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value' + 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value', ); } catch (err) { console.error('ERROR:', 
err); @@ -83,7 +83,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).customerid.value - }` + }`, ); }); diff --git a/samples/pg-sequence-drop.js b/samples/pg-sequence-drop.js index 760575b4a..525e97acf 100644 --- a/samples/pg-sequence-drop.js +++ b/samples/pg-sequence-drop.js @@ -50,7 +50,7 @@ async function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -59,7 +59,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/proto-query-data.js b/samples/proto-query-data.js index 012807123..fa1e7ee75 100644 --- a/samples/proto-query-data.js +++ b/samples/proto-query-data.js @@ -21,7 +21,7 @@ const music = singer.examples.spanner.music; function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_query_with_proto_types_parameter] /** @@ -78,7 +78,7 @@ function main( const json = row.toJSON(); console.log( `SingerId: ${json.SingerId}, SingerInfo: ${json.SingerInfo}, SingerGenre: ${json.SingerGenre}, - SingerInfoArray: ${json.SingerInfoArray}, SingerGenreArray: ${json.SingerGenreArray}` + SingerInfoArray: ${json.SingerInfoArray}, SingerGenreArray: ${json.SingerGenreArray}`, ); }); } catch (err) { diff --git a/samples/proto-type-add-column.js b/samples/proto-type-add-column.js index 76c77aa5a..a7b3ba703 100644 --- a/samples/proto-type-add-column.js +++ b/samples/proto-type-add-column.js @@ -24,7 +24,7 @@ const fs = require('fs'); function main( instanceId = 
'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_add_proto_type_columns] /** @@ -67,7 +67,7 @@ function main( database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, protoDescriptors: protoDescriptor, @@ -76,7 +76,7 @@ function main( console.log(`Waiting for operation on ${databaseId} to complete...`); await operation.promise(); console.log( - `Altered table "Singers" on database ${databaseId} on instance ${instanceId} with proto descriptors.` + `Altered table "Singers" on database ${databaseId} on instance ${instanceId} with proto descriptors.`, ); } protoTypeAddColumn(); diff --git a/samples/proto-update-data-dml.js b/samples/proto-update-data-dml.js index cc8f3f6c4..6d9df8472 100644 --- a/samples/proto-update-data-dml.js +++ b/samples/proto-update-data-dml.js @@ -21,7 +21,7 @@ const music = singer.examples.spanner.music; function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_update_data_with_proto_types_with_dml] /** diff --git a/samples/proto-update-data.js b/samples/proto-update-data.js index ed802b552..529577fc1 100644 --- a/samples/proto-update-data.js +++ b/samples/proto-update-data.js @@ -21,7 +21,7 @@ const music = singer.examples.spanner.music; function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_update_data_with_proto_types] /** diff --git a/samples/queryoptions.js b/samples/queryoptions.js index ee4d19064..95f0ea219 100644 --- a/samples/queryoptions.js +++ b/samples/queryoptions.js @@ -41,7 +41,7 @@ async function databaseWithQueryOptions(instanceId, databaseId, projectId) { // The list of available statistics packages can be found by querying the // "INFORMATION_SCHEMA.SPANNER_STATISTICS" table. 
optimizerStatisticsPackage: 'latest', - } + }, ); const query = { @@ -60,7 +60,7 @@ async function databaseWithQueryOptions(instanceId, databaseId, projectId) { ? json.MarketingBudget : null; // This value is nullable console.log( - `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}` + `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}`, ); }); } catch (err) { @@ -115,7 +115,7 @@ async function queryWithQueryOptions(instanceId, databaseId, projectId) { ? json.MarketingBudget : null; // This value is nullable console.log( - `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}` + `AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}, MarketingBudget: ${marketingBudget}`, ); }); } catch (err) { @@ -137,8 +137,8 @@ require('yargs') databaseWithQueryOptions( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'queryWithQueryOptions ', @@ -148,14 +148,14 @@ require('yargs') queryWithQueryOptions( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 databaseWithQueryOptions "my-instance" "my-database" "my-project-id"' + 'node $0 databaseWithQueryOptions "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryWithQueryOptions "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithQueryOptions "my-instance" "my-database" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/quickstart.js b/samples/quickstart.js index 5bf30f81e..46b76e716 100644 --- a/samples/quickstart.js +++ b/samples/quickstart.js @@ -17,7 +17,7 @@ async function quickstart( projectId = 'YOUR-PROJECT-ID', // Your Google Cloud Platform project ID instanceId = 'my-instance', // Your Cloud Spanner instance ID - databaseId = 'my-database' // Your Cloud Spanner database ID + databaseId = 'my-database', // Your Cloud Spanner database ID ) { 
// [START spanner_quickstart] // Imports the Google Cloud client library diff --git a/samples/read-data-with-database-role.js b/samples/read-data-with-database-role.js index af2317b6d..c67a96257 100644 --- a/samples/read-data-with-database-role.js +++ b/samples/read-data-with-database-role.js @@ -21,7 +21,7 @@ function main( instanceId = 'my-instance', databaseId = 'my-database', - projectId = 'my-project-id' + projectId = 'my-project-id', ) { // [START spanner_read_data_with_database_role] /** @@ -57,7 +57,7 @@ function main( const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, FirstName: ${json.FirstName}, LastName: ${json.LastName}` + `SingerId: ${json.SingerId}, FirstName: ${json.FirstName}, LastName: ${json.LastName}`, ); } } catch (err) { diff --git a/samples/request-tag.js b/samples/request-tag.js index 20dbcb954..f67b20668 100644 --- a/samples/request-tag.js +++ b/samples/request-tag.js @@ -49,7 +49,7 @@ function main(instanceId, databaseId, projectId) { }); albums.forEach(album => { console.log( - `SingerId: ${album.SingerId}, AlbumId: ${album.AlbumId}, AlbumTitle: ${album.AlbumTitle}` + `SingerId: ${album.SingerId}, AlbumId: ${album.AlbumId}, AlbumTitle: ${album.AlbumTitle}`, ); }); await database.close(); diff --git a/samples/resource/singer.d.ts b/samples/resource/singer.d.ts index 87d1cfc11..07913d33b 100644 --- a/samples/resource/singer.d.ts +++ b/samples/resource/singer.d.ts @@ -73,7 +73,7 @@ export namespace examples { * @returns SingerInfo instance */ public static create( - properties?: examples.spanner.music.ISingerInfo + properties?: examples.spanner.music.ISingerInfo, ): examples.spanner.music.SingerInfo; /** @@ -84,7 +84,7 @@ export namespace examples { */ public static encode( message: examples.spanner.music.ISingerInfo, - writer?: $protobuf.Writer + writer?: $protobuf.Writer, ): $protobuf.Writer; /** @@ -95,7 +95,7 @@ export namespace examples { */ public static encodeDelimited( message: examples.spanner.music.ISingerInfo, 
- writer?: $protobuf.Writer + writer?: $protobuf.Writer, ): $protobuf.Writer; /** @@ -108,7 +108,7 @@ export namespace examples { */ public static decode( reader: $protobuf.Reader | Uint8Array, - length?: number + length?: number, ): examples.spanner.music.SingerInfo; /** @@ -119,7 +119,7 @@ export namespace examples { * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited( - reader: $protobuf.Reader | Uint8Array + reader: $protobuf.Reader | Uint8Array, ): examples.spanner.music.SingerInfo; /** @@ -146,7 +146,7 @@ export namespace examples { */ public static toObject( message: examples.spanner.music.SingerInfo, - options?: $protobuf.IConversionOptions + options?: $protobuf.IConversionOptions, ): {[k: string]: any}; /** diff --git a/samples/resource/singer.js b/samples/resource/singer.js index 203791b41..df7792d17 100644 --- a/samples/resource/singer.js +++ b/samples/resource/singer.js @@ -346,7 +346,7 @@ $root.examples = (function () { if (object.singerId !== null) if ($util.Long) (message.singerId = $util.Long.fromValue( - object.singerId + object.singerId, )).unsigned = false; else if (typeof object.singerId === 'string') message.singerId = parseInt(object.singerId, 10); @@ -355,7 +355,7 @@ $root.examples = (function () { else if (typeof object.singerId === 'object') message.singerId = new $util.LongBits( object.singerId.low >>> 0, - object.singerId.high >>> 0 + object.singerId.high >>> 0, ).toNumber(); if (object.birthDate !== null) message.birthDate = String(object.birthDate); @@ -413,7 +413,7 @@ $root.examples = (function () { : options.longs === Number ? 
new $util.LongBits( message.singerId.low >>> 0, - message.singerId.high >>> 0 + message.singerId.high >>> 0, ).toNumber() : message.singerId; if (options.oneofs) object._singerId = 'singerId'; diff --git a/samples/rpc-priority-batch-dml.js b/samples/rpc-priority-batch-dml.js index 0db75a397..5fa4008bc 100644 --- a/samples/rpc-priority-batch-dml.js +++ b/samples/rpc-priority-batch-dml.js @@ -62,7 +62,7 @@ async function main(instanceId, databaseId, projectId) { }); await transaction.commit(); console.log( - `Successfully executed ${rowCounts.length} SQL statements using Batch DML using low RPC priority.` + `Successfully executed ${rowCounts.length} SQL statements using Batch DML using low RPC priority.`, ); }); } catch (err) { diff --git a/samples/rpc-priority-partitioned-dml.js b/samples/rpc-priority-partitioned-dml.js index 1da65e4e5..48b2b6f4b 100644 --- a/samples/rpc-priority-partitioned-dml.js +++ b/samples/rpc-priority-partitioned-dml.js @@ -38,7 +38,7 @@ async function main(instanceId, databaseId, projectId) { async function updateUsingPartitionedDmlWithRpcPriority( instanceId, - databaseId + databaseId, ) { // Gets a reference to a Cloud Spanner instance and database const instance = spanner.instance(instanceId); @@ -52,7 +52,7 @@ async function main(instanceId, databaseId, projectId) { }, }); console.log( - `Successfully updated ${rowCount} records using low RPC priority.` + `Successfully updated ${rowCount} records using low RPC priority.`, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/rpc-priority-query-partitions.js b/samples/rpc-priority-query-partitions.js index 16f05cf06..5eb3a440e 100644 --- a/samples/rpc-priority-query-partitions.js +++ b/samples/rpc-priority-query-partitions.js @@ -38,7 +38,7 @@ async function main(instanceId, databaseId, projectId) { async function createAndExecuteQueryPartitionsWithRpcPriority( instanceId, - databaseId + databaseId, ) { // Gets a reference to a Cloud Spanner instance and database const 
instance = spanner.instance(instanceId); @@ -52,7 +52,7 @@ async function main(instanceId, databaseId, projectId) { }, }); console.log( - `Successfully created ${partitions.length} query partitions using low RPC priority.` + `Successfully created ${partitions.length} query partitions using low RPC priority.`, ); let row_count = 0; @@ -68,13 +68,13 @@ async function main(instanceId, databaseId, projectId) { transaction.execute(partition).then(results => { const rows = results[0].map(row => row.toJSON()); row_count += rows.length; - }) + }), ); }); Promise.all(promises) .then(() => { console.log( - `Successfully received ${row_count} from executed partitions.` + `Successfully received ${row_count} from executed partitions.`, ); transaction.close(); }) diff --git a/samples/rpc-priority-read.js b/samples/rpc-priority-read.js index cdf39b2ee..6c346c1ac 100644 --- a/samples/rpc-priority-read.js +++ b/samples/rpc-priority-read.js @@ -58,13 +58,13 @@ async function main(instanceId, databaseId, projectId) { const [rows] = await albumsTable.read(query); console.log( - `Successfully fetched ${rows.length} rows using low RPC priority.` + `Successfully fetched ${rows.length} rows using low RPC priority.`, ); rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}` + `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}`, ); }); } catch (err) { diff --git a/samples/rpc-priority-run.js b/samples/rpc-priority-run.js index ce5136524..c08335370 100644 --- a/samples/rpc-priority-run.js +++ b/samples/rpc-priority-run.js @@ -58,12 +58,12 @@ async function main(instanceId, databaseId, projectId) { }); console.log( - `Successfully fetched ${rows.length} rows using low RPC priority.` + `Successfully fetched ${rows.length} rows using low RPC priority.`, ); rows.forEach(row => { console.log( - `AlbumId: ${row.AlbumId}, AlbumTitle: ${row.AlbumTitle}, MarketingBudget: 
${row.MarketingBudget}` + `AlbumId: ${row.AlbumId}, AlbumTitle: ${row.AlbumTitle}, MarketingBudget: ${row.MarketingBudget}`, ); }); } catch (err) { diff --git a/samples/rpc-priority-transaction.js b/samples/rpc-priority-transaction.js index eabaa417f..eb609d886 100644 --- a/samples/rpc-priority-transaction.js +++ b/samples/rpc-priority-transaction.js @@ -60,7 +60,7 @@ async function main(instanceId, databaseId, projectId) { }); console.log( - `Successfully inserted ${rowCount} record into the Singers table using low RPC priority.` + `Successfully inserted ${rowCount} record into the Singers table using low RPC priority.`, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/schema.js b/samples/schema.js index b9dd6b09f..8bc6b53db 100644 --- a/samples/schema.js +++ b/samples/schema.js @@ -100,7 +100,7 @@ async function addColumn(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: ['ALTER TABLE Albums ADD COLUMN MarketingBudget INT64'], }); @@ -162,7 +162,7 @@ async function queryDataWithNewColumn(instanceId, databaseId, projectId) { json.AlbumId }, MarketingBudget: ${ json.MarketingBudget ? 
json.MarketingBudget : null - }` + }`, ); }); } catch (err) { @@ -188,14 +188,15 @@ require('yargs') 'createDatabase ', 'Creates an example database with two tables in a Cloud Spanner instance using Database Admin Client.', {}, - opts => createDatabase(opts.instanceName, opts.databaseName, opts.projectId) + opts => + createDatabase(opts.instanceName, opts.databaseName, opts.projectId), ) .example('node $0 createDatabase "my-instance" "my-database" "my-project-id"') .command( 'addColumn ', 'Adds an example MarketingBudget column to an example Cloud Spanner table.', {}, - opts => addColumn(opts.instanceName, opts.databaseName, opts.projectId) + opts => addColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .example('node $0 addColumn "my-instance" "my-database" "my-project-id"') .command( @@ -206,8 +207,8 @@ require('yargs') queryDataWithNewColumn( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .example('node $0 queryNewColumn "my-instance" "my-database" "my-project-id"') .command( @@ -218,11 +219,11 @@ require('yargs') createDatabaseWithVersionRetentionPeriod( opts.instanceName, opts.databaseId, - opts.projectId - ) + opts.projectId, + ), ) .example( - 'node $0 createDatabaseWithVersionRetentionPeriod "my-instance" "my-database-id" "my-project-id"' + 'node $0 createDatabaseWithVersionRetentionPeriod "my-instance" "my-database-id" "my-project-id"', ) .command( 'createDatabaseWithEncryptionKey ', @@ -233,11 +234,11 @@ require('yargs') opts.instanceName, opts.databaseName, opts.projectId, - opts.keyName - ) + opts.keyName, + ), ) .example( - 'node $0 createDatabaseWithEncryptionKey "my-instance" "my-database" "my-project-id" "key-name"' + 'node $0 createDatabaseWithEncryptionKey "my-instance" "my-database" "my-project-id" "key-name"', ) .wrap(120) .recommendCommands() diff --git a/samples/sequence-alter.js b/samples/sequence-alter.js index b2dde1ea6..fa3a77758 100644 --- a/samples/sequence-alter.js +++ 
b/samples/sequence-alter.js @@ -49,7 +49,7 @@ async function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -58,7 +58,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', ); } catch (err) { console.error('ERROR:', err); @@ -82,7 +82,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).CustomerId.value - }` + }`, ); }); diff --git a/samples/sequence-create.js b/samples/sequence-create.js index dd4c11424..329adc267 100644 --- a/samples/sequence-create.js +++ b/samples/sequence-create.js @@ -50,7 +50,7 @@ async function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -59,7 +59,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value.' 
+ 'Created Seq sequence and Customers table, where the key column CustomerId uses the sequence as a default value.', ); } catch (err) { console.error('ERROR:', err); @@ -83,7 +83,7 @@ async function main(instanceId, databaseId, projectId) { console.log( `Inserted customer record with CustomerId: ${ row.toJSON({wrapNumbers: true}).CustomerId.value - }` + }`, ); }); diff --git a/samples/sequence-drop.js b/samples/sequence-drop.js index 4693cfd5f..c07f8fa84 100644 --- a/samples/sequence-drop.js +++ b/samples/sequence-drop.js @@ -50,7 +50,7 @@ async function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -59,7 +59,7 @@ async function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/struct.js b/samples/struct.js index a484b7ee1..9d02efa96 100644 --- a/samples/struct.js +++ b/samples/struct.js @@ -368,15 +368,15 @@ require('yargs') writeDataForStructQueries( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'queryDataWithStruct ', 'Queries Singers table using STRUCT parameters.', {}, opts => - queryDataWithStruct(opts.instanceName, opts.databaseName, opts.projectId) + queryDataWithStruct(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithArrayOfStruct ', @@ -386,15 +386,15 @@ require('yargs') queryWithArrayofStruct( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'queryStructField ', 'Queries Singers table using a STRUCT parameter field.', {}, opts => - queryStructField(opts.instanceName, opts.databaseName, opts.projectId) + 
queryStructField(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryNestedStructField ', @@ -404,8 +404,8 @@ require('yargs') queryNestedStructField( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .wrap(120) .recommendCommands() diff --git a/samples/system-test/archived/spanner.test.js b/samples/system-test/archived/spanner.test.js index eb771bd16..8fe6d9a05 100644 --- a/samples/system-test/archived/spanner.test.js +++ b/samples/system-test/archived/spanner.test.js @@ -126,8 +126,8 @@ async function deleteStaleInstances() { const limit = pLimit(5); await Promise.all( instances.map(instance => - limit(() => setTimeout(deleteInstance, delay, instance)) - ) + limit(() => setTimeout(deleteInstance, delay, instance)), + ), ); } @@ -147,7 +147,7 @@ async function getCryptoKey() { const keyRingName = client.keyRingPath( PROJECT_ID, KEY_LOCATION_ID, - KEY_RING_ID + KEY_RING_ID, ); // Get key ring. @@ -174,7 +174,7 @@ async function getCryptoKey() { PROJECT_ID, KEY_LOCATION_ID, KEY_RING_ID, - KEY_ID + KEY_ID, ); const [key] = await client.getCryptoKey({ name: keyName, @@ -219,7 +219,7 @@ describe('Spanner', () => { return operation.promise(); } else { console.log( - `Not creating temp instance, using + ${instance.formattedName_}...` + `Not creating temp instance, using + ${instance.formattedName_}...`, ); } }); @@ -260,38 +260,38 @@ describe('Spanner', () => { // create_instance it('should create an example instance', async () => { const output = execSync( - `${instanceCmd} createInstance "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `${instanceCmd} createInstance "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` - ) + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`), ); }); 
// create_instance_with_processing_units it('should create an example instance with processing units', async () => { const output = execSync( - `${instanceCmd} createInstanceWithProcessingUnits "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `${instanceCmd} createInstanceWithProcessingUnits "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` - ) + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`), ); assert.match( output, - new RegExp(`Instance ${SAMPLE_INSTANCE_ID} has 500 processing units.`) + new RegExp(`Instance ${SAMPLE_INSTANCE_ID} has 500 processing units.`), ); }); }); @@ -302,35 +302,35 @@ describe('Spanner', () => { assert.strictEqual( exists, true, - 'The main instance was not created successfully!' + 'The main instance was not created successfully!', ); }); // create_database it('should create an example database', async () => { const output = execSync( - `${schemaCmd} createDatabase "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${schemaCmd} createDatabase "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Created database ${DATABASE_ID} on instance ${INSTANCE_ID}.`) + new RegExp(`Created database ${DATABASE_ID} on instance ${INSTANCE_ID}.`), ); }); // update_database it('should set database metadata', async () => { const output = execSync( - `node database-update.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node database-update.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for update operation for ${DATABASE_ID} to complete...` - ) + `Waiting for 
update operation for ${DATABASE_ID} to complete...`, + ), ); assert.match(output, new RegExp(`Updated database ${DATABASE_ID}.`)); // cleanup @@ -352,23 +352,23 @@ describe('Spanner', () => { const key = await getCryptoKey(); const output = execSync( - `${schemaCmd} createDatabaseWithEncryptionKey "${INSTANCE_ID}" "${ENCRYPTED_DATABASE_ID}" ${PROJECT_ID} "${key.name}"` + `${schemaCmd} createDatabaseWithEncryptionKey "${INSTANCE_ID}" "${ENCRYPTED_DATABASE_ID}" ${PROJECT_ID} "${key.name}"`, ); assert.match( output, new RegExp( - `Waiting for operation on ${ENCRYPTED_DATABASE_ID} to complete...` - ) + `Waiting for operation on ${ENCRYPTED_DATABASE_ID} to complete...`, + ), ); assert.match( output, new RegExp( - `Created database ${ENCRYPTED_DATABASE_ID} on instance ${INSTANCE_ID}.` - ) + `Created database ${ENCRYPTED_DATABASE_ID} on instance ${INSTANCE_ID}.`, + ), ); assert.match( output, - new RegExp(`Database encrypted with key ${key.name}.`) + new RegExp(`Database encrypted with key ${key.name}.`), ); }); }); @@ -378,7 +378,7 @@ describe('Spanner', () => { // instance and database set up at this point. 
it('should query a table', async () => { const output = execSync( - `node quickstart ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}` + `node quickstart ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}`, ); assert.match(output, /Query: \d+ found./); }); @@ -387,7 +387,7 @@ describe('Spanner', () => { // insert_data it('should insert rows into an example table', async () => { const output = execSync( - `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -395,13 +395,13 @@ describe('Spanner', () => { // delete_data it('should delete and then insert rows in the example tables', async () => { let output = execSync( - `${crudCmd} delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.include(output, 'Deleted individual rows in Albums.'); assert.include(output, '2 records deleted from Singers.'); assert.include(output, '3 records deleted from Singers.'); output = execSync( - `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -409,7 +409,7 @@ describe('Spanner', () => { // query_data it('should query an example table and return matching rows', async () => { const output = execSync( - `${crudCmd} query ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} query ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -417,7 +417,7 @@ describe('Spanner', () => { // read_data it('should read an example table', async () => { const output = execSync( - `${crudCmd} read ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} read ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -425,7 +425,7 @@ 
describe('Spanner', () => { // add_column it('should add a column to a table', async () => { const output = execSync( - `${schemaCmd} addColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${schemaCmd} addColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match(output, /Added the MarketingBudget column\./); @@ -434,7 +434,7 @@ describe('Spanner', () => { // update_data it('should update existing rows in an example table', async () => { const output = execSync( - `${crudCmd} update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Updated data\./); }); @@ -445,22 +445,22 @@ describe('Spanner', () => { // 15 seconds have elapsed since the update_data test. await new Promise(r => setTimeout(r, 16000)); const output = execSync( - `${crudCmd} read-stale ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} read-stale ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget: 100000/ + /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget: 100000/, ); assert.match( output, - /SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget: 500000/ + /SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget: 500000/, ); }); // query_data_with_new_column it('should query an example table with an additional column and return matching rows', async () => { const output = execSync( - `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 100000/); assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 500000/); @@ -469,7 +469,7 @@ describe('Spanner', () => { // create_index it('should create an index in an example 
table', async () => { const output = execSync( - `node archived/index-create ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node archived/index-create ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match(output, /Added the AlbumsByAlbumTitle index\./); @@ -482,7 +482,7 @@ describe('Spanner', () => { await delay(this.test); const output = execSync( - `node archived/index-create-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node archived/index-create-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match(output, /Added the AlbumsByAlbumTitle2 index\./); @@ -491,36 +491,36 @@ describe('Spanner', () => { // query_data_with_index it('should query an example table with an index and return matching rows', async () => { const output = execSync( - `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/, ); assert.notMatch( output, - /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ + /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/, ); }); it('should respect query boundaries when querying an example table with an index', async () => { const output = execSync( - `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID} "Ardvark" "Zoo"` + `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID} "Ardvark" "Zoo"`, ); assert.match( output, - /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ + /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/, ); }); // read_data_with_index it('should read an example table with an index', 
async () => { const output = execSync( - `node index-read-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-read-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -528,7 +528,7 @@ describe('Spanner', () => { // read_data_with_storing_index it('should read an example table with a storing index', async () => { const output = execSync( - `node index-read-data-with-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-read-data-with-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -536,48 +536,48 @@ describe('Spanner', () => { // spanner_create_client_with_query_options it('should use query options from a database reference', async () => { const output = execSync( - `${queryOptionsCmd} databaseWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${queryOptionsCmd} databaseWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/, ); }); // spanner_query_with_query_options it('should use query options on request', async () => { const output = execSync( - `${queryOptionsCmd} queryWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${queryOptionsCmd} queryWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/, ); }); // query with RPC priority for run command it('should use RPC priority from request options for run command', async () => { const output = execSync( - `${rpcPriorityRunCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityRunCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - 
/Successfully fetched \d rows using low RPC priority\./ + /Successfully fetched \d rows using low RPC priority\./, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/, ); }); // query with RPC priority for Read command it('should use RPC priority from request options for read command', async () => { const output = execSync( - `${rpcPriorityReadCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityReadCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully fetched \d rows using low RPC priority\./ + /Successfully fetched \d rows using low RPC priority\./, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -585,44 +585,44 @@ describe('Spanner', () => { // query with RPC priority for transaction command it('should use RPC priority from request options for transaction command', async () => { const output = execSync( - `${rpcPriorityTransactionCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityTransactionCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully inserted 1 record into the Singers table using low RPC priority\./ + /Successfully inserted 1 record into the Singers table using low RPC priority\./, ); }); // query with RPC priority for batch DML command it('should use RPC priority from request options for batch DML command', async () => { const output = execSync( - `${rpcPriorityBatchDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityBatchDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully executed 2 SQL statements using Batch DML using low RPC priority\./ + /Successfully executed 2 SQL statements using Batch DML using low RPC priority\./, ); }); // query with RPC priority for partitioned DML command it('should use RPC priority from 
request options for partitioned DML command', async () => { const output = execSync( - `${rpcPriorityPartitionedDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityPartitionedDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated (\\d+) records using low RPC priority.') + new RegExp('Successfully updated (\\d+) records using low RPC priority.'), ); }); // query with RPC priority for Query partitions command it('should use RPC priority from request options for Query partition command', async () => { const output = execSync( - `${rpcPriorityQueryPartitionsCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityQueryPartitionsCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully created \d query partitions using low RPC priority\./ + /Successfully created \d query partitions using low RPC priority\./, ); assert.match(output, /Successfully received \d from executed partitions\./); }); @@ -630,7 +630,7 @@ describe('Spanner', () => { // read_only_transactioni it('should read an example table using transactions', async () => { const output = execSync( - `${transactionCmd} readOnly ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${transactionCmd} readOnly ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); assert.match(output, /Successfully executed read-only transaction\./); @@ -639,16 +639,16 @@ describe('Spanner', () => { // read_write_transaction it('should read from and write to an example table using transactions', async () => { let output = execSync( - `${transactionCmd} readWrite ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${transactionCmd} readWrite ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /The first album's marketing budget: 100000/); assert.match(output, /The second album's marketing budget: 500000/); assert.match( output, 
- /Successfully executed read-write transaction to transfer 200000 from Album 2 to Album 1./ + /Successfully executed read-write transaction to transfer 200000 from Album 2 to Album 1./, ); output = execSync( - `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 300000/); assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 300000/); @@ -657,7 +657,7 @@ describe('Spanner', () => { // batch_client it('should create and execute query partitions', async () => { const output = execSync( - `${batchCmd} create-and-execute-query-partitions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${batchCmd} create-and-execute-query-partitions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully created \d query partitions\./); assert.match(output, /Successfully received \d from executed partitions\./); @@ -675,7 +675,7 @@ describe('Spanner', () => { const partition = JSON.stringify(partitions[0]); const output = execSync( - `${batchCmd} execute-partition ${INSTANCE_ID} ${DATABASE_ID} '${identifier}' '${partition}' ${PROJECT_ID}` + `${batchCmd} execute-partition ${INSTANCE_ID} ${DATABASE_ID} '${identifier}' '${partition}' ${PROJECT_ID}`, ); assert.match(output, /Successfully received \d from executed partition\./); await transaction.close(); @@ -684,19 +684,19 @@ describe('Spanner', () => { // add_timestamp_column it('should add a timestamp column to a table', async () => { const output = execSync( - `${timestampCmd} addTimestampColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} addTimestampColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match( output, - /Added LastUpdateTime as a commit timestamp column in Albums table\./ + /Added LastUpdateTime as a commit timestamp column in 
Albums table\./, ); }); // update_data_with_timestamp_column it('should update existing rows in an example table with commit timestamp column', async () => { const output = execSync( - `${timestampCmd} updateWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} updateWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Updated data\./); }); @@ -704,38 +704,38 @@ describe('Spanner', () => { // query_data_with_timestamp_column it('should query an example table with an additional timestamp column and return matching rows', async () => { const output = execSync( - `${timestampCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /SingerId: 1, AlbumId: 1, MarketingBudget: 1000000, LastUpdateTime:/ + /SingerId: 1, AlbumId: 1, MarketingBudget: 1000000, LastUpdateTime:/, ); assert.match( output, - /SingerId: 2, AlbumId: 2, MarketingBudget: 750000, LastUpdateTime:/ + /SingerId: 2, AlbumId: 2, MarketingBudget: 750000, LastUpdateTime:/, ); }); // create_table_with_timestamp_column it('should create an example table with a timestamp column', async () => { const output = execSync( - `${timestampCmd} createTableWithTimestamp "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${timestampCmd} createTableWithTimestamp "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Created table Performances in database ${DATABASE_ID}.`) + new RegExp(`Created table Performances in database ${DATABASE_ID}.`), ); }); // insert_data_with_timestamp it('should insert rows into an example table with timestamp column', async () => { const output = execSync( - `${timestampCmd} insertWithTimestamp ${INSTANCE_ID} 
${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} insertWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -743,7 +743,7 @@ describe('Spanner', () => { // query_new_table_with_timestamp it('should query an example table with a non-null timestamp column and return matching rows', async () => { const output = execSync( - `${timestampCmd} queryTableWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} queryTableWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, VenueId: 4, EventDate:/); assert.match(output, /Revenue: 15000, LastUpdateTime:/); @@ -752,7 +752,7 @@ describe('Spanner', () => { // write_data_for_struct_queries it('should insert rows into an example table for use with struct query examples', async () => { const output = execSync( - `${structCmd} writeDataForStructQueries ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} writeDataForStructQueries ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -760,7 +760,7 @@ describe('Spanner', () => { // query_with_struct_param it('should query an example table with a STRUCT param', async () => { const output = execSync( - `${structCmd} queryDataWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryDataWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 6/); }); @@ -768,7 +768,7 @@ describe('Spanner', () => { // query_with_array_of_struct_param it('should query an example table with an array of STRUCT param', async () => { const output = execSync( - `${structCmd} queryWithArrayOfStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryWithArrayOfStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 6\nSingerId: 7\nSingerId: 8/); }); @@ -776,7 +776,7 @@ describe('Spanner', () => { // query_with_struct_field_param it('should 
query an example table with a STRUCT field param', async () => { const output = execSync( - `${structCmd} queryStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 6/); }); @@ -784,29 +784,29 @@ describe('Spanner', () => { // query_with_nested_struct_param it('should query an example table with a nested STRUCT param', async () => { const output = execSync( - `${structCmd} queryNestedStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryNestedStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /SingerId: 6, SongName: Imagination\nSingerId: 9, SongName: Imagination/ + /SingerId: 6, SongName: Imagination\nSingerId: 9, SongName: Imagination/, ); }); // dml_standard_insert it('should insert rows into an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} insertUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} insertUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully inserted 1 record into the Singers table/ + /Successfully inserted 1 record into the Singers table/, ); }); // dml_standard_update it('should update a row in an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} updateUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 1 record/); }); @@ -814,7 +814,7 @@ describe('Spanner', () => { // dml_standard_delete it('should delete a row from an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} deleteUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} deleteUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully deleted 1 record\./); }); @@ -822,7 
+822,7 @@ describe('Spanner', () => { // dml_standard_update_with_timestamp it('should update the timestamp of multiple records in an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} updateUsingDmlWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingDmlWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 3 records/); }); @@ -830,7 +830,7 @@ describe('Spanner', () => { // dml_write_then_read it('should insert a record in an example table using a DML statement and then query the record', async () => { const output = execSync( - `${dmlCmd} writeAndReadUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} writeAndReadUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Timothy Campbell/); }); @@ -838,7 +838,7 @@ describe('Spanner', () => { // dml_structs it('should update a record in an example table using a DML statement along with a struct value', async () => { const output = execSync( - `${dmlCmd} updateUsingDmlWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingDmlWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 1 record/); }); @@ -846,7 +846,7 @@ describe('Spanner', () => { // dml_getting_started_insert it('should insert multiple records into an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} writeUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} writeUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /4 records inserted/); }); @@ -854,7 +854,7 @@ describe('Spanner', () => { // dml_query_with_parameter it('should use a parameter query to query record that was inserted using a DML statement', async () => { const output = execSync( - `${dmlCmd} queryWithParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} queryWithParameter 
${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 12, FirstName: Melissa, LastName: Garcia/); }); @@ -862,18 +862,18 @@ describe('Spanner', () => { // dml_getting_started_update it('should transfer value from one record to another using DML statements within a transaction', async () => { const output = execSync( - `${dmlCmd} writeWithTransactionUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} writeWithTransactionUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully executed read-write transaction using DML to transfer 200000 from Album 2 to Album 1/ + /Successfully executed read-write transaction using DML to transfer 200000 from Album 2 to Album 1/, ); }); // dml_partitioned_update it('should update multiple records using a partitioned DML statement', async () => { const output = execSync( - `${dmlCmd} updateUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 3 records/); }); @@ -881,7 +881,7 @@ describe('Spanner', () => { // dml_partitioned_delete it('should delete multiple records using a partitioned DML statement', async () => { const output = execSync( - `${dmlCmd} deleteUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} deleteUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully deleted 6 records/); }); @@ -889,22 +889,22 @@ describe('Spanner', () => { // dml_batch_update it('should insert and update records using Batch DML', async () => { const output = execSync( - `${dmlCmd} updateUsingBatchDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingBatchDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully executed 2 SQL statements using Batch DML/ + /Successfully executed 2 SQL statements using Batch 
DML/, ); }); // dml_returning_insert it('should insert records using DML Returning', async () => { const output = execSync( - `node dml-returning-insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node dml-returning-insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully inserted 1 record into the Singers table') + new RegExp('Successfully inserted 1 record into the Singers table'), ); assert.match(output, new RegExp('Virginia Watson')); }); @@ -912,11 +912,11 @@ describe('Spanner', () => { // dml_returning_update it('should update records using DML Returning', async () => { const output = execSync( - `node dml-returning-update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node dml-returning-update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated 1 record into the Albums table') + new RegExp('Successfully updated 1 record into the Albums table'), ); assert.match(output, new RegExp('2000000')); }); @@ -924,11 +924,11 @@ describe('Spanner', () => { // dml_returning_delete it('should delete records using DML Returning', async () => { const output = execSync( - `node dml-returning-delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node dml-returning-delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully deleted 1 record from the Singers table') + new RegExp('Successfully deleted 1 record from the Singers table'), ); assert.match(output, new RegExp('Virginia Watson')); }); @@ -936,23 +936,23 @@ describe('Spanner', () => { // create_table_with_datatypes it('should create Venues example table with supported datatype columns', async () => { const output = execSync( - `${datatypesCmd} createVenuesTable "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${datatypesCmd} createVenuesTable "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on 
${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Created table Venues in database ${DATABASE_ID}.`) + new RegExp(`Created table Venues in database ${DATABASE_ID}.`), ); }); // insert_datatypes_data it('should insert multiple records into Venues example table', async () => { const output = execSync( - `${datatypesCmd} insertData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} insertData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data./); }); @@ -960,33 +960,33 @@ describe('Spanner', () => { // query_with_array_parameter it('should use an ARRAY query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithArray ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithArray ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01/ + /VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01/, ); assert.match( output, - /VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01/ + /VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01/, ); }); // query_with_bool_parameter it('should use a BOOL query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithBool ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithBool ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 19, VenueName: Venue 19, OutdoorVenue: true/ + /VenueId: 19, VenueName: Venue 19, OutdoorVenue: true/, ); }); // query_with_bytes_parameter it('should use a BYTES query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithBytes ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithBytes 
${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 4, VenueName: Venue 4/); }); @@ -994,37 +994,37 @@ describe('Spanner', () => { // query_with_date_parameter it('should use a DATE query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithDate ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithDate ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02/ + /VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02/, ); assert.match( output, - /VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01/ + /VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01/, ); }); // query_with_float_parameter it('should use a FLOAT64 query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithFloat ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithFloat ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8/ + /VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8/, ); assert.match( output, - /VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9/ + /VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9/, ); }); // query_with_int_parameter it('should use a INT64 query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithInt ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithInt ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 19, VenueName: Venue 19, Capacity: 6300/); assert.match(output, /VenueId: 42, VenueName: Venue 42, Capacity: 3000/); @@ -1033,7 +1033,7 @@ describe('Spanner', () => { // query_with_string_parameter it('should use a STRING query parameter to query record from 
the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithString ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithString ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 42, VenueName: Venue 42/); }); @@ -1041,7 +1041,7 @@ describe('Spanner', () => { // query_with_timestamp_parameter it('should use a TIMESTAMP query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 4, VenueName: Venue 4, LastUpdateTime:/); assert.match(output, /VenueId: 19, VenueName: Venue 19, LastUpdateTime:/); @@ -1051,23 +1051,23 @@ describe('Spanner', () => { // add_numeric_column it('should add a Revenue column to Venues example table', async () => { const output = execSync( - `${datatypesCmd} addNumericColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${datatypesCmd} addNumericColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.include( output, - `Waiting for operation on ${DATABASE_ID} to complete...` + `Waiting for operation on ${DATABASE_ID} to complete...`, ); assert.include( output, - `Added Revenue column to Venues table in database ${DATABASE_ID}.` + `Added Revenue column to Venues table in database ${DATABASE_ID}.`, ); }); // update_data_with_numeric it('should update rows in Venues example table to add data in Revenue column', async () => { const output = execSync( - `${datatypesCmd} updateWithNumericData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} updateWithNumericData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Updated data./); }); @@ -1075,7 +1075,7 @@ describe('Spanner', () => { // query_with_numeric_parameter it('should use a NUMERIC query parameter to query records from the 
Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithNumericParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithNumericParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 4, Revenue: 35000/); }); @@ -1083,7 +1083,7 @@ describe('Spanner', () => { // query with request tag it('should execute a query with a request tag', async () => { const output = execSync( - `${requestTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${requestTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -1091,7 +1091,7 @@ describe('Spanner', () => { // read_write_transaction with transaction tag it('should execute a read/write transaction with a transaction tag', async () => { const output = execSync( - `${transactionTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${transactionTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.include(output, 'Inserted new outdoor venue'); }); @@ -1099,23 +1099,23 @@ describe('Spanner', () => { // add_json_column it('should add a VenueDetails column to Venues example table', async () => { const output = execSync( - `${datatypesCmd} addJsonColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${datatypesCmd} addJsonColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.include( output, - `Waiting for operation on ${DATABASE_ID} to complete...` + `Waiting for operation on ${DATABASE_ID} to complete...`, ); assert.include( output, - `Added VenueDetails column to Venues table in database ${DATABASE_ID}.` + `Added VenueDetails column to Venues table in database ${DATABASE_ID}.`, ); }); // update_data_with_json it('should update rows in Venues example table to add data in VenueDetails column', async () => { const output = execSync( - `${datatypesCmd} updateWithJsonData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + 
`${datatypesCmd} updateWithJsonData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Updated data./); }); @@ -1123,7 +1123,7 @@ describe('Spanner', () => { // query_with_json_parameter it('should use a JSON query parameter to query records from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithJsonParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithJsonParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 19, Details: {"open":true,"rating":9}/); }); @@ -1131,40 +1131,40 @@ describe('Spanner', () => { // add_and_drop_new_database_role it('should add and drop new database roles', async () => { const output = execSync( - `node archived/add-and-drop-new-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node archived/add-and-drop-new-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Waiting for operation to complete...')); assert.match( output, - new RegExp('Created roles child and parent and granted privileges') + new RegExp('Created roles child and parent and granted privileges'), ); assert.match( output, - new RegExp('Revoked privileges and dropped role child') + new RegExp('Revoked privileges and dropped role child'), ); }); // read_data_with_database_role it('should read data with database role', async () => { const output = execSync( - `node read-data-with-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node read-data-with-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('SingerId: 1, FirstName: Marc, LastName: Richards') + new RegExp('SingerId: 1, FirstName: Marc, LastName: Richards'), ); }); // get_database_roles it('should list database roles', async () => { const output = execSync( - `node archived/get-database-roles.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node 
archived/get-database-roles.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Role: projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/databaseRoles/public` - ) + `Role: projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/databaseRoles/public`, + ), ); }); @@ -1179,7 +1179,7 @@ describe('Spanner', () => { const versionTime = rows[0].toJSON().Timestamp.toISOString(); const output = execSync( - `${backupsCmd} createBackup ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID} ${versionTime}` + `${backupsCmd} createBackup ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID} ${versionTime}`, ); assert.match(output, new RegExp(`Backup (.+)${BACKUP_ID} of size`)); }); @@ -1189,11 +1189,11 @@ describe('Spanner', () => { const key = await getCryptoKey(); const output = execSync( - `${backupsCmd} createBackupWithEncryptionKey ${INSTANCE_ID} ${DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}` + `${backupsCmd} createBackupWithEncryptionKey ${INSTANCE_ID} ${DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}`, ); assert.match( output, - new RegExp(`Backup (.+)${ENCRYPTED_BACKUP_ID} of size`) + new RegExp(`Backup (.+)${ENCRYPTED_BACKUP_ID} of size`), ); assert.include(output, `using encryption key ${key.name}`); }); @@ -1202,18 +1202,18 @@ describe('Spanner', () => { it('should create a copy of a backup', async () => { const sourceBackupPath = `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/backups/${BACKUP_ID}`; const output = execSync( - `node archived/backups-copy.js ${INSTANCE_ID} ${COPY_BACKUP_ID} ${sourceBackupPath} ${PROJECT_ID}` + `node archived/backups-copy.js ${INSTANCE_ID} ${COPY_BACKUP_ID} ${sourceBackupPath} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`(.*)Backup copy(.*)${COPY_BACKUP_ID} of size(.*)`) + new RegExp(`(.*)Backup copy(.*)${COPY_BACKUP_ID} of size(.*)`), ); }); // cancel_backup it('should cancel a backup of the database', async () 
=> { const output = execSync( - `${backupsCmd} cancelBackup ${INSTANCE_ID} ${DATABASE_ID} ${CANCELLED_BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} cancelBackup ${INSTANCE_ID} ${DATABASE_ID} ${CANCELLED_BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Backup cancelled./); }); @@ -1221,7 +1221,7 @@ describe('Spanner', () => { // get_backups it('should list backups in the instance', async () => { const output = execSync( - `${backupsCmd} getBackups ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} getBackups ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.include(output, 'All backups:'); assert.include(output, 'Backups matching backup name:'); @@ -1237,24 +1237,24 @@ describe('Spanner', () => { // list_backup_operations it('should list backup operations in the instance', async () => { const output = execSync( - `${backupsCmd} getBackupOperations ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} getBackupOperations ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Create Backup Operations:/); assert.match( output, - new RegExp(`Backup (.+)${BACKUP_ID} (.+) is 100% complete`) + new RegExp(`Backup (.+)${BACKUP_ID} (.+) is 100% complete`), ); assert.match(output, /Copy Backup Operations:/); assert.match( output, - new RegExp(`Backup (.+)${COPY_BACKUP_ID} (.+) is 100% complete`) + new RegExp(`Backup (.+)${COPY_BACKUP_ID} (.+) is 100% complete`), ); }); // update_backup_expire_time it('should update the expire time of a backup', async () => { const output = execSync( - `${backupsCmd} updateBackup ${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} updateBackup ${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Expire time updated./); }); @@ -1268,15 +1268,15 @@ describe('Spanner', () => { await delay(this.test); const output = execSync( - `${backupsCmd} restoreBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + 
`${backupsCmd} restoreBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Database restored from backup./); assert.match( output, new RegExp( `Database (.+) was restored to ${RESTORE_DATABASE_ID} from backup ` + - `(.+)${BACKUP_ID} with version time (.+)` - ) + `(.+)${BACKUP_ID} with version time (.+)`, + ), ); }); @@ -1291,30 +1291,30 @@ describe('Spanner', () => { const key = await getCryptoKey(); const output = execSync( - `${backupsCmd} restoreBackupWithEncryptionKey ${INSTANCE_ID} ${ENCRYPTED_RESTORE_DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}` + `${backupsCmd} restoreBackupWithEncryptionKey ${INSTANCE_ID} ${ENCRYPTED_RESTORE_DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}`, ); assert.match(output, /Database restored from backup./); assert.match( output, new RegExp( `Database (.+) was restored to ${ENCRYPTED_RESTORE_DATABASE_ID} from backup ` + - `(.+)${ENCRYPTED_BACKUP_ID} using encryption key ${key.name}` - ) + `(.+)${ENCRYPTED_BACKUP_ID} using encryption key ${key.name}`, + ), ); }); // list_database_operations it('should list database operations in the instance', async () => { const output = execSync( - `${backupsCmd} getDatabaseOperations ${INSTANCE_ID} ${PROJECT_ID}` + `${backupsCmd} getDatabaseOperations ${INSTANCE_ID} ${PROJECT_ID}`, ); assert.match(output, /Optimize Database Operations:/); assert.match( output, new RegExp( `Database (.+)${RESTORE_DATABASE_ID} restored from backup is (\\d+)% ` + - 'optimized' - ) + 'optimized', + ), ); }); @@ -1332,7 +1332,7 @@ describe('Spanner', () => { } const output = execSync( - `${backupsCmd} deleteBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} deleteBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Backup deleted./); }); @@ -1340,7 +1340,7 @@ describe('Spanner', () => { // custom_timeout_and_retry it('should insert with custom timeout and retry 
settings', async () => { const output = execSync( - `${dmlCmd} insertWithCustomTimeoutAndRetrySettings ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} insertWithCustomTimeoutAndRetrySettings ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /record inserted./); }); @@ -1348,7 +1348,7 @@ describe('Spanner', () => { // get_commit_stats it('should update rows in Albums example table and return CommitStats', async () => { const output = execSync( - `${crudCmd} getCommitStats ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} getCommitStats ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Updated data with (\\d+) mutations')); }); @@ -1356,19 +1356,19 @@ describe('Spanner', () => { // create_database_with_version_retention_period it('should create a database with a version retention period', async () => { const output = execSync( - `${schemaCmd} createDatabaseWithVersionRetentionPeriod "${INSTANCE_ID}" "${VERSION_RETENTION_DATABASE_ID}" ${PROJECT_ID}` + `${schemaCmd} createDatabaseWithVersionRetentionPeriod "${INSTANCE_ID}" "${VERSION_RETENTION_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for operation on ${VERSION_RETENTION_DATABASE_ID} to complete...` - ) + `Waiting for operation on ${VERSION_RETENTION_DATABASE_ID} to complete...`, + ), ); assert.match( output, new RegExp( - `Created database ${VERSION_RETENTION_DATABASE_ID} with version retention period.` - ) + `Created database ${VERSION_RETENTION_DATABASE_ID} with version retention period.`, + ), ); assert.include(output, 'Version retention period: 1d'); assert.include(output, 'Earliest version time:'); @@ -1376,47 +1376,49 @@ describe('Spanner', () => { it('should create a table with foreign key delete cascade', async () => { const output = execSync( - `${createTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${createTableWithForeignKeyDeleteCascadeCommand} 
"${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId' - ) + 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId', + ), ); }); it('should alter a table with foreign key delete cascade', async () => { const output = execSync( - `${alterTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${alterTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp('Altered ShoppingCarts table with FKShoppingCartsCustomerName') + new RegExp( + 'Altered ShoppingCarts table with FKShoppingCartsCustomerName', + ), ); }); it('should drop a foreign key constraint delete cascade', async () => { const output = execSync( - `${dropForeignKeyConstraintDeleteCascaseCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${dropForeignKeyConstraintDeleteCascaseCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName' - ) + 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName', + ), ); }); @@ -1438,45 +1440,45 @@ describe('Spanner', () => { // create_sequence it('should create a sequence', async () => { const output = execSync( - `node archived/sequence-create.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + `node 
archived/sequence-create.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Created Seq sequence and Customers table') + new RegExp('Created Seq sequence and Customers table'), ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // alter_sequence it('should alter a sequence', async () => { const output = execSync( - `node archived/sequence-alter.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + `node archived/sequence-alter.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' - ) + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', + ), ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // drop_sequence it('should drop a sequence', async () => { const output = execSync( - `node archived/sequence-drop.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + `node archived/sequence-drop.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
- ) + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', + ), ); }); }); @@ -1504,78 +1506,78 @@ describe('Spanner', () => { // create_instance_config it('should create an example custom instance config', async () => { const output = execSync( - `node archived/instance-config-create.js ${SAMPLE_INSTANCE_CONFIG_ID} ${BASE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + `node archived/instance-config-create.js ${SAMPLE_INSTANCE_CONFIG_ID} ${BASE_INSTANCE_CONFIG_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for create operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` - ) + `Waiting for create operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Created instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + new RegExp(`Created instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`), ); }); // update_instance_config it('should update an example custom instance config', async () => { const output = execSync( - `node archived/instance-config-update.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + `node archived/instance-config-update.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for update operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` - ) + `Waiting for update operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Updated instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + new RegExp(`Updated instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`), ); }); // delete_instance_config it('should delete an example custom instance config', async () => { const output = execSync( - `node archived/instance-config-delete.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + `node archived/instance-config-delete.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Deleting ${SAMPLE_INSTANCE_CONFIG_ID}...`) + new RegExp(`Deleting 
${SAMPLE_INSTANCE_CONFIG_ID}...`), ); assert.match( output, - new RegExp(`Deleted instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + new RegExp(`Deleted instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`), ); }); // list_instance_config_operations it('should list all instance config operations', async () => { const output = execSync( - `node archived/instance-config-get-operations.js ${PROJECT_ID}` + `node archived/instance-config-get-operations.js ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Available instance config operations for project ${PROJECT_ID}:` - ) + `Available instance config operations for project ${PROJECT_ID}:`, + ), ); assert.include(output, 'Instance config operation for'); assert.include( output, - 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata' + 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata', ); }); // list_instance_configs it('should list available instance configs', async () => { const output = execSync( - `node archived/list-instance-configs.js ${PROJECT_ID}` + `node archived/list-instance-configs.js ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Available instance configs for project ${PROJECT_ID}:`) + new RegExp(`Available instance configs for project ${PROJECT_ID}:`), ); assert.include(output, 'Available leader options for instance config'); }); @@ -1584,7 +1586,7 @@ describe('Spanner', () => { // TODO: Enable when the feature has been released. 
it.skip('should get a specific instance config', async () => { const output = execSync( - `node archived/get-instance-config.js ${PROJECT_ID}` + `node archived/get-instance-config.js ${PROJECT_ID}`, ); assert.include(output, 'Available leader options for instance config'); }); @@ -1592,51 +1594,51 @@ describe('Spanner', () => { // create_database_with_default_leader it('should create a database with a default leader', async () => { const output = execSync( - `node archived/database-create-with-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER}" ${PROJECT_ID}` + `node archived/database-create-with-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for creation of ${DEFAULT_LEADER_DATABASE_ID} to complete...` - ) + `Waiting for creation of ${DEFAULT_LEADER_DATABASE_ID} to complete...`, + ), ); assert.match( output, new RegExp( - `Created database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER}.` - ) + `Created database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER}.`, + ), ); }); // update_database_with_default_leader it('should update a database with a default leader', async () => { const output = execSync( - `node archived/database-update-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER_2}" ${PROJECT_ID}` + `node archived/database-update-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER_2}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for updating of ${DEFAULT_LEADER_DATABASE_ID} to complete...` - ) + `Waiting for updating of ${DEFAULT_LEADER_DATABASE_ID} to complete...`, + ), ); assert.match( output, new RegExp( - `Updated database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER_2}.` - ) + `Updated database ${DEFAULT_LEADER_DATABASE_ID} with default leader 
${DEFAULT_LEADER_2}.`, + ), ); }); // list_databases it('should list databases on the instance', async () => { const output = execSync( - `node archived/list-databases.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `node archived/list-databases.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Databases for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}:` - ) + `Databases for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}:`, + ), ); assert.include(output, `(default leader = ${DEFAULT_LEADER_2}`); }); @@ -1644,13 +1646,13 @@ describe('Spanner', () => { // get_database_ddl it('should get the ddl of a database', async () => { const output = execSync( - `node archived/database-get-ddl.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` + `node archived/database-get-ddl.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Retrieved database DDL for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}/databases/${DEFAULT_LEADER_DATABASE_ID}:` - ) + `Retrieved database DDL for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}/databases/${DEFAULT_LEADER_DATABASE_ID}:`, + ), ); assert.include(output, 'CREATE TABLE Singers'); }); @@ -1679,65 +1681,65 @@ describe('Spanner', () => { // create_pg_database it('should create an example PostgreSQL database', async () => { const output = execSync( - `node archived/pg-database-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-database-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - `Created database ${PG_DATABASE_ID} on instance ${SAMPLE_INSTANCE_ID} with dialect POSTGRESQL.` - ) + `Created database ${PG_DATABASE_ID} on instance 
${SAMPLE_INSTANCE_ID} with dialect POSTGRESQL.`, + ), ); }); // pg_interleaving it('should create an interleaved table hierarchy using PostgreSQL dialect', async () => { const output = execSync( - `node archived/pg-interleaving.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-interleaving.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - `Created an interleaved table hierarchy in database ${PG_DATABASE_ID} using PostgreSQL dialect.` - ) + `Created an interleaved table hierarchy in database ${PG_DATABASE_ID} using PostgreSQL dialect.`, + ), ); }); // pg_dml_with_parameter it('should execute a DML statement with parameters on a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-with-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-with-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully executed 1 postgreSQL statements using DML') + new RegExp('Successfully executed 1 postgreSQL statements using DML'), ); }); // pg_dml_batch it('should execute a batch of DML statements on a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-batch.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-batch.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Successfully executed 3 postgreSQL statements using Batch DML.' 
- ) + 'Successfully executed 3 postgreSQL statements using Batch DML.', + ), ); }); // pg_dml_partitioned it('should execute a partitioned DML on a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-partitioned.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-partitioned.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Successfully deleted 1 record.')); }); @@ -1745,42 +1747,42 @@ describe('Spanner', () => { // pg_query_with_parameters it('should execute a query with parameters on a Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('SingerId: 1, FirstName: Alice, LastName: Henderson') + new RegExp('SingerId: 1, FirstName: Alice, LastName: Henderson'), ); }); // pg_dml_update it('should update a table using parameterized queries on a Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-dml-getting-started-update.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-getting-started-update.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated 1 record in the Singers table.') + new RegExp('Successfully updated 1 record in the Singers table.'), ); }); // pg_add_column it('should add a column to a table in the Spanner PostgreSQL database.', async () => { const output = execSync( - `node archived/pg-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Added MarketingBudget column to Albums table in database ${PG_DATABASE_ID}` - ) + `Added MarketingBudget column to Albums table in database 
${PG_DATABASE_ID}`, + ), ); }); //pg_create_index it('should create an index in the Spanner PostgreSQL database.', async () => { const output = execSync( - `node archived/pg-index-create-storing.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-index-create-storing.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Added the AlbumsByAlbumTitle index.')); }); @@ -1788,7 +1790,7 @@ describe('Spanner', () => { // pg_schema_information it('should query the information schema metadata in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-schema-information.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-schema-information.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Table: public.albums')); assert.match(output, new RegExp('Table: public.author')); @@ -1799,29 +1801,29 @@ describe('Spanner', () => { // pg_ordering_nulls it('should order nulls as per clause in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-ordering-nulls.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-ordering-nulls.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Author ORDER BY FirstName')); assert.match(output, new RegExp('Author ORDER BY FirstName DESC')); assert.match(output, new RegExp('Author ORDER BY FirstName NULLS FIRST')); assert.match( output, - new RegExp('Author ORDER BY FirstName DESC NULLS LAST') + new RegExp('Author ORDER BY FirstName DESC NULLS LAST'), ); }); // pg_numeric_data_type it('should create a table, insert and query pg numeric data', async () => { const output = execSync( - `node pg-numeric-data-type.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-numeric-data-type.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on 
${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Added table venues to database ${PG_DATABASE_ID}.`) + new RegExp(`Added table venues to database ${PG_DATABASE_ID}.`), ); assert.match(output, new RegExp('Inserted data.')); assert.match(output, new RegExp('VenueId: 4, Revenue: 97372.3863')); @@ -1832,24 +1834,24 @@ describe('Spanner', () => { // pg_jsonb_add_column it('should add a jsonb column to a table', async () => { const output = execSync( - `node archived/pg-jsonb-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-jsonb-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - `Added jsonb column to table venues to database ${PG_DATABASE_ID}.` - ) + `Added jsonb column to table venues to database ${PG_DATABASE_ID}.`, + ), ); }); // pg_jsonb_insert_data it('should insert pg jsonb data', async () => { const output = execSync( - `node pg-jsonb-update-data.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-jsonb-update-data.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Updated data.')); }); @@ -1857,24 +1859,24 @@ describe('Spanner', () => { // pg_jsonb_query_data it('should query pg jsonb data', async () => { const output = execSync( - `node pg-jsonb-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-jsonb-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('VenueId: 19, Details: {"value":{"open":true,"rating":9}}') + new RegExp('VenueId: 19, Details: {"value":{"open":true,"rating":9}}'), ); }); // pg_case_sensitivity it('should create case 
sensitive table and query the information in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-case-sensitivity.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-case-sensitivity.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Created table with case sensitive names in database ${PG_DATABASE_ID} using PostgreSQL dialect.` - ) + `Created table with case sensitive names in database ${PG_DATABASE_ID} using PostgreSQL dialect.`, + ), ); assert.match(output, new RegExp('Inserted data using mutations.')); assert.match(output, new RegExp('Concerts Table Data using Mutations:')); @@ -1885,7 +1887,7 @@ describe('Spanner', () => { // pg_datatypes_casting it('should use cast operator to cast from one data type to another in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-datatypes-casting.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-datatypes-casting.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Data types after casting')); }); @@ -1893,7 +1895,7 @@ describe('Spanner', () => { // pg_functions it('should call a server side function on a Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-functions.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-functions.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('1284352323 seconds after epoch is')); }); @@ -1901,11 +1903,11 @@ describe('Spanner', () => { // pg_dml_returning_insert it('should insert records using DML Returning in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-returning-insert ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-returning-insert ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully inserted 1 
record into the Singers table') + new RegExp('Successfully inserted 1 record into the Singers table'), ); assert.match(output, new RegExp('Virginia Watson')); }); @@ -1913,11 +1915,11 @@ describe('Spanner', () => { // pg_dml_returning_update it('should update records using DML Returning in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-returning-update ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-returning-update ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated 1 record into the Singers table') + new RegExp('Successfully updated 1 record into the Singers table'), ); assert.match(output, new RegExp('Virginia1 Watson1')); }); @@ -1925,11 +1927,11 @@ describe('Spanner', () => { // pg_dml_returning_delete it('should delete records using DML Returning in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-returning-delete ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-returning-delete ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully deleted 1 record from the Singers table') + new RegExp('Successfully deleted 1 record from the Singers table'), ); assert.match(output, new RegExp('Virginia1 Watson1')); }); @@ -1937,65 +1939,65 @@ describe('Spanner', () => { // pg_create_sequence it('should create a sequence', async () => { const output = execSync( - `node archived/pg-sequence-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-sequence-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Created Seq sequence and Customers table') + new RegExp('Created Seq sequence and Customers table'), ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // 
pg_alter_sequence it('should alter a sequence', async () => { const output = execSync( - `node archived/pg-sequence-alter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-sequence-alter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' - ) + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', + ), ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // pg_drop_sequence it('should drop a sequence', async () => { const output = execSync( - `node archived/pg-sequence-drop.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node archived/pg-sequence-drop.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
- ) + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', + ), ); }); // directed_read_options it('should run read-only transaction with directed read options set', async () => { const output = execSync( - `node directed-reads.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node directed-reads.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); console.log(output); assert.match( output, new RegExp( - 'SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace' - ) + 'SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace', + ), ); assert.match( output, new RegExp( - 'Successfully executed read-only transaction with directedReadOptions' - ) + 'Successfully executed read-only transaction with directedReadOptions', + ), ); }); }); diff --git a/samples/system-test/spanner.test.js b/samples/system-test/spanner.test.js index 0efaa47a8..f50a0a6a9 100644 --- a/samples/system-test/spanner.test.js +++ b/samples/system-test/spanner.test.js @@ -138,8 +138,8 @@ async function deleteStaleInstances() { const limit = pLimit(5); await Promise.all( instances.map(instance => - limit(() => setTimeout(deleteInstance, delay, instance)) - ) + limit(() => setTimeout(deleteInstance, delay, instance)), + ), ); } @@ -182,7 +182,7 @@ async function getCryptoKey(key_location) { PROJECT_ID, key_location, KEY_RING_ID, - KEY_ID + KEY_ID, ); const [key] = await client.getCryptoKey({ name: keyName, @@ -228,7 +228,7 @@ describe('Autogenerated Admin Clients', () => { return operation.promise(); } else { console.log( - `Not creating temp instance, using + ${instance.formattedName_}...` + `Not creating temp instance, using + ${instance.formattedName_}...`, ); } }); @@ -269,98 +269,98 @@ describe('Autogenerated Admin Clients', () => { // create_and_update_instance it('should create and update an example instance with spanner editions', async () => { const createInstanceOutput = execSync( - `${instanceCmd} createInstance "${SAMPLE_INSTANCE_ID}" 
${PROJECT_ID}` + `${instanceCmd} createInstance "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( createInstanceOutput, new RegExp( - `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` - ) + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...`, + ), ); assert.match( createInstanceOutput, - new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`), ); const updateInstanceOutput = execSync( - `node instance-update "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `node instance-update "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( updateInstanceOutput, new RegExp( - `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` - ) + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...`, + ), ); assert.match( updateInstanceOutput, - new RegExp(`Updated instance ${SAMPLE_INSTANCE_ID}.`) + new RegExp(`Updated instance ${SAMPLE_INSTANCE_ID}.`), ); assert.match( updateInstanceOutput, new RegExp( - `Instance ${SAMPLE_INSTANCE_ID} has been updated with the ENTERPRISE edition.` - ) + `Instance ${SAMPLE_INSTANCE_ID} has been updated with the ENTERPRISE edition.`, + ), ); }); // create_and_update_instance_default_backup_schedule_type it('should create an example instance without default backup schedule type and update the instance to have it', async () => { const createInstanceOutput = execSync( - `${createInstanceWithoutDefaultBackupSchedulesCommand} "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `${createInstanceWithoutDefaultBackupSchedulesCommand} "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( createInstanceOutput, new RegExp( - `Created instance ${SAMPLE_INSTANCE_ID} without default backup schedules.` - ) + `Created instance ${SAMPLE_INSTANCE_ID} without default backup schedules.`, + ), ); const updateInstanceOutput = execSync( - `${updateInstanceDefaultBackupScheduleTypeCommand} "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `${updateInstanceDefaultBackupScheduleTypeCommand} 
"${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( updateInstanceOutput, new RegExp( - `Instance ${SAMPLE_INSTANCE_ID} has been updated with the AUTOMATIC default backup schedule type.` - ) + `Instance ${SAMPLE_INSTANCE_ID} has been updated with the AUTOMATIC default backup schedule type.`, + ), ); }); // create_instance_with_processing_units it('should create an example instance with processing units', async () => { const output = execSync( - `${instanceCmd} createInstanceWithProcessingUnits "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `${instanceCmd} createInstanceWithProcessingUnits "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` - ) + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`), ); assert.match( output, - new RegExp(`Instance ${SAMPLE_INSTANCE_ID} has 500 processing units.`) + new RegExp(`Instance ${SAMPLE_INSTANCE_ID} has 500 processing units.`), ); }); // create_instance_with_autoscaling_config it('should create an example instance with autoscaling config', async () => { const output = execSync( - `node instance-with-autoscaling-config.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `node instance-with-autoscaling-config.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` - ) + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`), ); assert.match( output, @@ -375,25 +375,25 @@ describe('Autogenerated Admin Clients', () => { '\n' + 'High priority cpu utilization percent: 65.' + '\n' + - 'Storage utilization percent: 95.' 
- ) + 'Storage utilization percent: 95.', + ), ); }); // create_instance_with_asymmetric_autoscaling_config it('should create an example instance with autoscaling config and asymmetric Autoscaling Options', async () => { const output = execSync( - `node instance-with-asymmetric-autoscaling-config.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `node instance-with-asymmetric-autoscaling-config.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...` - ) + `Waiting for operation on ${SAMPLE_INSTANCE_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`) + new RegExp(`Created instance ${SAMPLE_INSTANCE_ID}.`), ); assert.match( output, @@ -410,8 +410,8 @@ describe('Autogenerated Admin Clients', () => { '\n' + 'Storage utilization percent: 95.' + '\n' + - 'Asymmetric Autoscaling Options: europe-west1, europe-west4, asia-east1' - ) + 'Asymmetric Autoscaling Options: europe-west1, europe-west4, asia-east1', + ), ); }); }); @@ -422,35 +422,35 @@ describe('Autogenerated Admin Clients', () => { assert.strictEqual( exists, true, - 'The main instance was not created successfully!' 
+ 'The main instance was not created successfully!', ); }); // create_database it('should create an example database', async () => { const output = execSync( - `${schemaCmd} createDatabase "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${schemaCmd} createDatabase "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for creation of ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for creation of ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Created database ${DATABASE_ID} on instance ${INSTANCE_ID}.`) + new RegExp(`Created database ${DATABASE_ID} on instance ${INSTANCE_ID}.`), ); }); // update_database it('should set database metadata', async () => { const output = execSync( - `node database-update.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node database-update.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for update operation for ${DATABASE_ID} to complete...` - ) + `Waiting for update operation for ${DATABASE_ID} to complete...`, + ), ); assert.match(output, new RegExp(`Updated database ${DATABASE_ID}.`)); // cleanup @@ -472,23 +472,23 @@ describe('Autogenerated Admin Clients', () => { const key = await getCryptoKey(KEY_LOCATION_ID1); const output = execSync( - `${schemaCmd} createDatabaseWithEncryptionKey "${INSTANCE_ID}" "${ENCRYPTED_DATABASE_ID}" ${PROJECT_ID} "${key.name}"` + `${schemaCmd} createDatabaseWithEncryptionKey "${INSTANCE_ID}" "${ENCRYPTED_DATABASE_ID}" ${PROJECT_ID} "${key.name}"`, ); assert.match( output, new RegExp( - `Waiting for operation on ${ENCRYPTED_DATABASE_ID} to complete...` - ) + `Waiting for operation on ${ENCRYPTED_DATABASE_ID} to complete...`, + ), ); assert.match( output, new RegExp( - `Created database ${ENCRYPTED_DATABASE_ID} on instance ${INSTANCE_ID}.` - ) + `Created database ${ENCRYPTED_DATABASE_ID} on instance ${INSTANCE_ID}.`, + ), ); assert.match( output, - new RegExp(`Database encrypted 
with key ${key.name}.`) + new RegExp(`Database encrypted with key ${key.name}.`), ); }); }); @@ -498,7 +498,7 @@ describe('Autogenerated Admin Clients', () => { // instance and database set up at this point. it('should query a table', async () => { const output = execSync( - `node quickstart ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}` + `node quickstart ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}`, ); assert.match(output, /Query: \d+ found./); }); @@ -507,7 +507,7 @@ describe('Autogenerated Admin Clients', () => { // insert_data it('should insert rows into an example table', async () => { const output = execSync( - `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -515,13 +515,13 @@ describe('Autogenerated Admin Clients', () => { // delete_data it('should delete and then insert rows in the example tables', async () => { let output = execSync( - `${crudCmd} delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.include(output, 'Deleted individual rows in Albums.'); assert.include(output, '2 records deleted from Singers.'); assert.include(output, '3 records deleted from Singers.'); output = execSync( - `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -529,7 +529,7 @@ describe('Autogenerated Admin Clients', () => { // query_data it('should query an example table and return matching rows', async () => { const output = execSync( - `${crudCmd} query ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} query ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -537,7 +537,7 @@ describe('Autogenerated Admin Clients', () => { // read_data it('should read an 
example table', async () => { const output = execSync( - `${crudCmd} read ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} read ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -545,7 +545,7 @@ describe('Autogenerated Admin Clients', () => { // add_column it('should add a column to a table', async () => { const output = execSync( - `${schemaCmd} addColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${schemaCmd} addColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match(output, /Added the MarketingBudget column\./); @@ -554,7 +554,7 @@ describe('Autogenerated Admin Clients', () => { // update_data it('should update existing rows in an example table', async () => { const output = execSync( - `${crudCmd} update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Updated data\./); }); @@ -565,22 +565,22 @@ describe('Autogenerated Admin Clients', () => { // 15 seconds have elapsed since the update_data test. 
await new Promise(r => setTimeout(r, 16000)); const output = execSync( - `${crudCmd} read-stale ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} read-stale ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget: 100000/ + /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget: 100000/, ); assert.match( output, - /SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget: 500000/ + /SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget: 500000/, ); }); // query_data_with_new_column it('should query an example table with an additional column and return matching rows', async () => { const output = execSync( - `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 100000/); assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 500000/); @@ -589,7 +589,7 @@ describe('Autogenerated Admin Clients', () => { // create_index it('should create an index in an example table', async () => { const output = execSync( - `node index-create ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-create ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match(output, /Added the AlbumsByAlbumTitle index\./); @@ -602,7 +602,7 @@ describe('Autogenerated Admin Clients', () => { await delay(this.test); const output = execSync( - `node index-create-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-create-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match(output, /Added the AlbumsByAlbumTitle2 index\./); @@ -611,36 +611,36 @@ describe('Autogenerated Admin Clients', () => { // query_data_with_index 
it('should query an example table with an index and return matching rows', async () => { const output = execSync( - `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/, ); assert.notMatch( output, - /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ + /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/, ); }); it('should respect query boundaries when querying an example table with an index', async () => { const output = execSync( - `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID} "Ardvark" "Zoo"` + `node index-query-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID} "Ardvark" "Zoo"`, ); assert.match( output, - /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/ + /AlbumId: 1, AlbumTitle: Total Junk, MarketingBudget:/, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Go, Go, Go, MarketingBudget:/, ); }); // read_data_with_index it('should read an example table with an index', async () => { const output = execSync( - `node index-read-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-read-data ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -648,7 +648,7 @@ describe('Autogenerated Admin Clients', () => { // read_data_with_storing_index it('should read an example table with a storing index', async () => { const output = execSync( - `node index-read-data-with-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node index-read-data-with-storing ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -656,48 +656,48 @@ describe('Autogenerated Admin Clients', () => { // spanner_create_client_with_query_options it('should 
use query options from a database reference', async () => { const output = execSync( - `${queryOptionsCmd} databaseWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${queryOptionsCmd} databaseWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/, ); }); // spanner_query_with_query_options it('should use query options on request', async () => { const output = execSync( - `${queryOptionsCmd} queryWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${queryOptionsCmd} queryWithQueryOptions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/, ); }); // query with RPC priority for run command it('should use RPC priority from request options for run command', async () => { const output = execSync( - `${rpcPriorityRunCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityRunCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully fetched \d rows using low RPC priority\./ + /Successfully fetched \d rows using low RPC priority\./, ); assert.match( output, - /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/ + /AlbumId: 2, AlbumTitle: Forever Hold your Peace, MarketingBudget:/, ); }); // query with RPC priority for Read command it('should use RPC priority from request options for read command', async () => { const output = execSync( - `${rpcPriorityReadCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityReadCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully fetched \d rows using low RPC priority\./ + /Successfully fetched \d rows using low RPC priority\./, ); assert.match(output, /SingerId: 
1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -705,44 +705,44 @@ describe('Autogenerated Admin Clients', () => { // query with RPC priority for transaction command it('should use RPC priority from request options for transaction command', async () => { const output = execSync( - `${rpcPriorityTransactionCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityTransactionCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully inserted 1 record into the Singers table using low RPC priority\./ + /Successfully inserted 1 record into the Singers table using low RPC priority\./, ); }); // query with RPC priority for batch DML command it('should use RPC priority from request options for batch DML command', async () => { const output = execSync( - `${rpcPriorityBatchDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityBatchDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully executed 2 SQL statements using Batch DML using low RPC priority\./ + /Successfully executed 2 SQL statements using Batch DML using low RPC priority\./, ); }); // query with RPC priority for partitioned DML command it('should use RPC priority from request options for partitioned DML command', async () => { const output = execSync( - `${rpcPriorityPartitionedDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityPartitionedDMLCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated (\\d+) records using low RPC priority.') + new RegExp('Successfully updated (\\d+) records using low RPC priority.'), ); }); // query with RPC priority for Query partitions command it('should use RPC priority from request options for Query partition command', async () => { const output = execSync( - `${rpcPriorityQueryPartitionsCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${rpcPriorityQueryPartitionsCommand} 
${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully created \d query partitions using low RPC priority\./ + /Successfully created \d query partitions using low RPC priority\./, ); assert.match(output, /Successfully received \d from executed partitions\./); }); @@ -750,7 +750,7 @@ describe('Autogenerated Admin Clients', () => { // read_only_transactioni it('should read an example table using transactions', async () => { const output = execSync( - `${transactionCmd} readOnly ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${transactionCmd} readOnly ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); assert.match(output, /Successfully executed read-only transaction\./); @@ -759,16 +759,16 @@ describe('Autogenerated Admin Clients', () => { // read_write_transaction it('should read from and write to an example table using transactions', async () => { let output = execSync( - `${transactionCmd} readWrite ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${transactionCmd} readWrite ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /The first album's marketing budget: 100000/); assert.match(output, /The second album's marketing budget: 500000/); assert.match( output, - /Successfully executed read-write transaction to transfer 200000 from Album 2 to Album 1./ + /Successfully executed read-write transaction to transfer 200000 from Album 2 to Album 1./, ); output = execSync( - `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${schemaCmd} queryNewColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, MarketingBudget: 300000/); assert.match(output, /SingerId: 2, AlbumId: 2, MarketingBudget: 300000/); @@ -777,7 +777,7 @@ describe('Autogenerated Admin Clients', () => { // batch_client it('should create and execute query partitions', async () => { const output = execSync( - 
`${batchCmd} create-and-execute-query-partitions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${batchCmd} create-and-execute-query-partitions ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully created \d query partitions\./); assert.match(output, /Successfully received \d from executed partitions\./); @@ -795,7 +795,7 @@ describe('Autogenerated Admin Clients', () => { const partition = JSON.stringify(partitions[0]); const output = execSync( - `${batchCmd} execute-partition ${INSTANCE_ID} ${DATABASE_ID} '${identifier}' '${partition}' ${PROJECT_ID}` + `${batchCmd} execute-partition ${INSTANCE_ID} ${DATABASE_ID} '${identifier}' '${partition}' ${PROJECT_ID}`, ); assert.match(output, /Successfully received \d from executed partition\./); await transaction.close(); @@ -804,19 +804,19 @@ describe('Autogenerated Admin Clients', () => { // add_timestamp_column it('should add a timestamp column to a table', async () => { const output = execSync( - `${timestampCmd} addTimestampColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} addTimestampColumn ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Waiting for operation to complete\.\.\./); assert.match( output, - /Added LastUpdateTime as a commit timestamp column in Albums table\./ + /Added LastUpdateTime as a commit timestamp column in Albums table\./, ); }); // update_data_with_timestamp_column it('should update existing rows in an example table with commit timestamp column', async () => { const output = execSync( - `${timestampCmd} updateWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} updateWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Updated data\./); }); @@ -824,38 +824,38 @@ describe('Autogenerated Admin Clients', () => { // query_data_with_timestamp_column it('should query an example table with an additional timestamp column and return matching rows', async () => { const output 
= execSync( - `${timestampCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /SingerId: 1, AlbumId: 1, MarketingBudget: 1000000, LastUpdateTime:/ + /SingerId: 1, AlbumId: 1, MarketingBudget: 1000000, LastUpdateTime:/, ); assert.match( output, - /SingerId: 2, AlbumId: 2, MarketingBudget: 750000, LastUpdateTime:/ + /SingerId: 2, AlbumId: 2, MarketingBudget: 750000, LastUpdateTime:/, ); }); // create_table_with_timestamp_column it('should create an example table with a timestamp column', async () => { const output = execSync( - `${timestampCmd} createTableWithTimestamp "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${timestampCmd} createTableWithTimestamp "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Created table Performances in database ${DATABASE_ID}.`) + new RegExp(`Created table Performances in database ${DATABASE_ID}.`), ); }); // insert_data_with_timestamp it('should insert rows into an example table with timestamp column', async () => { const output = execSync( - `${timestampCmd} insertWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} insertWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -863,7 +863,7 @@ describe('Autogenerated Admin Clients', () => { // query_new_table_with_timestamp it('should query an example table with a non-null timestamp column and return matching rows', async () => { const output = execSync( - `${timestampCmd} queryTableWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${timestampCmd} queryTableWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, 
VenueId: 4, EventDate:/); assert.match(output, /Revenue: 15000, LastUpdateTime:/); @@ -872,7 +872,7 @@ describe('Autogenerated Admin Clients', () => { // write_data_for_struct_queries it('should insert rows into an example table for use with struct query examples', async () => { const output = execSync( - `${structCmd} writeDataForStructQueries ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} writeDataForStructQueries ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data\./); }); @@ -880,7 +880,7 @@ describe('Autogenerated Admin Clients', () => { // query_with_struct_param it('should query an example table with a STRUCT param', async () => { const output = execSync( - `${structCmd} queryDataWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryDataWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 6/); }); @@ -888,7 +888,7 @@ describe('Autogenerated Admin Clients', () => { // query_with_array_of_struct_param it('should query an example table with an array of STRUCT param', async () => { const output = execSync( - `${structCmd} queryWithArrayOfStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryWithArrayOfStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 6\nSingerId: 7\nSingerId: 8/); }); @@ -896,7 +896,7 @@ describe('Autogenerated Admin Clients', () => { // query_with_struct_field_param it('should query an example table with a STRUCT field param', async () => { const output = execSync( - `${structCmd} queryStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 6/); }); @@ -904,29 +904,29 @@ describe('Autogenerated Admin Clients', () => { // query_with_nested_struct_param it('should query an example table with a nested STRUCT param', async () => { const output = execSync( - `${structCmd} 
queryNestedStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${structCmd} queryNestedStructField ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /SingerId: 6, SongName: Imagination\nSingerId: 9, SongName: Imagination/ + /SingerId: 6, SongName: Imagination\nSingerId: 9, SongName: Imagination/, ); }); // dml_standard_insert it('should insert rows into an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} insertUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} insertUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully inserted 1 record into the Singers table/ + /Successfully inserted 1 record into the Singers table/, ); }); // dml_standard_update it('should update a row in an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} updateUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 1 record/); }); @@ -934,7 +934,7 @@ describe('Autogenerated Admin Clients', () => { // dml_standard_delete it('should delete a row from an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} deleteUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} deleteUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully deleted 1 record\./); }); @@ -942,7 +942,7 @@ describe('Autogenerated Admin Clients', () => { // dml_standard_update_with_timestamp it('should update the timestamp of multiple records in an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} updateUsingDmlWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingDmlWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 3 records/); }); @@ -950,7 
+950,7 @@ describe('Autogenerated Admin Clients', () => { // dml_write_then_read it('should insert a record in an example table using a DML statement and then query the record', async () => { const output = execSync( - `${dmlCmd} writeAndReadUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} writeAndReadUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Timothy Campbell/); }); @@ -958,7 +958,7 @@ describe('Autogenerated Admin Clients', () => { // dml_structs it('should update a record in an example table using a DML statement along with a struct value', async () => { const output = execSync( - `${dmlCmd} updateUsingDmlWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingDmlWithStruct ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 1 record/); }); @@ -966,7 +966,7 @@ describe('Autogenerated Admin Clients', () => { // dml_getting_started_insert it('should insert multiple records into an example table using a DML statement', async () => { const output = execSync( - `${dmlCmd} writeUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} writeUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /4 records inserted/); }); @@ -974,7 +974,7 @@ describe('Autogenerated Admin Clients', () => { // dml_query_with_parameter it('should use a parameter query to query record that was inserted using a DML statement', async () => { const output = execSync( - `${dmlCmd} queryWithParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} queryWithParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 12, FirstName: Melissa, LastName: Garcia/); }); @@ -982,18 +982,18 @@ describe('Autogenerated Admin Clients', () => { // dml_getting_started_update it('should transfer value from one record to another using DML statements within a transaction', async () => { const output = execSync( - `${dmlCmd} 
writeWithTransactionUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} writeWithTransactionUsingDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully executed read-write transaction using DML to transfer 200000 from Album 2 to Album 1/ + /Successfully executed read-write transaction using DML to transfer 200000 from Album 2 to Album 1/, ); }); // dml_partitioned_update it('should update multiple records using a partitioned DML statement', async () => { const output = execSync( - `${dmlCmd} updateUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully updated 3 records/); }); @@ -1001,7 +1001,7 @@ describe('Autogenerated Admin Clients', () => { // dml_partitioned_delete it('should delete multiple records using a partitioned DML statement', async () => { const output = execSync( - `${dmlCmd} deleteUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} deleteUsingPartitionedDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Successfully deleted 6 records/); }); @@ -1009,22 +1009,22 @@ describe('Autogenerated Admin Clients', () => { // dml_batch_update it('should insert and update records using Batch DML', async () => { const output = execSync( - `${dmlCmd} updateUsingBatchDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} updateUsingBatchDml ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /Successfully executed 2 SQL statements using Batch DML/ + /Successfully executed 2 SQL statements using Batch DML/, ); }); // dml_returning_insert it('should insert records using DML Returning', async () => { const output = execSync( - `node dml-returning-insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node dml-returning-insert ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new 
RegExp('Successfully inserted 1 record into the Singers table') + new RegExp('Successfully inserted 1 record into the Singers table'), ); assert.match(output, new RegExp('Virginia Watson')); }); @@ -1032,11 +1032,11 @@ describe('Autogenerated Admin Clients', () => { // dml_returning_update it('should update records using DML Returning', async () => { const output = execSync( - `node dml-returning-update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node dml-returning-update ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated 1 record into the Albums table') + new RegExp('Successfully updated 1 record into the Albums table'), ); assert.match(output, new RegExp('2000000')); }); @@ -1044,11 +1044,11 @@ describe('Autogenerated Admin Clients', () => { // dml_returning_delete it('should delete records using DML Returning', async () => { const output = execSync( - `node dml-returning-delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node dml-returning-delete ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully deleted 1 record from the Singers table') + new RegExp('Successfully deleted 1 record from the Singers table'), ); assert.match(output, new RegExp('Virginia Watson')); }); @@ -1056,7 +1056,7 @@ describe('Autogenerated Admin Clients', () => { // batch_write it('should perform CRUD operations using batch write', async () => { const output = execSync( - `${batchWriteCmd} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${batchWriteCmd} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ).toString(); const successRegex = @@ -1077,23 +1077,23 @@ describe('Autogenerated Admin Clients', () => { // create_table_with_datatypes it('should create Venues example table with supported datatype columns', async () => { const output = execSync( - `${datatypesCmd} createVenuesTable "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${datatypesCmd} createVenuesTable "${INSTANCE_ID}" 
"${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Created table Venues in database ${DATABASE_ID}.`) + new RegExp(`Created table Venues in database ${DATABASE_ID}.`), ); }); // insert_datatypes_data it('should insert multiple records into Venues example table', async () => { const output = execSync( - `${datatypesCmd} insertData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} insertData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Inserted data./); }); @@ -1101,33 +1101,33 @@ describe('Autogenerated Admin Clients', () => { // query_with_array_parameter it('should use an ARRAY query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithArray ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithArray ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01/ + /VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01/, ); assert.match( output, - /VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01/ + /VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01/, ); }); // query_with_bool_parameter it('should use a BOOL query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithBool ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithBool ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 19, VenueName: Venue 19, OutdoorVenue: true/ + /VenueId: 19, VenueName: Venue 19, OutdoorVenue: true/, ); }); // query_with_bytes_parameter it('should use a BYTES query parameter to query record from the Venues example table', async () => { const output = 
execSync( - `${datatypesCmd} queryWithBytes ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithBytes ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 4, VenueName: Venue 4/); }); @@ -1135,37 +1135,37 @@ describe('Autogenerated Admin Clients', () => { // query_with_date_parameter it('should use a DATE query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithDate ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithDate ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02/ + /VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02/, ); assert.match( output, - /VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01/ + /VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01/, ); }); // query_with_float_parameter it('should use a FLOAT64 query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithFloat ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithFloat ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - /VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8/ + /VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8/, ); assert.match( output, - /VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9/ + /VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9/, ); }); // query_with_int_parameter it('should use a INT64 query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithInt ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithInt ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 19, VenueName: Venue 19, Capacity: 6300/); assert.match(output, /VenueId: 42, VenueName: Venue 42, Capacity: 
3000/); @@ -1174,7 +1174,7 @@ describe('Autogenerated Admin Clients', () => { // query_with_string_parameter it('should use a STRING query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithString ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithString ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 42, VenueName: Venue 42/); }); @@ -1182,7 +1182,7 @@ describe('Autogenerated Admin Clients', () => { // query_with_timestamp_parameter it('should use a TIMESTAMP query parameter to query record from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithTimestamp ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 4, VenueName: Venue 4, LastUpdateTime:/); assert.match(output, /VenueId: 19, VenueName: Venue 19, LastUpdateTime:/); @@ -1192,23 +1192,23 @@ describe('Autogenerated Admin Clients', () => { // add_numeric_column it('should add a Revenue column to Venues example table', async () => { const output = execSync( - `${datatypesCmd} addNumericColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${datatypesCmd} addNumericColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.include( output, - `Waiting for operation on ${DATABASE_ID} to complete...` + `Waiting for operation on ${DATABASE_ID} to complete...`, ); assert.include( output, - `Added Revenue column to Venues table in database ${DATABASE_ID}.` + `Added Revenue column to Venues table in database ${DATABASE_ID}.`, ); }); // update_data_with_numeric it('should update rows in Venues example table to add data in Revenue column', async () => { const output = execSync( - `${datatypesCmd} updateWithNumericData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} updateWithNumericData ${INSTANCE_ID} ${DATABASE_ID} 
${PROJECT_ID}`, ); assert.match(output, /Updated data./); }); @@ -1216,7 +1216,7 @@ describe('Autogenerated Admin Clients', () => { // query_with_numeric_parameter it('should use a NUMERIC query parameter to query records from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithNumericParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithNumericParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 4, Revenue: 35000/); }); @@ -1224,7 +1224,7 @@ describe('Autogenerated Admin Clients', () => { // query with request tag it('should execute a query with a request tag', async () => { const output = execSync( - `${requestTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${requestTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk/); }); @@ -1232,7 +1232,7 @@ describe('Autogenerated Admin Clients', () => { // read_write_transaction with transaction tag it('should execute a read/write transaction with a transaction tag', async () => { const output = execSync( - `${transactionTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${transactionTagCommand} ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.include(output, 'Inserted new outdoor venue'); }); @@ -1240,23 +1240,23 @@ describe('Autogenerated Admin Clients', () => { // add_json_column it('should add a VenueDetails column to Venues example table', async () => { const output = execSync( - `${datatypesCmd} addJsonColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${datatypesCmd} addJsonColumn "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.include( output, - `Waiting for operation on ${DATABASE_ID} to complete...` + `Waiting for operation on ${DATABASE_ID} to complete...`, ); assert.include( output, - `Added VenueDetails column to Venues table in database ${DATABASE_ID}.` + `Added VenueDetails 
column to Venues table in database ${DATABASE_ID}.`, ); }); // update_data_with_json it('should update rows in Venues example table to add data in VenueDetails column', async () => { const output = execSync( - `${datatypesCmd} updateWithJsonData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} updateWithJsonData ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /Updated data./); }); @@ -1264,7 +1264,7 @@ describe('Autogenerated Admin Clients', () => { // query_with_json_parameter it('should use a JSON query parameter to query records from the Venues example table', async () => { const output = execSync( - `${datatypesCmd} queryWithJsonParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${datatypesCmd} queryWithJsonParameter ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /VenueId: 19, Details: {"open":true,"rating":9}/); }); @@ -1272,115 +1272,115 @@ describe('Autogenerated Admin Clients', () => { // add_and_drop_new_database_role it('should add and drop new database roles', async () => { const output = execSync( - `node add-and-drop-new-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node add-and-drop-new-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Waiting for operation to complete...')); assert.match( output, - new RegExp('Created roles child and parent and granted privileges') + new RegExp('Created roles child and parent and granted privileges'), ); assert.match( output, - new RegExp('Revoked privileges and dropped role child') + new RegExp('Revoked privileges and dropped role child'), ); }); // read_data_with_database_role it('should read data with database role', async () => { const output = execSync( - `node read-data-with-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node read-data-with-database-role.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('SingerId: 1, 
FirstName: Marc, LastName: Richards') + new RegExp('SingerId: 1, FirstName: Marc, LastName: Richards'), ); }); // get_database_roles it('should list database roles', async () => { const output = execSync( - `node get-database-roles.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node get-database-roles.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Role: projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/databaseRoles/public` - ) + `Role: projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/databaseRoles/public`, + ), ); }); it('shoud create a full backup schedule', async () => { const output = execSync( - `node create-full-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule` + `node create-full-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule`, ); assert.match(output, new RegExp('Created full backup schedule')); assert.match( output, new RegExp( - `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule` - ) + `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule`, + ), ); }); it('shoud create an incremental backup schedule', async () => { const output = execSync( - `node create-incremental-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} incremental-backup-schedule` + `node create-incremental-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} incremental-backup-schedule`, ); assert.match(output, new RegExp('Created incremental backup schedule')); assert.match( output, new RegExp( - `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/incremental-backup-schedule` - ) + `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/incremental-backup-schedule`, + ), ); }); it('shoud list backup schedules', async () 
=> { const output = execSync( - `node list-backup-schedules.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}` + `node list-backup-schedules.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}`, ); assert.match( output, new RegExp( - `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule` - ) + `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule`, + ), ); assert.match( output, new RegExp( - `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/incremental-backup-schedule` - ) + `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/incremental-backup-schedule`, + ), ); }); it('shoud get a backup schedule', async () => { const output = execSync( - `node get-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule` + `node get-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule`, ); assert.match( output, new RegExp( - `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule` - ) + `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule`, + ), ); }); it('shoud update a backup schedule', async () => { const output = execSync( - `node update-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule` + `node update-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule`, ); assert.match(output, new RegExp('Updated backup schedule')); assert.match( output, new RegExp( - `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule` - ) + `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/databases/${DATABASE_ID}/backupSchedules/full-backup-schedule`, + ), ); }); it('shoud delete a backup schedule', async () => { const output = 
execSync( - `node delete-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule` + `node delete-backup-schedule.js ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID} full-backup-schedule`, ); assert.match(output, new RegExp('Deleted backup schedule')); }); @@ -1396,7 +1396,7 @@ describe('Autogenerated Admin Clients', () => { const versionTime = rows[0].toJSON().Timestamp.toISOString(); const output = execSync( - `${backupsCmd} createBackup ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID} ${versionTime}` + `${backupsCmd} createBackup ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID} ${versionTime}`, ); assert.match(output, new RegExp(`Backup (.+)${BACKUP_ID} of size`)); }); @@ -1406,11 +1406,11 @@ describe('Autogenerated Admin Clients', () => { const key = await getCryptoKey(KEY_LOCATION_ID1); const output = execSync( - `${backupsCmd} createBackupWithEncryptionKey ${INSTANCE_ID} ${DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}` + `${backupsCmd} createBackupWithEncryptionKey ${INSTANCE_ID} ${DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}`, ); assert.match( output, - new RegExp(`Backup (.+)${ENCRYPTED_BACKUP_ID} of size`) + new RegExp(`Backup (.+)${ENCRYPTED_BACKUP_ID} of size`), ); assert.include(output, `using encryption key ${key.name}`); }); @@ -1419,18 +1419,18 @@ describe('Autogenerated Admin Clients', () => { it('should create a copy of a backup', async () => { const sourceBackupPath = `projects/${PROJECT_ID}/instances/${INSTANCE_ID}/backups/${BACKUP_ID}`; const output = execSync( - `node backups-copy.js ${INSTANCE_ID} ${COPY_BACKUP_ID} ${sourceBackupPath} ${PROJECT_ID}` + `node backups-copy.js ${INSTANCE_ID} ${COPY_BACKUP_ID} ${sourceBackupPath} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`(.*)Backup copy(.*)${COPY_BACKUP_ID} of size(.*)`) + new RegExp(`(.*)Backup copy(.*)${COPY_BACKUP_ID} of size(.*)`), ); }); // cancel_backup it('should cancel a backup of the database', async () => 
{ const output = execSync( - `${backupsCmd} cancelBackup ${INSTANCE_ID} ${DATABASE_ID} ${CANCELLED_BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} cancelBackup ${INSTANCE_ID} ${DATABASE_ID} ${CANCELLED_BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Backup cancelled./); }); @@ -1438,7 +1438,7 @@ describe('Autogenerated Admin Clients', () => { // get_backups it('should list backups in the instance', async () => { const output = execSync( - `${backupsCmd} getBackups ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} getBackups ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.include(output, 'All backups:'); assert.include(output, 'Backups matching backup name:'); @@ -1454,24 +1454,24 @@ describe('Autogenerated Admin Clients', () => { // list_backup_operations it('should list backup operations in the instance', async () => { const output = execSync( - `${backupsCmd} getBackupOperations ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} getBackupOperations ${INSTANCE_ID} ${DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Create Backup Operations:/); assert.match( output, - new RegExp(`Backup (.+)${BACKUP_ID} (.+) is 100% complete`) + new RegExp(`Backup (.+)${BACKUP_ID} (.+) is 100% complete`), ); assert.match(output, /Copy Backup Operations:/); assert.match( output, - new RegExp(`Backup (.+)${COPY_BACKUP_ID} (.+) is 100% complete`) + new RegExp(`Backup (.+)${COPY_BACKUP_ID} (.+) is 100% complete`), ); }); // update_backup_expire_time it('should update the expire time of a backup', async () => { const output = execSync( - `${backupsCmd} updateBackup ${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} updateBackup ${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Expire time updated./); }); @@ -1485,15 +1485,15 @@ describe('Autogenerated Admin Clients', () => { await delay(this.test); const output = execSync( - `${backupsCmd} restoreBackup ${INSTANCE_ID} 
${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} restoreBackup ${INSTANCE_ID} ${RESTORE_DATABASE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Database restored from backup./); assert.match( output, new RegExp( `Database (.+) was restored to ${RESTORE_DATABASE_ID} from backup ` + - `(.+)${BACKUP_ID} with version time (.+)` - ) + `(.+)${BACKUP_ID} with version time (.+)`, + ), ); }); @@ -1508,30 +1508,30 @@ describe('Autogenerated Admin Clients', () => { const key = await getCryptoKey(KEY_LOCATION_ID1); const output = execSync( - `${backupsCmd} restoreBackupWithEncryptionKey ${INSTANCE_ID} ${ENCRYPTED_RESTORE_DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}` + `${backupsCmd} restoreBackupWithEncryptionKey ${INSTANCE_ID} ${ENCRYPTED_RESTORE_DATABASE_ID} ${ENCRYPTED_BACKUP_ID} ${PROJECT_ID} ${key.name}`, ); assert.match(output, /Database restored from backup./); assert.match( output, new RegExp( `Database (.+) was restored to ${ENCRYPTED_RESTORE_DATABASE_ID} from backup ` + - `(.+)${ENCRYPTED_BACKUP_ID} using encryption key ${key.name}` - ) + `(.+)${ENCRYPTED_BACKUP_ID} using encryption key ${key.name}`, + ), ); }); // list_database_operations it('should list database operations in the instance', async () => { const output = execSync( - `${backupsCmd} getDatabaseOperations ${INSTANCE_ID} ${PROJECT_ID}` + `${backupsCmd} getDatabaseOperations ${INSTANCE_ID} ${PROJECT_ID}`, ); assert.match(output, /Optimize Database Operations:/); assert.match( output, new RegExp( `Database (.+)${RESTORE_DATABASE_ID} restored from backup is (\\d+)% ` + - 'optimized' - ) + 'optimized', + ), ); }); @@ -1549,7 +1549,7 @@ describe('Autogenerated Admin Clients', () => { } const output = execSync( - `${backupsCmd} deleteBackup ${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}` + `${backupsCmd} deleteBackup ${INSTANCE_ID} ${BACKUP_ID} ${PROJECT_ID}`, ); assert.match(output, /Backup deleted./); }); @@ -1557,7 +1557,7 @@ describe('Autogenerated Admin Clients', 
() => { // custom_timeout_and_retry it('should insert with custom timeout and retry settings', async () => { const output = execSync( - `${dmlCmd} insertWithCustomTimeoutAndRetrySettings ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${dmlCmd} insertWithCustomTimeoutAndRetrySettings ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, /record inserted./); }); @@ -1565,7 +1565,7 @@ describe('Autogenerated Admin Clients', () => { // get_commit_stats it('should update rows in Albums example table and return CommitStats', async () => { const output = execSync( - `${crudCmd} getCommitStats ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} getCommitStats ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Updated data with (\\d+) mutations')); }); @@ -1573,19 +1573,19 @@ describe('Autogenerated Admin Clients', () => { // create_database_with_version_retention_period it('should create a database with a version retention period', async () => { const output = execSync( - `${schemaCmd} createDatabaseWithVersionRetentionPeriod "${INSTANCE_ID}" "${VERSION_RETENTION_DATABASE_ID}" ${PROJECT_ID}` + `${schemaCmd} createDatabaseWithVersionRetentionPeriod "${INSTANCE_ID}" "${VERSION_RETENTION_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for operation on ${VERSION_RETENTION_DATABASE_ID} to complete...` - ) + `Waiting for operation on ${VERSION_RETENTION_DATABASE_ID} to complete...`, + ), ); assert.match( output, new RegExp( - `Created database ${VERSION_RETENTION_DATABASE_ID} with version retention period.` - ) + `Created database ${VERSION_RETENTION_DATABASE_ID} with version retention period.`, + ), ); assert.include(output, 'Version retention period: 1d'); assert.include(output, 'Earliest version time:'); @@ -1593,54 +1593,56 @@ describe('Autogenerated Admin Clients', () => { it('should create a table with foreign key delete cascade', async () => { const output = execSync( - 
`${createTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${createTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId' - ) + 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId', + ), ); }); it('should alter a table with foreign key delete cascade', async () => { const output = execSync( - `${alterTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${alterTableWithForeignKeyDeleteCascadeCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp('Altered ShoppingCarts table with FKShoppingCartsCustomerName') + new RegExp( + 'Altered ShoppingCarts table with FKShoppingCartsCustomerName', + ), ); }); it('should drop a foreign key constraint delete cascade', async () => { const output = execSync( - `${dropForeignKeyConstraintDeleteCascaseCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}` + `${dropForeignKeyConstraintDeleteCascaseCommand} "${INSTANCE_ID}" "${DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName' - ) + 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName', + ), ); }); describe('observability', () => { it('traces', () => { const output = execSync( - 
`${traceObservabilityCommand} ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}` + `${traceObservabilityCommand} ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}`, ); assert.match(output, /Query: \d+ found./); }); @@ -1669,67 +1671,67 @@ describe('Autogenerated Admin Clients', () => { // create_instance_config it('should create an example custom instance config', async () => { const output = execSync( - `node instance-config-create.js ${SAMPLE_INSTANCE_CONFIG_ID} ${BASE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + `node instance-config-create.js ${SAMPLE_INSTANCE_CONFIG_ID} ${BASE_INSTANCE_CONFIG_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for create operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` - ) + `Waiting for create operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Created instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + new RegExp(`Created instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`), ); }); // update_instance_config it('should update an example custom instance config', async () => { const output = execSync( - `node instance-config-update.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + `node instance-config-update.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for update operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...` - ) + `Waiting for update operation for ${SAMPLE_INSTANCE_CONFIG_ID} to complete...`, + ), ); assert.match( output, - new RegExp(`Updated instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + new RegExp(`Updated instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`), ); }); // delete_instance_config it('should delete an example custom instance config', async () => { const output = execSync( - `node instance-config-delete.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}` + `node instance-config-delete.js ${SAMPLE_INSTANCE_CONFIG_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Deleting 
${SAMPLE_INSTANCE_CONFIG_ID}...`) + new RegExp(`Deleting ${SAMPLE_INSTANCE_CONFIG_ID}...`), ); assert.match( output, - new RegExp(`Deleted instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`) + new RegExp(`Deleted instance config ${SAMPLE_INSTANCE_CONFIG_ID}.`), ); }); // list_instance_config_operations it('should list all instance config operations', async () => { const output = execSync( - `node instance-config-get-operations.js ${PROJECT_ID}` + `node instance-config-get-operations.js ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Available instance config operations for project ${PROJECT_ID}:` - ) + `Available instance config operations for project ${PROJECT_ID}:`, + ), ); assert.include(output, 'Instance config operation for'); assert.include( output, - 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata' + 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata', ); }); @@ -1738,7 +1740,7 @@ describe('Autogenerated Admin Clients', () => { const output = execSync(`node list-instance-configs.js ${PROJECT_ID}`); assert.match( output, - new RegExp(`Available instance configs for project ${PROJECT_ID}:`) + new RegExp(`Available instance configs for project ${PROJECT_ID}:`), ); assert.include(output, 'Available leader options for instance config'); }); @@ -1753,62 +1755,62 @@ describe('Autogenerated Admin Clients', () => { // create_database_with_default_leader it('should create a database with a default leader', async () => { const output = execSync( - `node database-create-with-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER}" ${PROJECT_ID}` + `node database-create-with-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for creation of ${DEFAULT_LEADER_DATABASE_ID} to complete...` - ) + `Waiting for creation of ${DEFAULT_LEADER_DATABASE_ID} to 
complete...`, + ), ); assert.match( output, new RegExp( - `Created database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER}.` - ) + `Created database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER}.`, + ), ); }); // update_database_with_default_leader it('should update a database with a default leader', async () => { const output = execSync( - `node database-update-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER_2}" ${PROJECT_ID}` + `node database-update-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" "${DEFAULT_LEADER_2}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Waiting for updating of ${DEFAULT_LEADER_DATABASE_ID} to complete...` - ) + `Waiting for updating of ${DEFAULT_LEADER_DATABASE_ID} to complete...`, + ), ); assert.match( output, new RegExp( - `Updated database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER_2}.` - ) + `Updated database ${DEFAULT_LEADER_DATABASE_ID} with default leader ${DEFAULT_LEADER_2}.`, + ), ); }); // get_default_leader it('should get the default leader option of a database', async () => { const output = execSync( - `node database-get-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` + `node database-get-default-leader.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}`, ); assert.include( output, - `The default_leader for ${DEFAULT_LEADER_DATABASE_ID} is ${DEFAULT_LEADER_2}` + `The default_leader for ${DEFAULT_LEADER_DATABASE_ID} is ${DEFAULT_LEADER_2}`, ); }); // list_databases it('should list databases on the instance', async () => { const output = execSync( - `node list-databases.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}` + `node list-databases.js "${SAMPLE_INSTANCE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Databases for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}:` - ) + `Databases for 
projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}:`, + ), ); assert.include(output, `(default leader = ${DEFAULT_LEADER_2}`); }); @@ -1816,13 +1818,13 @@ describe('Autogenerated Admin Clients', () => { // get_database_ddl it('should get the ddl of a database', async () => { const output = execSync( - `node database-get-ddl.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}` + `node database-get-ddl.js "${SAMPLE_INSTANCE_ID}" "${DEFAULT_LEADER_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Retrieved database DDL for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}/databases/${DEFAULT_LEADER_DATABASE_ID}:` - ) + `Retrieved database DDL for projects/${PROJECT_ID}/instances/${SAMPLE_INSTANCE_ID}/databases/${DEFAULT_LEADER_DATABASE_ID}:`, + ), ); assert.include(output, 'CREATE TABLE Singers'); }); @@ -1830,13 +1832,13 @@ describe('Autogenerated Admin Clients', () => { // max_commit_delay it('should update rows in Albums example table when max commit delay is set', async () => { const output = execSync( - `node max-commit-delay.js "${INSTANCE_ID}" "${DATABASE_ID}" "${PROJECT_ID}"` + `node max-commit-delay.js "${INSTANCE_ID}" "${DATABASE_ID}" "${PROJECT_ID}"`, ); assert.match( output, new RegExp( - 'Successfully inserted (\\d+) record into the Singers table.' 
- ) + 'Successfully inserted (\\d+) record into the Singers table.', + ), ); }); }); @@ -1859,45 +1861,45 @@ describe('Autogenerated Admin Clients', () => { // create_sequence it('should create a sequence', async () => { const output = execSync( - `node sequence-create.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + `node sequence-create.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Created Seq sequence and Customers table') + new RegExp('Created Seq sequence and Customers table'), ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // alter_sequence it('should alter a sequence', async () => { const output = execSync( - `node sequence-alter.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + `node sequence-alter.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' - ) + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', + ), ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // drop_sequence it('should drop a sequence', async () => { const output = execSync( - `node sequence-drop.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}` + `node sequence-drop.js "${INSTANCE_ID}" "${SEQUENCE_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
- ) + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', + ), ); }); }); @@ -1926,17 +1928,17 @@ describe('Autogenerated Admin Clients', () => { // create_instance_partition it('should create an instance partition', async () => { const output = execSync( - `node instance-partition-create.js "${SAMPLE_INSTANCE_ID}" "my-instance-partition" "${PROJECT_ID}"` + `node instance-partition-create.js "${SAMPLE_INSTANCE_ID}" "my-instance-partition" "${PROJECT_ID}"`, ); assert.match( output, new RegExp( - 'Waiting for operation on my-instance-partition to complete...' - ) + 'Waiting for operation on my-instance-partition to complete...', + ), ); assert.match( output, - new RegExp('Created instance partition my-instance-partition.') + new RegExp('Created instance partition my-instance-partition.'), ); }); }); @@ -1964,65 +1966,65 @@ describe('Autogenerated Admin Clients', () => { // create_pg_database it('should create an example PostgreSQL database', async () => { const output = execSync( - `node pg-database-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-database-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - `Created database ${PG_DATABASE_ID} on instance ${SAMPLE_INSTANCE_ID} with dialect POSTGRESQL.` - ) + `Created database ${PG_DATABASE_ID} on instance ${SAMPLE_INSTANCE_ID} with dialect POSTGRESQL.`, + ), ); }); // pg_interleaving it('should create an interleaved table hierarchy using PostgreSQL dialect', async () => { const output = execSync( - `node pg-interleaving.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-interleaving.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on 
${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - `Created an interleaved table hierarchy in database ${PG_DATABASE_ID} using PostgreSQL dialect.` - ) + `Created an interleaved table hierarchy in database ${PG_DATABASE_ID} using PostgreSQL dialect.`, + ), ); }); // pg_dml_with_parameter it('should execute a DML statement with parameters on a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-with-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-with-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully executed 1 postgreSQL statements using DML') + new RegExp('Successfully executed 1 postgreSQL statements using DML'), ); }); // pg_dml_batch it('should execute a batch of DML statements on a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-batch.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-batch.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Successfully executed 3 postgreSQL statements using Batch DML.' 
- ) + 'Successfully executed 3 postgreSQL statements using Batch DML.', + ), ); }); // pg_dml_partitioned it('should execute a partitioned DML on a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-partitioned.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-partitioned.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Successfully deleted 1 record.')); }); @@ -2030,42 +2032,42 @@ describe('Autogenerated Admin Clients', () => { // pg_query_with_parameters it('should execute a query with parameters on a Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('SingerId: 1, FirstName: Alice, LastName: Henderson') + new RegExp('SingerId: 1, FirstName: Alice, LastName: Henderson'), ); }); // pg_dml_update it('should update a table using parameterized queries on a Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-dml-getting-started-update.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-getting-started-update.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated 1 record in the Singers table.') + new RegExp('Successfully updated 1 record in the Singers table.'), ); }); // pg_add_column it('should add a column to a table in the Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Added MarketingBudget column to Albums table in database ${PG_DATABASE_ID}` - ) + `Added MarketingBudget column to Albums table in database 
${PG_DATABASE_ID}`, + ), ); }); //pg_create_index it('should create an index in the Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-index-create-storing.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-index-create-storing.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Added the AlbumsByAlbumTitle index.')); }); @@ -2073,7 +2075,7 @@ describe('Autogenerated Admin Clients', () => { // pg_schema_information it('should query the information schema metadata in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-schema-information.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-schema-information.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Table: public.albums')); assert.match(output, new RegExp('Table: public.author')); @@ -2084,29 +2086,29 @@ describe('Autogenerated Admin Clients', () => { // pg_ordering_nulls it('should order nulls as per clause in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-ordering-nulls.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-ordering-nulls.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Author ORDER BY FirstName')); assert.match(output, new RegExp('Author ORDER BY FirstName DESC')); assert.match(output, new RegExp('Author ORDER BY FirstName NULLS FIRST')); assert.match( output, - new RegExp('Author ORDER BY FirstName DESC NULLS LAST') + new RegExp('Author ORDER BY FirstName DESC NULLS LAST'), ); }); // pg_numeric_data_type it('should create a table, insert and query pg numeric data', async () => { const output = execSync( - `node pg-numeric-data-type.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-numeric-data-type.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new 
RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, - new RegExp(`Added table venues to database ${PG_DATABASE_ID}.`) + new RegExp(`Added table venues to database ${PG_DATABASE_ID}.`), ); assert.match(output, new RegExp('Inserted data.')); assert.match(output, new RegExp('VenueId: 4, Revenue: 97372.3863')); @@ -2117,24 +2119,24 @@ describe('Autogenerated Admin Clients', () => { // pg_jsonb_add_column it('should add a jsonb column to a table', async () => { const output = execSync( - `node pg-jsonb-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-jsonb-add-column.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`) + new RegExp(`Waiting for operation on ${PG_DATABASE_ID} to complete...`), ); assert.match( output, new RegExp( - `Added jsonb column to table venues to database ${PG_DATABASE_ID}.` - ) + `Added jsonb column to table venues to database ${PG_DATABASE_ID}.`, + ), ); }); // pg_jsonb_insert_data it('should insert pg jsonb data', async () => { const output = execSync( - `node pg-jsonb-update-data.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-jsonb-update-data.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Updated data.')); }); @@ -2142,24 +2144,24 @@ describe('Autogenerated Admin Clients', () => { // pg_jsonb_query_data it('should query pg jsonb data', async () => { const output = execSync( - `node pg-jsonb-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-jsonb-query-parameter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('VenueId: 19, Details: {"value":{"open":true,"rating":9}}') + new RegExp('VenueId: 19, Details: {"value":{"open":true,"rating":9}}'), ); 
}); // pg_case_sensitivity it('should create case sensitive table and query the information in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-case-sensitivity.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-case-sensitivity.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Created table with case sensitive names in database ${PG_DATABASE_ID} using PostgreSQL dialect.` - ) + `Created table with case sensitive names in database ${PG_DATABASE_ID} using PostgreSQL dialect.`, + ), ); assert.match(output, new RegExp('Inserted data using mutations.')); assert.match(output, new RegExp('Concerts Table Data using Mutations:')); @@ -2170,7 +2172,7 @@ describe('Autogenerated Admin Clients', () => { // pg_datatypes_casting it('should use cast operator to cast from one data type to another in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-datatypes-casting.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-datatypes-casting.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Data types after casting')); }); @@ -2178,7 +2180,7 @@ describe('Autogenerated Admin Clients', () => { // pg_functions it('should call a server side function on a Spanner PostgreSQL database.', async () => { const output = execSync( - `node pg-functions.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-functions.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('1284352323 seconds after epoch is')); }); @@ -2186,11 +2188,11 @@ describe('Autogenerated Admin Clients', () => { // pg_dml_returning_insert it('should insert records using DML Returning in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-returning-insert ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-returning-insert 
${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully inserted 1 record into the Singers table') + new RegExp('Successfully inserted 1 record into the Singers table'), ); assert.match(output, new RegExp('Virginia Watson')); }); @@ -2198,11 +2200,11 @@ describe('Autogenerated Admin Clients', () => { // pg_dml_returning_update it('should update records using DML Returning in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-returning-update ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-returning-update ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully updated 1 record into the Singers table') + new RegExp('Successfully updated 1 record into the Singers table'), ); assert.match(output, new RegExp('Virginia1 Watson1')); }); @@ -2210,11 +2212,11 @@ describe('Autogenerated Admin Clients', () => { // pg_dml_returning_delete it('should delete records using DML Returning in a Spanner PostgreSQL database', async () => { const output = execSync( - `node pg-dml-returning-delete ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-dml-returning-delete ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Successfully deleted 1 record from the Singers table') + new RegExp('Successfully deleted 1 record from the Singers table'), ); assert.match(output, new RegExp('Virginia1 Watson1')); }); @@ -2222,65 +2224,65 @@ describe('Autogenerated Admin Clients', () => { // pg_create_sequence it('should create a sequence', async () => { const output = execSync( - `node pg-sequence-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-sequence-create.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, - new RegExp('Created Seq sequence and Customers table') + new RegExp('Created Seq sequence and Customers table'), ); 
assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // pg_alter_sequence it('should alter a sequence', async () => { const output = execSync( - `node pg-sequence-alter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-sequence-alter.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.' - ) + 'Altered Seq sequence to skip an inclusive range between 1000 and 5000000.', + ), ); assert.match( output, - new RegExp('Number of customer records inserted is: 3') + new RegExp('Number of customer records inserted is: 3'), ); }); // pg_drop_sequence it('should drop a sequence', async () => { const output = execSync( - `node pg-sequence-drop.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}` + `node pg-sequence-drop.js ${SAMPLE_INSTANCE_ID} ${PG_DATABASE_ID} ${PROJECT_ID}`, ); assert.match( output, new RegExp( - 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.' 
- ) + 'Altered Customers table to drop DEFAULT from CustomerId column and dropped the Seq sequence.', + ), ); }); // directed_read_options it('should run read-only transaction with directed read options set', async () => { const output = execSync( - `node directed-reads.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node directed-reads.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); console.log(output); assert.match( output, new RegExp( - 'SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace' - ) + 'SingerId: 2, AlbumId: 2, AlbumTitle: Forever Hold your Peace', + ), ); assert.match( output, new RegExp( - 'Successfully executed read-only transaction with directedReadOptions' - ) + 'Successfully executed read-only transaction with directedReadOptions', + ), ); }); }); @@ -2309,16 +2311,16 @@ describe('Autogenerated Admin Clients', () => { }); console.log( - `Waiting for creation of ${PROTO_DATABASE_ID} to complete...` + `Waiting for creation of ${PROTO_DATABASE_ID} to complete...`, ); await operation.promise(); console.log( - `Created database ${PROTO_DATABASE_ID} on instance ${INSTANCE_ID}.` + `Created database ${PROTO_DATABASE_ID} on instance ${INSTANCE_ID}.`, ); // Insert seed data into the database tables execSync( - `${crudCmd} insert ${INSTANCE_ID} ${PROTO_DATABASE_ID} ${PROJECT_ID}` + `${crudCmd} insert ${INSTANCE_ID} ${PROTO_DATABASE_ID} ${PROJECT_ID}`, ); }); @@ -2328,33 +2330,33 @@ describe('Autogenerated Admin Clients', () => { it('should add proto message and enum columns', async () => { const output = execSync( - `node proto-type-add-column.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}` + `node proto-type-add-column.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match( output, new RegExp( - `Altered table "Singers" on database ${PROTO_DATABASE_ID} on instance ${INSTANCE_ID} with proto descriptors.` - ) + `Altered table "Singers" on database ${PROTO_DATABASE_ID} on instance ${INSTANCE_ID} with proto 
descriptors.`, + ), ); }); it('update data with proto message and enum columns', async () => { const output = execSync( - `node proto-update-data.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}` + `node proto-update-data.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match(output, new RegExp('Data updated')); }); it('update data with proto message and enum columns using DML', async () => { const output = execSync( - `node proto-update-data-dml.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}` + `node proto-update-data-dml.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}`, ); assert.include(output, '1 record updated.'); }); it('query data with proto message and enum columns', async () => { const output = execSync( - `node proto-query-data.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}` + `node proto-query-data.js "${INSTANCE_ID}" "${PROTO_DATABASE_ID}" ${PROJECT_ID}`, ); assert.match(output, new RegExp('SingerId: 2')); }); @@ -2387,11 +2389,11 @@ describe('Autogenerated Admin Clients', () => { }); await operation.promise(); console.log( - `Created temp instance, using + ${multi_region_instance.formattedName_}...` + `Created temp instance, using + ${multi_region_instance.formattedName_}...`, ); } else { console.log( - `Not creating temp instance, using + ${multi_region_instance.formattedName_}...` + `Not creating temp instance, using + ${multi_region_instance.formattedName_}...`, ); } }); @@ -2423,17 +2425,17 @@ describe('Autogenerated Admin Clients', () => { "${MULTI_REGION_INSTANCE_ID}" \ "${MR_CMEK_DB}" \ "${PROJECT_ID}" \ - "${key1.name},${key2.name},${key3.name}"` + "${key1.name},${key2.name},${key3.name}"`, ); assert.match( output, - new RegExp(`Waiting for operation on ${MR_CMEK_DB} to complete...`) + new RegExp(`Waiting for operation on ${MR_CMEK_DB} to complete...`), ); assert.match( output, new RegExp( - `Created database ${MR_CMEK_DB} on instance ${MULTI_REGION_INSTANCE_ID}.` - ) + `Created database 
${MR_CMEK_DB} on instance ${MULTI_REGION_INSTANCE_ID}.`, + ), ); assert.match(output, new RegExp('Database encrypted with keys')); }); @@ -2445,7 +2447,7 @@ describe('Autogenerated Admin Clients', () => { ${MR_CMEK_DB} \ ${MR_CMEK_BACKUP} \ ${PROJECT_ID} \ - "${key1.name},${key2.name},${key3.name}"` + "${key1.name},${key2.name},${key3.name}"`, ); assert.match(output, new RegExp(`Backup (.+)${MR_CMEK_BACKUP} of size`)); assert.include(output, 'using encryption key'); @@ -2459,11 +2461,11 @@ describe('Autogenerated Admin Clients', () => { ${MR_CMEK_COPIED} \ ${sourceBackupPath} \ ${PROJECT_ID} \ - "${key1.name},${key2.name},${key3.name}"` + "${key1.name},${key2.name},${key3.name}"`, ); assert.match( output, - new RegExp(`(.*)Backup copy(.*)${MR_CMEK_COPIED} of size(.*)`) + new RegExp(`(.*)Backup copy(.*)${MR_CMEK_COPIED} of size(.*)`), ); }); @@ -2480,22 +2482,22 @@ describe('Autogenerated Admin Clients', () => { ${MR_CMEK_RESTORED} \ ${MR_CMEK_BACKUP} \ ${PROJECT_ID} \ - "${key1.name},${key2.name},${key3.name}"` + "${key1.name},${key2.name},${key3.name}"`, ); assert.match(output, /Database restored from backup./); assert.match( output, new RegExp( `Database (.+) was restored to ${MR_CMEK_RESTORED} from backup ` + - `(.+)${MR_CMEK_BACKUP}.` - ) + `(.+)${MR_CMEK_BACKUP}.`, + ), ); }); // add split points it('should add split points', async () => { const output = execSync( - `node database-add-split-points.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}` + `node database-add-split-points.js ${INSTANCE_ID} ${DATABASE_ID} ${PROJECT_ID}`, ); assert.match(output, new RegExp('Added Split Points')); }); diff --git a/samples/table-alter-with-foreign-key-delete-cascade.js b/samples/table-alter-with-foreign-key-delete-cascade.js index f3bf4e466..382b918ec 100644 --- a/samples/table-alter-with-foreign-key-delete-cascade.js +++ b/samples/table-alter-with-foreign-key-delete-cascade.js @@ -50,7 +50,7 @@ function main(instanceId, databaseId, projectId) { database: 
databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); diff --git a/samples/table-create-with-foreign-key-delete-cascade.js b/samples/table-create-with-foreign-key-delete-cascade.js index b0e9b215e..93c1791bf 100644 --- a/samples/table-create-with-foreign-key-delete-cascade.js +++ b/samples/table-create-with-foreign-key-delete-cascade.js @@ -56,7 +56,7 @@ function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -65,7 +65,7 @@ function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId' + 'Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId', ); } createTableWithForeignKeyDeleteCascade(); diff --git a/samples/table-drop-foreign-key-constraint-delete-cascade.js b/samples/table-drop-foreign-key-constraint-delete-cascade.js index ac9fc5699..05a2880c3 100644 --- a/samples/table-drop-foreign-key-constraint-delete-cascade.js +++ b/samples/table-drop-foreign-key-constraint-delete-cascade.js @@ -48,7 +48,7 @@ function main(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -57,7 +57,7 @@ function main(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName' + 'Altered ShoppingCarts table to drop FKShoppingCartsCustomerName', ); } dropForeignKeyConstraintDeleteCascade(); diff --git a/samples/timestamp.js b/samples/timestamp.js index 0f83f0e98..2453b167b 100644 --- a/samples/timestamp.js +++ b/samples/timestamp.js @@ -52,7 +52,7 @@ async function createTableWithTimestamp(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - 
databaseId + databaseId, ), statements: request, }); @@ -162,7 +162,7 @@ async function queryTableWithTimestamp(instanceId, databaseId, projectId) { rows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, VenueId: ${json.VenueId}, EventDate: ${json.EventDate}, Revenue: ${json.Revenue}, LastUpdateTime: ${json.LastUpdateTime}` + `SingerId: ${json.SingerId}, VenueId: ${json.VenueId}, EventDate: ${json.EventDate}, Revenue: ${json.Revenue}, LastUpdateTime: ${json.LastUpdateTime}`, ); }); } catch (err) { @@ -205,7 +205,7 @@ async function addTimestampColumn(instanceId, databaseId, projectId) { database: databaseAdminClient.databasePath( projectId, instanceId, - databaseId + databaseId, ), statements: request, }); @@ -215,7 +215,7 @@ async function addTimestampColumn(instanceId, databaseId, projectId) { await operation.promise(); console.log( - 'Added LastUpdateTime as a commit timestamp column in Albums table.' + 'Added LastUpdateTime as a commit timestamp column in Albums table.', ); } catch (err) { console.error('ERROR:', err); @@ -344,7 +344,7 @@ async function queryWithTimestamp(instanceId, databaseId, projectId) { json.AlbumId }, MarketingBudget: ${ json.MarketingBudget ? 
json.MarketingBudget : null - }, LastUpdateTime: ${json.LastUpdateTime}` + }, LastUpdateTime: ${json.LastUpdateTime}`, ); }); } catch (err) { @@ -366,15 +366,15 @@ require('yargs') createTableWithTimestamp( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'insertWithTimestamp ', 'Inserts new rows of data including commit timestamps into an example Cloud Spanner table.', {}, opts => - insertWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + insertWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryTableWithTimestamp ', @@ -384,22 +384,22 @@ require('yargs') queryTableWithTimestamp( opts.instanceName, opts.databaseName, - opts.projectId - ) + opts.projectId, + ), ) .command( 'addTimestampColumn ', 'Adds a example commit timestamp column to an existing example Cloud Spanner table.', {}, opts => - addTimestampColumn(opts.instanceName, opts.databaseName, opts.projectId) + addTimestampColumn(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'updateWithTimestamp ', 'Modifies existing rows of data in an example Cloud Spanner table with a commit timestamp column..', {}, opts => - updateWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + updateWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'queryWithTimestamp ', @@ -407,25 +407,25 @@ require('yargs') column (LastUpdateTime) added by addTimestampColumn.`, {}, opts => - queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId) + queryWithTimestamp(opts.instanceName, opts.databaseName, opts.projectId), ) .example( - 'node $0 createTableWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 createTableWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 insertWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 insertWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( 
- 'node $0 queryTableWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 queryTableWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 addTimestampColumn "my-instance" "my-database" "my-project-id"' + 'node $0 addTimestampColumn "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 updateWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 updateWithTimestamp "my-instance" "my-database" "my-project-id"', ) .example( - 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"' + 'node $0 queryWithTimestamp "my-instance" "my-database" "my-project-id"', ) .wrap(120) .recommendCommands() diff --git a/samples/transaction-tag.js b/samples/transaction-tag.js index e9be9ac6f..4a9c857c5 100644 --- a/samples/transaction-tag.js +++ b/samples/transaction-tag.js @@ -75,7 +75,7 @@ function main(instanceId, databaseId, projectId) { console.log('Inserted new outdoor venue'); await tx.commit(); - } + }, ); } catch (err) { console.error('ERROR:', err); diff --git a/samples/transaction.js b/samples/transaction.js index 47406b939..b72d9d3a3 100644 --- a/samples/transaction.js +++ b/samples/transaction.js @@ -51,7 +51,7 @@ function readOnlyTransaction(instanceId, databaseId, projectId) { qOneRows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}` + `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}`, ); }); @@ -67,7 +67,7 @@ function readOnlyTransaction(instanceId, databaseId, projectId) { qTwoRows.forEach(row => { const json = row.toJSON(); console.log( - `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}` + `SingerId: ${json.SingerId}, AlbumId: ${json.AlbumId}, AlbumTitle: ${json.AlbumTitle}`, ); }); @@ -138,7 +138,7 @@ function readWriteTransaction(instanceId, databaseId, projectId) { // Makes sure the second album's budget is 
large enough if (secondBudget < transferAmount) { throw new Error( - `The second album's budget (${secondBudget}) is less than the transfer amount (${transferAmount}).` + `The second album's budget (${secondBudget}) is less than the transfer amount (${transferAmount}).`, ); } }), @@ -181,7 +181,7 @@ function readWriteTransaction(instanceId, databaseId, projectId) { }) .then(() => { console.log( - `Successfully executed read-write transaction to transfer ${transferAmount} from Album 2 to Album 1.` + `Successfully executed read-write transaction to transfer ${transferAmount} from Album 2 to Album 1.`, ); }) .catch(err => { @@ -203,14 +203,18 @@ require('yargs') 'Execute a read-only transaction on an example Cloud Spanner table.', {}, opts => - readOnlyTransaction(opts.instanceName, opts.databaseName, opts.projectId) + readOnlyTransaction(opts.instanceName, opts.databaseName, opts.projectId), ) .command( 'readWrite ', 'Execute a read-write transaction on an example Cloud Spanner table.', {}, opts => - readWriteTransaction(opts.instanceName, opts.databaseName, opts.projectId) + readWriteTransaction( + opts.instanceName, + opts.databaseName, + opts.projectId, + ), ) .example('node $0 readOnly "my-instance" "my-database" "my-project-id"') .example('node $0 readWrite "my-instance" "my-database" "my-project-id"') diff --git a/samples/update-backup-schedule.js b/samples/update-backup-schedule.js index c2b6f18e6..41e3fcbcb 100644 --- a/samples/update-backup-schedule.js +++ b/samples/update-backup-schedule.js @@ -23,7 +23,7 @@ function main( projectId = 'my-project-id', instanceId = 'my-instance-id', databaseId = 'my-database-id', - scheduleId = 'my-schedule-id' + scheduleId = 'my-schedule-id', ) { async function updateBackupSchedule() { // [START spanner_update_backup_schedule] @@ -51,7 +51,7 @@ function main( projectId, instanceId, databaseId, - scheduleId + scheduleId, ), spec: { cronSpec: { diff --git a/samples/update-instance-default-backup-schedule-type.js 
b/samples/update-instance-default-backup-schedule-type.js index 89bd07cc3..cd222a56b 100644 --- a/samples/update-instance-default-backup-schedule-type.js +++ b/samples/update-instance-default-backup-schedule-type.js @@ -58,7 +58,7 @@ function main(instanceId, projectId) { }); console.log( `Instance ${instanceId} has been updated with the ${metadata.defaultBackupScheduleType}` + - ' default backup schedule type.' + ' default backup schedule type.', ); } catch (err) { console.error('ERROR:', err); diff --git a/scripts/cleanup.js b/scripts/cleanup.js index 25c1e7f40..88f306a0f 100644 --- a/scripts/cleanup.js +++ b/scripts/cleanup.js @@ -37,9 +37,9 @@ async function deleteStaleInstances(labelFilter) { limit(() => setTimeout(() => { instance.delete(); - }, 500) - ) - ) + }, 500), + ), + ), ); } diff --git a/src/backup.ts b/src/backup.ts index ba4753bb8..ece00c33e 100644 --- a/src/backup.ts +++ b/src/backup.ts @@ -90,7 +90,7 @@ type DeleteCallback = RequestCallback; interface BackupRequest { ( config: RequestConfig, - callback: ResourceCallback + callback: ResourceCallback, ): void; (config: RequestConfig, callback: RequestCallback): void; } @@ -207,15 +207,15 @@ class Backup { * ``` */ create( - options: CreateBackupOptions | CopyBackupOptions + options: CreateBackupOptions | CopyBackupOptions, ): Promise | Promise; create( options: CreateBackupOptions | CopyBackupOptions, - callback: CreateBackupCallback | CopyBackupCallback + callback: CreateBackupCallback | CopyBackupCallback, ): void; create( options: CreateBackupOptions | CopyBackupOptions, - callback?: CreateBackupCallback | CopyBackupCallback + callback?: CreateBackupCallback | CopyBackupCallback, ): Promise | Promise | void { const gaxOpts = options.gaxOptions; if ('databasePath' in options) { @@ -231,7 +231,7 @@ class Backup { }; if ('versionTime' in options) { reqOpts.backup!.versionTime = Spanner.timestamp( - options.versionTime + options.versionTime, ).toStruct(); } if ( @@ -256,7 +256,7 @@ class Backup { 
return; } callback!(null, this, operation, resp); - } + }, ); } else if (this.sourceName) { delete options.gaxOptions; @@ -278,7 +278,7 @@ class Backup { return; } callback!(null, this, operation, resp); - } + }, ); } } @@ -322,7 +322,7 @@ class Backup { getMetadata(gaxOptions: CallOptions, callback: GetMetadataCallback): void; getMetadata( gaxOptionsOrCallback?: CallOptions | GetMetadataCallback, - cb?: GetMetadataCallback + cb?: GetMetadataCallback, ): void | Promise { const callback = typeof gaxOptionsOrCallback === 'function' @@ -348,7 +348,7 @@ class Backup { this.metadata = response; } callback!(err, response); - } + }, ); } @@ -485,25 +485,25 @@ class Backup { * ``` */ updateExpireTime( - expireTime: string | number | p.ITimestamp | PreciseDate + expireTime: string | number | p.ITimestamp | PreciseDate, ): Promise; updateExpireTime( expireTime: string | number | p.ITimestamp | PreciseDate, - gaxOptions?: CallOptions + gaxOptions?: CallOptions, ): Promise; updateExpireTime( expireTime: string | number | p.ITimestamp | PreciseDate, - callback: UpdateExpireTimeCallback + callback: UpdateExpireTimeCallback, ): void; updateExpireTime( expireTime: string | number | p.ITimestamp | PreciseDate, gaxOptions: CallOptions, - callback: UpdateExpireTimeCallback + callback: UpdateExpireTimeCallback, ): void; updateExpireTime( expireTime: string | number | p.ITimestamp | PreciseDate, gaxOptionsOrCallback?: CallOptions | UpdateExpireTimeCallback, - cb?: UpdateExpireTimeCallback + cb?: UpdateExpireTimeCallback, ): void | Promise { const callback = typeof gaxOptionsOrCallback === 'function' @@ -533,7 +533,7 @@ class Backup { }, (err, response) => { callback!(err, response); - } + }, ); } @@ -561,7 +561,7 @@ class Backup { delete(gaxOptions: CallOptions, callback: DeleteCallback): void; delete( gaxOptionsOrCallback?: CallOptions | DeleteCallback, - cb?: DeleteCallback + cb?: DeleteCallback, ): void | Promise { const callback = typeof gaxOptionsOrCallback === 'function' @@ 
-585,7 +585,7 @@ class Backup { }, err => { callback!(err); - } + }, ); } diff --git a/src/batch-transaction.ts b/src/batch-transaction.ts index ee3a53a3e..f52e8c9b7 100644 --- a/src/batch-transaction.ts +++ b/src/batch-transaction.ts @@ -18,11 +18,20 @@ import {PreciseDate} from '@google-cloud/precise-date'; import {promisifyAll} from '@google-cloud/promisify'; import * as extend from 'extend'; import * as is from 'is'; -import {Snapshot} from './transaction'; +import { + ExecuteSqlRequest, + ReadCallback, + ReadRequest, + ReadResponse, + RunCallback, + RunResponse, + Snapshot, +} from './transaction'; import {google} from '../protos/protos'; import {Session, Database} from '.'; import { CLOUD_RESOURCE_HEADER, + ResourceCallback, addLeaderAwareRoutingHeader, } from '../src/common'; import {startTrace, setSpanError, traceConfig} from './instrument'; @@ -34,6 +43,26 @@ export interface TransactionIdentifier { timestamp?: google.protobuf.ITimestamp; } +export type CreateReadPartitionsResponse = [ + ReadRequest[], + google.spanner.v1.IPartitionResponse, +]; + +export type CreateReadPartitionsCallback = ResourceCallback< + ReadRequest[], + google.spanner.v1.IPartitionResponse +>; + +export type CreateQueryPartitionsResponse = [ + ExecuteSqlRequest[], + google.spanner.v1.IPartitionResponse, +]; + +export type CreateQueryPartitionsCallback = ResourceCallback< + ExecuteSqlRequest[], + google.spanner.v1.IPartitionResponse +>; + /** * Use a BatchTransaction object to create partitions and read/query against * your Cloud Spanner database. @@ -93,17 +122,17 @@ class BatchTransaction extends Snapshot { /** * @see [`ExecuteSqlRequest`](https://cloud.google.com/spanner/docs/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest) * @typedef {object} QueryPartition - * @property {string} partitionToken The partition token. + * @property {string} partitionToken A token representing the partition, used to identify and execute the partition at a later time. 
*/ /** * @typedef {array} CreateQueryPartitionsResponse - * @property {QueryPartition[]} 0 List of query partitions. + * @property {ExecuteSqlRequest[]} 0 Array of ExecuteSqlRequest partitions. * @property {object} 1 The full API response. */ /** * @callback CreateQueryPartitionsCallback * @param {?Error} err Request error, if any. - * @param {QueryPartition[]} partitions List of query partitions. + * @param {ExecuteSqlRequest[]} partitions Array of ExecuteSqlRequest partitions. * @param {object} apiResponse The full API response. */ /** @@ -111,35 +140,42 @@ class BatchTransaction extends Snapshot { * operation in parallel. Partitions become invalid when the transaction used * to create them is closed. * - * @param {string|object} query A SQL query or - * [`ExecuteSqlRequest`](https://cloud.google.com/spanner/docs/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest) - * object. + * @param {string|ExecuteSqlRequest} query - A SQL query string or an {@link ExecuteSqlRequest} object. + * If a string is provided, it will be wrapped into an `ExecuteSqlRequest`. * @param {object} [query.gaxOptions] Request configuration options, * See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} * for more details. * @param {object} [query.params] A map of parameter name to values. * @param {object} [query.partitionOptions] A map of partition options. * @param {object} [query.types] A map of parameter types. - * @param {CreateQueryPartitionsCallback} [callback] Callback callback function. - * @returns {Promise} + * @param {CreateQueryPartitionsCallback} [callback] - Optional Callback function. If not provided, a promise is returned. + * @returns {Promise|void} A promise resolving to an array of + * `ExecuteSqlRequest' partitions and `IPartitionResponse` , or void if a callback is provided. 
* * @example include:samples/batch.js * region_tag:spanner_batch_client */ - createQueryPartitions(query, callback) { - if (is.string(query)) { - query = { - sql: query, - }; - } + createQueryPartitions( + query: string | ExecuteSqlRequest, + ): Promise; + createQueryPartitions( + query: string | ExecuteSqlRequest, + callback: CreateQueryPartitionsCallback, + ): void; + createQueryPartitions( + query: string | ExecuteSqlRequest, + cb?: CreateQueryPartitionsCallback, + ): void | Promise { + const request: ExecuteSqlRequest = + typeof query === 'string' ? {sql: query} : query; - const reqOpts = Object.assign({}, query, Snapshot.encodeParams(query)); + const reqOpts = Object.assign({}, request, Snapshot.encodeParams(request)); - delete reqOpts.gaxOptions; - delete reqOpts.types; + delete (reqOpts as any).gaxOptions; + delete (reqOpts as any).types; const traceConfig: traceConfig = { - sql: query, + sql: request.sql, opts: this._observabilityOptions, dbName: this.getDBName(), }; @@ -157,7 +193,7 @@ class BatchTransaction extends Snapshot { client: 'SpannerClient', method: 'partitionQuery', reqOpts, - gaxOpts: query.gaxOptions, + gaxOpts: request.gaxOptions, headers: injectRequestIDIntoHeaders(headers, this.session), }, (err, partitions, resp) => { @@ -166,10 +202,10 @@ class BatchTransaction extends Snapshot { } span.end(); - callback(err, partitions, resp); - } + cb!(err, partitions, resp); + }, ); - } + }, ); } @@ -234,26 +270,26 @@ class BatchTransaction extends Snapshot { span.end(); callback(null, partitions, resp); }); - } + }, ); } /** * @typedef {object} ReadPartition * @mixes ReadRequestOptions - * @property {string} partitionToken The partition token. 
- * @property {object} [gaxOptions] Request configuration options, - * See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} + * @property {string} partitionToken partitionToken A token representing the partition, used to identify and execute the partition at a later time. + * @property {object} [gaxOptions] optional request configuration options, + * See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} * for more details. */ /** * @typedef {array} CreateReadPartitionsResponse - * @property {ReadPartition[]} 0 List of read partitions. + * @property {ReadPartition[]} 0 Array of read partitions. * @property {object} 1 The full API response. */ /** * @callback CreateReadPartitionsCallback * @param {?Error} err Request error, if any. - * @param {ReadPartition[]} partitions List of read partitions. + * @param {ReadPartition[]} partitions Array of read partitions. * @param {object} apiResponse The full API response. */ /** @@ -261,12 +297,23 @@ class BatchTransaction extends Snapshot { * operation in parallel. Partitions become invalid when the transaction used * to create them is closed. * - * @param {ReadRequestOptions} options Configuration object, describing what to + * @param {ReadRequest} options Configuration object, describing what to * read from. * @param {CreateReadPartitionsCallback} [callback] Callback function. - * @returns {Promise} + * @returns {Promise|void} A promise that resolves + * to an array containing the read partitions and the full API response, or `void` if a callback is provided. 
*/ - createReadPartitions(options, callback) { + createReadPartitions( + options: ReadRequest, + ): Promise; + createReadPartitions( + options: ReadRequest, + callback: CreateReadPartitionsCallback, + ): void; + createReadPartitions( + options: ReadRequest, + cb?: CreateReadPartitionsCallback, + ): void | Promise { const traceConfig: traceConfig = { opts: this._observabilityOptions, dbName: this.getDBName(), @@ -303,35 +350,56 @@ class BatchTransaction extends Snapshot { } span.end(); - callback(err, partitions, resp); - } + cb!(err, partitions, resp); + }, ); - } + }, ); } /** - * Executes partition. + * Executes partition using either a read or a SQL query, depending on the type of partition provided. * - * @see {@link Transaction#read} when using {@link ReadPartition}. - * @see {@link Transaction#run} when using {@link QueryParition}. + * @param {ReadRequest|ExecuteSqlRequest} partition The partition object to execute. + * This can either be a `ReadPartition` or a `QueryPartition`. * - * @param {ReadPartition|QueryParition} partition The partition object. - * @param {object} [partition.gaxOptions] Request configuration options, - * See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} - * for more details. - * @param {TransactionRequestReadCallback|RunCallback} [callback] Callback - * function. - * @returns {Promise|Promise} + * @param {ReadCallback|RunCallback} [callback] Optional Callback function. If not provided, + * a promise will be returned. + * + * If the partition is a read partition, it will execute a read using {@link Transaction#read} + * @see {@link Transaction#read} when using {@link ReadRequest}. + * + * If the partition is query partition, it will execute a SQL query using {@link Transaction#run} + * @see {@link Transaction#run} when using {@link ExecuteSqlRequest}. + * + * @returns {Promise|void} Returns a promise when no callback is provided, + * or void when a callback is used. 
* * @example include:samples/batch.js * region_tag:spanner_batch_execute_partitions */ - execute(partition, callback) { - if (is.string(partition.table)) { - this.read(partition.table, partition, callback); + execute( + partition: ReadRequest | ExecuteSqlRequest, + ): Promise; + execute( + partition: ReadRequest | ExecuteSqlRequest, + callback: ReadCallback | RunCallback, + ): void; + execute( + partition: ReadRequest | ExecuteSqlRequest, + cb?: ReadCallback | RunCallback, + ): void | Promise { + const isRead = typeof (partition as ReadRequest).table === 'string'; + + if (isRead) { + this.read( + (partition as ReadRequest).table!, + partition as ReadRequest, + cb as ReadCallback, + ); return; } - this.run(partition, callback); + + this.run(partition as ExecuteSqlRequest, cb as RunCallback); } /** * Executes partition in streaming mode. diff --git a/src/codec.ts b/src/codec.ts index 806c8a863..7ef850c3d 100644 --- a/src/codec.ts +++ b/src/codec.ts @@ -15,7 +15,7 @@ */ import {GrpcService} from './common-grpc/service'; import {PreciseDate} from '@google-cloud/precise-date'; -import arrify = require('arrify'); +import {toArray} from './helper'; import {Big} from 'big.js'; import * as is from 'is'; import {common as p} from 'protobufjs'; @@ -137,7 +137,7 @@ export class SpannerDate extends Date { return `${year.padStart(4, '0')}-${month.padStart(2, '0')}-${date.padStart( 2, - '0' + '0', )}`; } } @@ -297,7 +297,7 @@ export class ProtoMessage { this.value = protoMessageParams.value; } else if (protoMessageParams.messageFunction) { this.value = protoMessageParams.messageFunction['encode']( - protoMessageParams.value + protoMessageParams.value, ).finish(); } else { throw new GoogleError(`protoMessageParams cannot be used to construct @@ -310,7 +310,7 @@ export class ProtoMessage { toJSON(): string { if (this.messageFunction) { return this.messageFunction['toObject']( - this.messageFunction['decode'](this.value) + this.messageFunction['decode'](this.value), ); } return 
this.value.toString(); @@ -442,19 +442,19 @@ export class Interval { constructor(months: number, days: number, nanoseconds: bigint) { if (!is.integer(months)) { throw new GoogleError( - `Invalid months: ${months}, months should be an integral value` + `Invalid months: ${months}, months should be an integral value`, ); } if (!is.integer(days)) { throw new GoogleError( - `Invalid days: ${days}, days should be an integral value` + `Invalid days: ${days}, days should be an integral value`, ); } if (is.null(nanoseconds) || is.undefined(nanoseconds)) { throw new GoogleError( - `Invalid nanoseconds: ${nanoseconds}, nanoseconds should be a valid bigint value` + `Invalid nanoseconds: ${nanoseconds}, nanoseconds should be a valid bigint value`, ); } @@ -504,13 +504,13 @@ export class Interval { static fromSeconds(seconds: number): Interval { if (!is.integer(seconds)) { throw new GoogleError( - `Invalid seconds: ${seconds}, seconds should be an integral value` + `Invalid seconds: ${seconds}, seconds should be an integral value`, ); } return new Interval( 0, 0, - BigInt(Interval.NANOSECONDS_PER_SECOND) * BigInt(seconds) + BigInt(Interval.NANOSECONDS_PER_SECOND) * BigInt(seconds), ); } @@ -520,13 +520,13 @@ export class Interval { static fromMilliseconds(milliseconds: number): Interval { if (!is.integer(milliseconds)) { throw new GoogleError( - `Invalid milliseconds: ${milliseconds}, milliseconds should be an integral value` + `Invalid milliseconds: ${milliseconds}, milliseconds should be an integral value`, ); } return new Interval( 0, 0, - BigInt(Interval.NANOSECONDS_PER_MILLISECOND) * BigInt(milliseconds) + BigInt(Interval.NANOSECONDS_PER_MILLISECOND) * BigInt(milliseconds), ); } @@ -536,13 +536,13 @@ export class Interval { static fromMicroseconds(microseconds: number): Interval { if (!is.integer(microseconds)) { throw new GoogleError( - `Invalid microseconds: ${microseconds}, microseconds should be an integral value` + `Invalid microseconds: ${microseconds}, microseconds 
should be an integral value`, ); } return new Interval( 0, 0, - BigInt(Interval.NANOSECONDS_PER_MICROSECOND) * BigInt(microseconds) + BigInt(Interval.NANOSECONDS_PER_MICROSECOND) * BigInt(microseconds), ); } @@ -572,7 +572,7 @@ export class Interval { const hours: number = parseInt(getNullOrDefault(5).replace('H', '')); const minutes: number = parseInt(getNullOrDefault(6).replace('M', '')); const seconds: Big = Big( - getNullOrDefault(7).replace('S', '').replace(',', '.') + getNullOrDefault(7).replace('S', '').replace(',', '.'), ); const totalMonths: number = Big(years) @@ -581,22 +581,22 @@ export class Interval { .toNumber(); if (!Number.isSafeInteger(totalMonths)) { throw new GoogleError( - 'Total months is outside of the range of safe integer' + 'Total months is outside of the range of safe integer', ); } const totalNanoseconds = BigInt( seconds .add( - Big((BigInt(hours) * BigInt(Interval.SECONDS_PER_HOUR)).toString()) + Big((BigInt(hours) * BigInt(Interval.SECONDS_PER_HOUR)).toString()), ) .add( Big( - (BigInt(minutes) * BigInt(Interval.SECONDS_PER_MINUTE)).toString() - ) + (BigInt(minutes) * BigInt(Interval.SECONDS_PER_MINUTE)).toString(), + ), ) .mul(Big(this.NANOSECONDS_PER_SECOND)) - .toString() + .toString(), ); return new Interval(totalMonths, days, totalNanoseconds); @@ -614,7 +614,7 @@ export class Interval { let result = 'P'; if (this.months !== 0) { const years_part: number = Math.trunc( - this.months / Interval.MONTHS_PER_YEAR + this.months / Interval.MONTHS_PER_YEAR, ); const months_part: number = this.months - years_part * Interval.MONTHS_PER_YEAR; @@ -815,7 +815,7 @@ function convertValueToJson(value: Value, options: JSONOptions): Value { function decode( value: Value, type: spannerClient.spanner.v1.Type, - columnMetadata?: object + columnMetadata?: object, ): Value { if (is.null(value)) { return null; @@ -908,7 +908,7 @@ function decode( return decode( value, type.arrayElementType! 
as spannerClient.spanner.v1.Type, - columnMetadata + columnMetadata, ); }); break; @@ -918,7 +918,7 @@ function decode( const value = decode( (!Array.isArray(decoded) && decoded[name!]) || decoded[index], type as spannerClient.spanner.v1.Type, - columnMetadata + columnMetadata, ); return {name, value}; }); @@ -1203,7 +1203,7 @@ function getType(value: Value): Type { * @returns {object} */ function convertToListValue(value: T): p.IListValue { - const values = (arrify(value) as T[]).map(codec.encode); + const values = (toArray(value) as T[]).map(codec.encode); return {values}; } @@ -1216,7 +1216,7 @@ function convertToListValue(value: T): p.IListValue { * @returns {object} */ function convertMsToProtoTimestamp( - ms: number + ms: number, ): spannerClient.protobuf.ITimestamp { const rawSeconds = ms / 1000; const seconds = Math.floor(rawSeconds); @@ -1250,7 +1250,7 @@ function convertProtoTimestampToDate({ * @return {object} */ function createTypeObject( - friendlyType?: string | Type + friendlyType?: string | Type, ): spannerClient.spanner.v1.Type { if (!friendlyType) { friendlyType = 'unspecified'; @@ -1277,7 +1277,7 @@ function createTypeObject( if (code === 'STRUCT') { type.structType = { - fields: arrify(config.fields!).map(field => { + fields: toArray(config.fields!).map(field => { return {name: field.name, type: codec.createTypeObject(field)}; }), }; diff --git a/src/common-grpc/service-object.ts b/src/common-grpc/service-object.ts index 7addf20dd..d071bb010 100644 --- a/src/common-grpc/service-object.ts +++ b/src/common-grpc/service-object.ts @@ -32,7 +32,7 @@ import * as extend from 'extend'; import {CoreOptions, RequestCallback, Response} from 'teeny-request'; export class GrpcServiceObject extends ServiceObject { - parent!: GrpcServiceObject; + declare parent: GrpcServiceObject; /** * GrpcServiceObject is a base class, meant to be inherited from by a service @@ -47,6 +47,7 @@ export class GrpcServiceObject extends ServiceObject { */ constructor(config: 
ServiceObjectConfig) { super(config); + this.parent = config.parent as GrpcServiceObject; } /** @@ -98,7 +99,7 @@ export class GrpcServiceObject extends ServiceObject { setMetadata(metadata: Metadata, callback: ResponseCallback): void; setMetadata( metadata: Metadata, - callback?: ResponseCallback + callback?: ResponseCallback, ): void | Promise { // eslint-disable-next-line @typescript-eslint/no-explicit-any const protoOpts = (this.methods.setMetadata as any).protoOpts; @@ -106,7 +107,7 @@ export class GrpcServiceObject extends ServiceObject { true, {}, this.getOpts(this.methods.setMetadata), - metadata + metadata, ); this.request(protoOpts, reqOpts, callback || util.noop); } diff --git a/src/common-grpc/service.ts b/src/common-grpc/service.ts index 08f5642d7..ca4bda373 100644 --- a/src/common-grpc/service.ts +++ b/src/common-grpc/service.ts @@ -45,7 +45,7 @@ import {Duplex, PassThrough} from 'stream'; const gaxProtoPath = path.join( path.dirname(require.resolve('google-gax')), '..', - 'protos' + 'protos', ); export interface ServiceRequestCallback { @@ -320,7 +320,7 @@ export class ObjectToStructConverter { [ 'This object contains a circular reference. 
To automatically', 'remove it, set the `removeCircular` option to true.', - ].join(' ') + ].join(' '), ); } convertedValue = { @@ -407,7 +407,7 @@ export class GrpcService extends Service { 'gl-node/' + process.versions.node, 'gccl/' + config.packageJson.version, this.grpcVersion, - ].join(' ') + ].join(' '), ); if (config.grpcMetadata) { for (const prop in config.grpcMetadata) { @@ -459,21 +459,21 @@ export class GrpcService extends Service { request(reqOpts: DecorateRequestOptions): Promise; request( reqOpts: DecorateRequestOptions, - callback: BodyResponseCallback + callback: BodyResponseCallback, ): void; request( reqOpts: DecorateRequestOptions, - callback?: BodyResponseCallback + callback?: BodyResponseCallback, ): void | Promise; request( protoOpts: ProtoOpts, reqOpts: DecorateRequestOptions, - callback: ServiceRequestCallback + callback: ServiceRequestCallback, ): Abortable | void; request( pOpts: ProtoOpts | DecorateRequestOptions, rOpts?: DecorateRequestOptions | BodyResponseCallback, - callback?: ServiceRequestCallback + callback?: ServiceRequestCallback, ): Abortable | void | Promise { /** * The function signature above is a little funky. This is due to the way @@ -557,11 +557,11 @@ export class GrpcService extends Service { } onResponse(null, resp); - } + }, ); }, }, - protoOpts.retryOpts + protoOpts.retryOpts, ); return retryRequest(null!, retryOpts, (err, resp: object) => { @@ -587,7 +587,7 @@ export class GrpcService extends Service { requestStream(protoOpts: ProtoOpts, reqOpts: DecorateRequestOptions): Duplex; requestStream( pOpts: ProtoOpts | DecorateRequestOptions, - rOpts?: DecorateRequestOptions + rOpts?: DecorateRequestOptions, ): Duplex | Request { /** * The function signature above is a little funky. 
This is due to the way @@ -654,7 +654,7 @@ export class GrpcService extends Service { const ee: EventEmitter = service[protoOpts.method]( reqOpts, grpcMetadata, - grpcOpts + grpcOpts, ).on('metadata', () => { // retry-request requires a server response before it // starts emitting data. The closest mechanism grpc @@ -669,7 +669,7 @@ export class GrpcService extends Service { return ee; }, }, - protoOpts.retryOpts + protoOpts.retryOpts, ); // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -739,7 +739,7 @@ export class GrpcService extends Service { const grpcStream = service[protoOpts.method]( reqOpts, grpcMetadata, - grpcOpts + grpcOpts, ) .on('status', status => { const grcpStatus = GrpcService.decorateStatus_(status); @@ -999,7 +999,19 @@ export class GrpcService extends Service { this.authClient.getClient().then(client => { const credentials = this.grpc!.credentials.combineChannelCredentials( this.grpc!.credentials.createSsl(), - this.grpc!.credentials.createFromGoogleCredential(client) + grpc.credentials.createFromGoogleCredential({ + // the `grpc` package does not support the `Headers` object yet + getRequestHeaders: async (url?: string | URL) => { + const headers = await client.getRequestHeaders(url); + const genericHeadersObject: Record = {}; + + headers.forEach( + (value, key) => (genericHeadersObject[key] = value), + ); + + return genericHeadersObject; + }, + }), ); if (!this.projectId || this.projectId === '{{projectId}}') { this.projectId = client.projectId!; @@ -1020,7 +1032,7 @@ export class GrpcService extends Service { */ private loadProtoFile( protoPath: string, - config: GrpcServiceConfig + config: GrpcServiceConfig, ): PackageDefinition { const protoObjectCacheKey = [config.protosDir, protoPath].join('$'); @@ -1060,8 +1072,8 @@ export class GrpcService extends Service { { 'grpc.primary_user_agent': this.userAgent, }, - GRPC_SERVICE_OPTIONS - ) + GRPC_SERVICE_OPTIONS, + ), ); this.activeServiceMap_.set(protoOpts.service, service); 
diff --git a/src/common.ts b/src/common.ts index 6eaa85984..9688f9810 100644 --- a/src/common.ts +++ b/src/common.ts @@ -29,7 +29,7 @@ export interface ResourceCallback { ( err: grpc.ServiceError | null, resource?: Resource | null, - response?: Response + response?: Response, ): void; } export type PagedResponse = [Item[], {} | null, Response]; @@ -47,7 +47,7 @@ export interface PagedCallback { err: grpc.ServiceError | null, results?: Item[] | null, nextQuery?: {} | null, - response?: Response | null + response?: Response | null, ): void; } @@ -56,7 +56,7 @@ export interface LongRunningCallback { err: grpc.ServiceError | null, resource?: Resource | null, operation?: GaxOperation | null, - apiResponse?: IOperation + apiResponse?: IOperation, ): void; } @@ -100,7 +100,7 @@ export function addLeaderAwareRoutingHeader(headers: {[k: string]: string}) { */ export function getCommonHeaders( resourceName: string, - enableTracing?: boolean + enableTracing?: boolean, ) { const headers: {[k: string]: string} = {}; diff --git a/src/database.ts b/src/database.ts index 94554e1d4..9d3e1e745 100644 --- a/src/database.ts +++ b/src/database.ts @@ -97,7 +97,7 @@ import { import {finished, Duplex, Readable, Transform} from 'stream'; import {PreciseDate} from '@google-cloud/precise-date'; import {EnumKey, RequestConfig, TranslateEnumKeys, Spanner} from '.'; -import arrify = require('arrify'); +import {toArray} from './helper'; import {ServiceError} from 'google-gax'; import IPolicy = google.iam.v1.IPolicy; import Policy = google.iam.v1.Policy; @@ -151,7 +151,7 @@ type GetTransactionCallback = NormalCallback; export interface SessionPoolConstructor { new ( database: Database, - options?: SessionPoolOptions | null + options?: SessionPoolOptions | null, ): SessionPoolInterface; } @@ -313,7 +313,7 @@ export type GetStateCallback = NormalCallback< interface DatabaseRequest { ( config: RequestConfig, - callback: ResourceCallback + callback: ResourceCallback, ): void; (config: RequestConfig, 
callback: RequestCallback): void; (config: RequestConfig, callback: RequestCallback): void; @@ -368,7 +368,7 @@ class Database extends common.GrpcServiceObject { instance: Instance, name: string, poolOptions?: SessionPoolConstructor | SessionPoolOptions, - queryOptions?: spannerClient.spanner.v1.ExecuteSqlRequest.IQueryOptions + queryOptions?: spannerClient.spanner.v1.ExecuteSqlRequest.IQueryOptions, ) { const methods = { /** @@ -425,7 +425,7 @@ class Database extends common.GrpcServiceObject { createMethod: ( _: {}, options: CreateDatabaseOptions, - callback: CreateDatabaseCallback + callback: CreateDatabaseCallback, ) => { const pool = this.pool_ as SessionPool; if (pool._pending > 0) { @@ -438,7 +438,7 @@ class Database extends common.GrpcServiceObject { let timeout; const promises = [ new Promise( - resolve => (timeout = setTimeout(resolve, 10000)) + resolve => (timeout = setTimeout(resolve, 10000)), ), new Promise(resolve => { pool @@ -456,9 +456,11 @@ class Database extends common.GrpcServiceObject { }); }), ]; - Promise.race(promises).then(() => - instance.createDatabase(formattedName_, options, callback) - ); + Promise.race(promises) + .then(() => + instance.createDatabase(formattedName_, options, callback), + ) + .catch(() => {}); } else { return instance.createDatabase(formattedName_, options, callback); } @@ -487,7 +489,7 @@ class Database extends common.GrpcServiceObject { this._observabilityOptions = instance._observabilityOptions; this.commonHeaders_ = getCommonHeaders( this.formattedName_, - this._observabilityOptions?.enableEndToEndTracing + this._observabilityOptions?.enableEndToEndTracing, ); // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -501,7 +503,7 @@ class Database extends common.GrpcServiceObject { } this.queryOptions_ = Object.assign( Object.assign({}, queryOptions), - Database.getEnvironmentQueryOptions() + Database.getEnvironmentQueryOptions(), ); } @@ -571,18 +573,18 @@ class Database extends common.GrpcServiceObject { 
*/ setMetadata( metadata: IDatabase, - gaxOptions?: CallOptions + gaxOptions?: CallOptions, ): Promise; setMetadata(metadata: IDatabase, callback: SetDatabaseMetadataCallback): void; setMetadata( metadata: IDatabase, gaxOptions: CallOptions, - callback: SetDatabaseMetadataCallback + callback: SetDatabaseMetadataCallback, ): void; setMetadata( metadata: IDatabase, optionsOrCallback?: CallOptions | SetDatabaseMetadataCallback, - cb?: SetDatabaseMetadataCallback + cb?: SetDatabaseMetadataCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -594,7 +596,7 @@ class Database extends common.GrpcServiceObject { { name: this.formattedName_, }, - metadata + metadata, ), updateMask: { paths: Object.keys(metadata).map(snakeCase), @@ -608,7 +610,7 @@ class Database extends common.GrpcServiceObject { gaxOpts, headers: this.commonHeaders_, }, - callback! + callback!, ); } @@ -688,15 +690,15 @@ class Database extends common.GrpcServiceObject { * ``` */ batchCreateSessions( - options: number | BatchCreateSessionsOptions + options: number | BatchCreateSessionsOptions, ): Promise; batchCreateSessions( options: number | BatchCreateSessionsOptions, - callback: BatchCreateSessionsCallback + callback: BatchCreateSessionsCallback, ): void; batchCreateSessions( options: number | BatchCreateSessionsOptions, - callback?: BatchCreateSessionsCallback + callback?: BatchCreateSessionsCallback, ): void | Promise { if (typeof options === 'number') { options = {count: options}; @@ -720,7 +722,7 @@ class Database extends common.GrpcServiceObject { const allHeaders = this._metadataWithRequestId( this._nextNthRequest(), 1, - headers + headers, ); startTrace('Database.batchCreateSessions', this._traceConfig, span => { @@ -749,7 +751,7 @@ class Database extends common.GrpcServiceObject { span.end(); callback!(null, sessions, resp!); - } + }, ); }); } @@ -757,7 +759,7 @@ class Database extends common.GrpcServiceObject { public 
_metadataWithRequestId( nthRequest: number, attempt: number, - priorMetadata?: {[k: string]: string} + priorMetadata?: {[k: string]: string}, ): {[k: string]: string} { if (!priorMetadata) { priorMetadata = {}; @@ -769,7 +771,7 @@ class Database extends common.GrpcServiceObject { this._clientId || 1, 1, // TODO: Properly infer the channelId nthRequest, - attempt + attempt, ); return withReqId; } @@ -800,7 +802,7 @@ class Database extends common.GrpcServiceObject { */ batchTransaction( identifier: TransactionIdentifier, - options?: TimestampBounds + options?: TimestampBounds, ): BatchTransaction { const session = typeof identifier.session === 'string' @@ -853,7 +855,7 @@ class Database extends common.GrpcServiceObject { close(callback: SessionPoolCloseCallback): void; close(): Promise; close( - callback?: SessionPoolCloseCallback + callback?: SessionPoolCloseCallback, ): void | Promise { const key = this.id!.split('/').pop(); // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -879,16 +881,16 @@ class Database extends common.GrpcServiceObject { * @returns {Promise} */ createBatchTransaction( - options?: TimestampBounds + options?: TimestampBounds, ): Promise; createBatchTransaction(callback: CreateBatchTransactionCallback): void; createBatchTransaction( options: TimestampBounds, - callback: CreateBatchTransactionCallback + callback: CreateBatchTransactionCallback, ): void; createBatchTransaction( optionsOrCallback?: TimestampBounds | CreateBatchTransactionCallback, - cb?: CreateBatchTransactionCallback + cb?: CreateBatchTransactionCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' @@ -911,7 +913,7 @@ class Database extends common.GrpcServiceObject { } const transaction = this.batchTransaction( {session: session!}, - options + options, ); this._releaseOnEnd(session!, transaction, span); transaction.begin((err, resp) => { @@ -931,7 +933,7 @@ class Database extends common.GrpcServiceObject { callback!(null, transaction, 
resp!); }); }); - } + }, ); } /** @@ -1011,11 +1013,11 @@ class Database extends common.GrpcServiceObject { createSession(callback: CreateSessionCallback): void; createSession( options: CreateSessionOptions, - callback: CreateSessionCallback + callback: CreateSessionCallback, ): void; createSession( optionsOrCallback: CreateSessionOptions | CreateSessionCallback, - cb?: CreateSessionCallback + cb?: CreateSessionCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb!; @@ -1042,7 +1044,7 @@ class Database extends common.GrpcServiceObject { const headers = this._metadataWithRequestId( this._nextNthRequest(), 1, - this.commonHeaders_ + this.commonHeaders_, ); if (this._getSpanner().routeToLeaderEnabled) { addLeaderAwareRoutingHeader(headers); @@ -1069,7 +1071,7 @@ class Database extends common.GrpcServiceObject { session._observabilityOptions = this._traceConfig!.opts; span.end(); callback(null, session, resp!); - } + }, ); }); } @@ -1147,18 +1149,18 @@ class Database extends common.GrpcServiceObject { */ createTable( schema: Schema, - gaxOptions?: CallOptions + gaxOptions?: CallOptions, ): Promise; createTable(schema: Schema, callback: CreateTableCallback): void; createTable( schema: Schema, gaxOptions: CallOptions, - callback: CreateTableCallback + callback: CreateTableCallback, ): void; createTable( schema: Schema, gaxOptionsOrCallback?: CallOptions | CreateTableCallback, - cb?: CreateTableCallback + cb?: CreateTableCallback, ): void | Promise { const gaxOptions = typeof gaxOptionsOrCallback === 'object' ? 
gaxOptionsOrCallback : {}; @@ -1171,7 +1173,7 @@ class Database extends common.GrpcServiceObject { return; } const tableName = (schema as string).match( - /CREATE TABLE `*([^\s`(]+)/ + /CREATE TABLE `*([^\s`(]+)/, )![1]; const table = this.table(tableName!); table._observabilityOptions = this._traceConfig!.opts; @@ -1252,7 +1254,7 @@ class Database extends common.GrpcServiceObject { delete(gaxOptions: CallOptions, callback: DatabaseDeleteCallback): void; delete( optionsOrCallback?: CallOptions | DatabaseDeleteCallback, - cb?: DatabaseDeleteCallback + cb?: DatabaseDeleteCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1272,7 +1274,7 @@ class Database extends common.GrpcServiceObject { gaxOpts, headers: this.commonHeaders_, }, - callback! + callback!, ); }); } @@ -1318,7 +1320,7 @@ class Database extends common.GrpcServiceObject { exists(gaxOptions: CallOptions, callback: ExistsCallback): void; exists( gaxOptionsOrCallback?: CallOptions | ExistsCallback, - cb?: ExistsCallback + cb?: ExistsCallback, ): void | Promise<[boolean]> { const gaxOptions = typeof gaxOptionsOrCallback === 'object' ? 
gaxOptionsOrCallback : {}; @@ -1390,7 +1392,7 @@ class Database extends common.GrpcServiceObject { get(options: GetDatabaseConfig, callback: DatabaseCallback): void; get( optionsOrCallback?: GetDatabaseConfig | DatabaseCallback, - cb?: DatabaseCallback + cb?: DatabaseCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' @@ -1414,7 +1416,7 @@ class Database extends common.GrpcServiceObject { this.metadata = metadata; callback!(null, this, metadata as r.Response); }); - } + }, ); return; } @@ -1477,11 +1479,11 @@ class Database extends common.GrpcServiceObject { getMetadata(callback: GetDatabaseMetadataCallback): void; getMetadata( gaxOptions: CallOptions, - callback: GetDatabaseMetadataCallback + callback: GetDatabaseMetadataCallback, ): void; getMetadata( gaxOptionsOrCallback?: CallOptions | GetDatabaseMetadataCallback, - cb?: GetDatabaseMetadataCallback + cb?: GetDatabaseMetadataCallback, ): void | Promise { const callback = typeof gaxOptionsOrCallback === 'function' @@ -1509,7 +1511,7 @@ class Database extends common.GrpcServiceObject { this.metadata = resp; } callback!(err, resp); - } + }, ); } @@ -1550,12 +1552,12 @@ class Database extends common.GrpcServiceObject { * ``` */ getRestoreInfo( - options?: CallOptions + options?: CallOptions, ): Promise; getRestoreInfo(callback: GetRestoreInfoCallback): void; getRestoreInfo(options: CallOptions, callback: GetRestoreInfoCallback): void; async getRestoreInfo( - optionsOrCallback?: CallOptions | GetRestoreInfoCallback + optionsOrCallback?: CallOptions | GetRestoreInfoCallback, ): Promise { const gaxOptions = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -1598,7 +1600,7 @@ class Database extends common.GrpcServiceObject { * ``` */ getState( - options?: CallOptions + options?: CallOptions, ): Promise< | EnumKey | undefined @@ -1606,7 +1608,7 @@ class Database extends common.GrpcServiceObject { getState(callback: GetStateCallback): void; getState(options: CallOptions, callback: GetStateCallback): void; async getState( - optionsOrCallback?: CallOptions | GetStateCallback + optionsOrCallback?: CallOptions | GetStateCallback, ): Promise< | EnumKey | undefined @@ -1642,7 +1644,7 @@ class Database extends common.GrpcServiceObject { */ getDatabaseDialect( - options?: CallOptions + options?: CallOptions, ): Promise< | EnumKey | undefined @@ -1650,11 +1652,11 @@ class Database extends common.GrpcServiceObject { getDatabaseDialect(callback: GetDatabaseDialectCallback): void; getDatabaseDialect( options: CallOptions, - callback: GetDatabaseDialectCallback + callback: GetDatabaseDialectCallback, ): void; async getDatabaseDialect( optionsOrCallback?: CallOptions | GetDatabaseDialectCallback, - callback?: GetDatabaseDialectCallback + callback?: GetDatabaseDialectCallback, ): Promise< | EnumKey | undefined @@ -1739,7 +1741,7 @@ class Database extends common.GrpcServiceObject { getSchema(options: CallOptions, callback: GetSchemaCallback): void; getSchema( optionsOrCallback?: CallOptions | GetSchemaCallback, - cb?: GetSchemaCallback + cb?: GetSchemaCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1761,7 +1763,7 @@ class Database extends common.GrpcServiceObject { // eslint-disable-next-line @typescript-eslint/no-explicit-any (err, statements, ...args: any[]) => { callback!(err, statements ? 
statements.statements : null, ...args); - } + }, ); } @@ -1813,11 +1815,11 @@ class Database extends common.GrpcServiceObject { getIamPolicy(callback: GetIamPolicyCallback): void; getIamPolicy( options: GetIamPolicyOptions, - callback: GetIamPolicyCallback + callback: GetIamPolicyCallback, ): void; getIamPolicy( optionsOrCallback?: GetIamPolicyOptions | GetIamPolicyCallback, - cb?: GetIamPolicyCallback + cb?: GetIamPolicyCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1840,7 +1842,7 @@ class Database extends common.GrpcServiceObject { }, (err, resp) => { callback!(err, resp); - } + }, ); } @@ -1934,7 +1936,7 @@ class Database extends common.GrpcServiceObject { getSessions(options: GetSessionsOptions, callback: GetSessionsCallback): void; getSessions( optionsOrCallback?: GetSessionsOptions | GetSessionsCallback, - cb?: GetSessionsCallback + cb?: GetSessionsCallback, ): void | Promise { // eslint-disable-next-line @typescript-eslint/no-this-alias const self = this; @@ -1959,7 +1961,7 @@ class Database extends common.GrpcServiceObject { pageSize: (gaxOpts as GetSessionsOptions).pageSize, pageToken: (gaxOpts as GetSessionsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetSessionsOptions).pageSize; delete (gaxOpts as GetSessionsOptions).pageToken; @@ -1968,7 +1970,7 @@ class Database extends common.GrpcServiceObject { const headers = this._metadataWithRequestId( this._nextNthRequest(), 1, - this.commonHeaders_ + this.commonHeaders_, ); return startTrace('Database.getSessions', this._traceConfig, span => { @@ -2001,7 +2003,7 @@ class Database extends common.GrpcServiceObject { ? extend({}, options, nextPageRequest!) 
: null; callback!(err, sessionInstances!, nextQuery, ...args); - } + }, ); }); } @@ -2063,7 +2065,7 @@ class Database extends common.GrpcServiceObject { pageSize: (gaxOpts as GetSessionsOptions).pageSize, pageToken: (gaxOpts as GetSessionsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetSessionsOptions).pageSize; delete (gaxOpts as GetSessionsOptions).pageToken; @@ -2142,7 +2144,7 @@ class Database extends common.GrpcServiceObject { getSnapshot(options: TimestampBounds, callback: GetSnapshotCallback): void; getSnapshot( optionsOrCallback?: TimestampBounds | GetSnapshotCallback, - cb?: GetSnapshotCallback + cb?: GetSnapshotCallback, ): void | Promise<[Snapshot]> { const callback = typeof optionsOrCallback === 'function' @@ -2163,11 +2165,11 @@ class Database extends common.GrpcServiceObject { ) { const error = Object.assign( new Error( - 'maxStaleness / minReadTimestamp is not supported for multi-use read-only transactions.' + 'maxStaleness / minReadTimestamp is not supported for multi-use read-only transactions.', ), { code: 3, // invalid argument - } + }, ) as ServiceError; callback!(error); return; @@ -2258,12 +2260,12 @@ class Database extends common.GrpcServiceObject { * ``` */ getTransaction( - optionsOrCallback?: GetTransactionOptions + optionsOrCallback?: GetTransactionOptions, ): Promise<[Transaction]>; getTransaction(callback: GetTransactionCallback): void; getTransaction( optionsOrCallback?: GetTransactionOptions | GetTransactionCallback, - callback?: GetTransactionCallback + callback?: GetTransactionCallback, ): void | Promise<[Transaction]> { const cb = typeof optionsOrCallback === 'function' @@ -2286,11 +2288,11 @@ class Database extends common.GrpcServiceObject { if (options.requestOptions) { transaction!.requestOptions = Object.assign( transaction!.requestOptions || {}, - options.requestOptions + options.requestOptions, ); } transaction?.setReadWriteTransactionOptions( - options as RunTransactionOptions + options as 
RunTransactionOptions, ); span.addEvent('Using Session', {'session.id': session?.id}); transaction!._observabilityOptions = this._observabilityOptions; @@ -2301,7 +2303,7 @@ class Database extends common.GrpcServiceObject { span.end(); cb!(err as grpc.ServiceError | null, transaction); }); - } + }, ); } @@ -2363,17 +2365,17 @@ class Database extends common.GrpcServiceObject { * ``` */ getOperations( - options?: GetDatabaseOperationsOptions + options?: GetDatabaseOperationsOptions, ): Promise; getOperations(callback: GetDatabaseOperationsCallback): void; getOperations( options: GetDatabaseOperationsOptions, - callback: GetDatabaseOperationsCallback + callback: GetDatabaseOperationsCallback, ): void; async getOperations( optionsOrCallback?: | GetDatabaseOperationsOptions - | GetDatabaseOperationsCallback + | GetDatabaseOperationsCallback, ): Promise { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -2454,11 +2456,11 @@ class Database extends common.GrpcServiceObject { getDatabaseRoles(callback: GetDatabaseRolesCallback): void; getDatabaseRoles( gaxOptions: CallOptions, - callback: GetDatabaseRolesCallback + callback: GetDatabaseRolesCallback, ): void; getDatabaseRoles( optionsOrCallback?: CallOptions | GetDatabaseRolesCallback, - cb?: GetDatabaseRolesCallback + cb?: GetDatabaseRolesCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -2477,7 +2479,7 @@ class Database extends common.GrpcServiceObject { pageSize: (gaxOpts as GetDatabaseRolesOptions).pageSize, pageToken: (gaxOpts as GetDatabaseRolesOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetDatabaseRolesOptions).pageSize; delete (gaxOpts as GetDatabaseRolesOptions).pageToken; @@ -2500,7 +2502,7 @@ class Database extends common.GrpcServiceObject { : null; callback!(err, roles, nextQuery, ...args); - } + }, ); } @@ -2515,11 +2517,11 @@ class Database extends common.GrpcServiceObject { makePooledRequest_(config: RequestConfig): Promise; makePooledRequest_( config: RequestConfig, - callback: PoolRequestCallback + callback: PoolRequestCallback, ): void; makePooledRequest_( config: RequestConfig, - callback?: PoolRequestCallback + callback?: PoolRequestCallback, ): void | Promise { const pool = this.pool_; pool.getSession((err, session) => { @@ -2658,18 +2660,18 @@ class Database extends common.GrpcServiceObject { restore(backupPath: string): Promise; restore( backupPath: string, - options?: RestoreOptions | CallOptions + options?: RestoreOptions | CallOptions, ): Promise; restore(backupPath: string, callback: RestoreDatabaseCallback): void; restore( backupPath: string, options: RestoreOptions | CallOptions, - callback: RestoreDatabaseCallback + callback: RestoreDatabaseCallback, ): void; restore( backupName: string, optionsOrCallback?: RestoreOptions | CallOptions | RestoreDatabaseCallback, - cb?: RestoreDatabaseCallback + cb?: RestoreDatabaseCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -2710,7 +2712,7 @@ class Database extends common.GrpcServiceObject { return; } callback!(null, this, operation, resp); - } + }, ); } @@ -2870,18 +2872,18 @@ class Database extends common.GrpcServiceObject { run(query: string | ExecuteSqlRequest): Promise; run( query: string | ExecuteSqlRequest, - options?: TimestampBounds + options?: TimestampBounds, ): Promise; run(query: string | ExecuteSqlRequest, callback: RunCallback): void; run( query: string | ExecuteSqlRequest, options: TimestampBounds, - callback: RunCallback + callback: RunCallback, ): void; run( query: string | ExecuteSqlRequest, optionsOrCallback?: TimestampBounds | RunCallback, - cb?: RunCallback + cb?: RunCallback, ): void | Promise { let stats: ResultSetStats; let metadata: ResultSetMetadata; @@ -2921,7 +2923,7 @@ class Database extends common.GrpcServiceObject { span.end(); callback!(null, rows, stats, metadata); }); - } + }, ); } /** @@ -2939,15 +2941,15 @@ class Database extends common.GrpcServiceObject { * @returns {Promise} */ runPartitionedUpdate( - query: string | RunPartitionedUpdateOptions + query: string | RunPartitionedUpdateOptions, ): Promise<[number]>; runPartitionedUpdate( query: string | RunPartitionedUpdateOptions, - callback?: RunUpdateCallback + callback?: RunUpdateCallback, ): void; runPartitionedUpdate( query: string | RunPartitionedUpdateOptions, - callback?: RunUpdateCallback + callback?: RunUpdateCallback, ): void | Promise<[number]> { return startTrace( 'Database.runPartitionedUpdate', @@ -2966,7 +2968,7 @@ class Database extends common.GrpcServiceObject { return; } - this._runPartitionedUpdate(session!, query, (err, count) => { + void this._runPartitionedUpdate(session!, query, (err, count) => { if (err) { setSpanError(span, err); } @@ -2974,14 +2976,14 @@ class Database extends common.GrpcServiceObject { callback!(err, count); }); }); - } + }, ); } _runPartitionedUpdate( session: Session, query: string | RunPartitionedUpdateOptions, - callback?: 
RunUpdateCallback + callback?: RunUpdateCallback, ): void | Promise { const transaction = session.partitionedDml(); @@ -2995,14 +2997,14 @@ class Database extends common.GrpcServiceObject { return; } - transaction.runUpdate(query, (err, updateCount) => { + transaction.runUpdate(query, async (err, updateCount) => { if (err) { if (err.code !== grpc.status.ABORTED) { this.sessionFactory_.release(session!); callback!(err, 0); return; } - this._runPartitionedUpdate(session, query, callback); + void this._runPartitionedUpdate(session, query, callback); } else { this.sessionFactory_.release(session!); callback!(null, updateCount); @@ -3139,7 +3141,7 @@ class Database extends common.GrpcServiceObject { */ runStream( query: string | ExecuteSqlRequest, - options?: TimestampBounds + options?: TimestampBounds, ): PartialResultStream { const proxyStream: Transform = through.obj(); return startTrace( @@ -3217,7 +3219,7 @@ class Database extends common.GrpcServiceObject { }); return proxyStream as PartialResultStream; - } + }, ); } @@ -3313,11 +3315,11 @@ class Database extends common.GrpcServiceObject { runTransaction(runFn: RunTransactionCallback): void; runTransaction( options: RunTransactionOptions, - runFn: RunTransactionCallback + runFn: RunTransactionCallback, ): void; runTransaction( optionsOrRunFn: RunTransactionOptions | RunTransactionCallback, - fn?: RunTransactionCallback + fn?: RunTransactionCallback, ): void { const runFn = typeof optionsOrRunFn === 'function' @@ -3359,11 +3361,11 @@ class Database extends common.GrpcServiceObject { transaction!.requestOptions = Object.assign( transaction!.requestOptions || {}, - options.requestOptions + options.requestOptions, ); transaction!.setReadWriteTransactionOptions( - options as RunTransactionOptions + options as RunTransactionOptions, ); const release = () => { @@ -3375,7 +3377,7 @@ class Database extends common.GrpcServiceObject { session!, transaction!, runFn!, - options + options, ); runner.run().then(release, err => { 
@@ -3393,16 +3395,16 @@ class Database extends common.GrpcServiceObject { } }); }); - } + }, ); } runTransactionAsync( - runFn: AsyncRunTransactionCallback + runFn: AsyncRunTransactionCallback, ): Promise; runTransactionAsync( options: RunTransactionOptions, - runFn: AsyncRunTransactionCallback + runFn: AsyncRunTransactionCallback, ): Promise; /** * @callback AsyncRunTransactionCallback @@ -3465,7 +3467,7 @@ class Database extends common.GrpcServiceObject { */ async runTransactionAsync( optionsOrRunFn: RunTransactionOptions | AsyncRunTransactionCallback, - fn?: AsyncRunTransactionCallback + fn?: AsyncRunTransactionCallback, ): Promise { const runFn = typeof optionsOrRunFn === 'function' @@ -3494,10 +3496,10 @@ class Database extends common.GrpcServiceObject { const [session, transaction] = await promisify(getSession)(); transaction.requestOptions = Object.assign( transaction.requestOptions || {}, - options.requestOptions + options.requestOptions, ); transaction!.setReadWriteTransactionOptions( - options as RunTransactionOptions + options as RunTransactionOptions, ); sessionId = session?.id; span.addEvent('Using Session', {'session.id': sessionId}); @@ -3505,7 +3507,7 @@ class Database extends common.GrpcServiceObject { session, transaction, runFn, - options + options, ); try { @@ -3528,7 +3530,7 @@ class Database extends common.GrpcServiceObject { } } } - } + }, ); } @@ -3592,7 +3594,7 @@ class Database extends common.GrpcServiceObject { */ batchWriteAtLeastOnce( mutationGroups: MutationGroup[], - options?: BatchWriteOptions + options?: BatchWriteOptions, ): NodeJS.ReadableStream { const proxyStream: Transform = through.obj(); @@ -3620,7 +3622,7 @@ class Database extends common.GrpcServiceObject { mutationGroups: mutationGroups.map(mg => mg.proto()), requestOptions: options?.requestOptions, excludeTxnFromChangeStream: options?.excludeTxnFromChangeStreams, - } + }, ); let dataReceived = false; let dataStream = this.requestStream({ @@ -3655,7 +3657,7 @@ class 
Database extends common.GrpcServiceObject { // Create a new stream and add it to the end user stream. dataStream = this.batchWriteAtLeastOnce( mutationGroups, - options + options, ); dataStream.pipe(proxyStream); } else { @@ -3671,7 +3673,7 @@ class Database extends common.GrpcServiceObject { }); return proxyStream as NodeJS.ReadableStream; - } + }, ); } @@ -3728,18 +3730,18 @@ class Database extends common.GrpcServiceObject { writeAtLeastOnce(mutations: MutationSet): Promise; writeAtLeastOnce( mutations: MutationSet, - options: WriteAtLeastOnceOptions + options: WriteAtLeastOnceOptions, ): Promise; writeAtLeastOnce(mutations: MutationSet, callback: CommitCallback): void; writeAtLeastOnce( mutations: MutationSet, options: WriteAtLeastOnceOptions, - callback: CommitCallback + callback: CommitCallback, ): void; writeAtLeastOnce( mutations: MutationSet, optionsOrCallback?: WriteAtLeastOnceOptions | CommitCallback, - callback?: CommitCallback + callback?: CommitCallback, ): void | Promise { const cb = typeof optionsOrCallback === 'function' @@ -3774,7 +3776,7 @@ class Database extends common.GrpcServiceObject { this._releaseOnEnd(session!, transaction!, span); try { transaction!.setReadWriteTransactionOptions( - options as RunTransactionOptions + options as RunTransactionOptions, ); transaction?.setQueuedMutations(mutations.proto()); return transaction?.commit(options, (err, resp) => { @@ -3881,21 +3883,21 @@ class Database extends common.GrpcServiceObject { setIamPolicy(policy: SetIamPolicyRequest): Promise; setIamPolicy( policy: SetIamPolicyRequest, - options?: CallOptions + options?: CallOptions, ): Promise; setIamPolicy( policy: SetIamPolicyRequest, - callback: SetIamPolicyCallback + callback: SetIamPolicyCallback, ): void; setIamPolicy( policy: SetIamPolicyRequest, options: CallOptions, - callback: SetIamPolicyCallback + callback: SetIamPolicyCallback, ): void; setIamPolicy( policy: SetIamPolicyRequest, optionsOrCallback?: CallOptions | SetIamPolicyCallback, - 
cb?: SetIamPolicyCallback + cb?: SetIamPolicyCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -3921,7 +3923,7 @@ class Database extends common.GrpcServiceObject { }, (err, resp) => { callback!(err, resp); - } + }, ); } @@ -4011,18 +4013,18 @@ class Database extends common.GrpcServiceObject { */ updateSchema( statements: Schema, - gaxOptions?: CallOptions + gaxOptions?: CallOptions, ): Promise; updateSchema(statements: Schema, callback: UpdateSchemaCallback): void; updateSchema( statements: Schema, gaxOptions: CallOptions, - callback: UpdateSchemaCallback + callback: UpdateSchemaCallback, ): void; updateSchema( statements: Schema, optionsOrCallback?: CallOptions | UpdateSchemaCallback, - cb?: UpdateSchemaCallback + cb?: UpdateSchemaCallback, ): Promise | void { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -4031,7 +4033,7 @@ class Database extends common.GrpcServiceObject { if (typeof statements === 'string' || Array.isArray(statements)) { statements = { - statements: arrify(statements) as string[], + statements: toArray(statements) as string[], }; } const reqOpts: databaseAdmin.spanner.admin.database.v1.IUpdateDatabaseDdlRequest = @@ -4039,7 +4041,7 @@ class Database extends common.GrpcServiceObject { { database: this.formattedName_, }, - statements + statements, ); return this.request( { @@ -4049,7 +4051,7 @@ class Database extends common.GrpcServiceObject { gaxOpts, headers: this.commonHeaders_, }, - callback! + callback!, ); } /** diff --git a/src/helper.ts b/src/helper.ts index 52e88b7a1..fbd16c4a9 100644 --- a/src/helper.ts +++ b/src/helper.ts @@ -20,7 +20,7 @@ import {grpc} from 'google-gax'; * @return {boolean} True if the error is a 'Database not found' error, and otherwise false. 
*/ export function isDatabaseNotFoundError( - error: grpc.ServiceError | undefined + error: grpc.ServiceError | undefined, ): boolean { return ( error !== undefined && @@ -35,7 +35,7 @@ export function isDatabaseNotFoundError( * @return {boolean} True if the error is an 'Instance not found' error, and otherwise false. */ export function isInstanceNotFoundError( - error: grpc.ServiceError | undefined + error: grpc.ServiceError | undefined, ): boolean { return ( error !== undefined && @@ -50,7 +50,7 @@ export function isInstanceNotFoundError( * @return {boolean} True if the error is a 'Could not load the default credentials' error, and otherwise false. */ export function isDefaultCredentialsNotSetError( - error: grpc.ServiceError | undefined + error: grpc.ServiceError | undefined, ): boolean { return ( error !== undefined && @@ -64,12 +64,12 @@ export function isDefaultCredentialsNotSetError( * @return {boolean} True if the error is an 'Unable to detect a Project Id in the current environment' error, and otherwise false. */ export function isProjectIdNotSetInEnvironmentError( - error: grpc.ServiceError | undefined + error: grpc.ServiceError | undefined, ): boolean { return ( error !== undefined && error.message.includes( - 'Unable to detect a Project Id in the current environment' + 'Unable to detect a Project Id in the current environment', ) ); } @@ -80,7 +80,7 @@ export function isProjectIdNotSetInEnvironmentError( * @return {boolean} True if the error is a 'Create session permission' error, and otherwise false. */ export function isCreateSessionPermissionError( - error: grpc.ServiceError | undefined + error: grpc.ServiceError | undefined, ): boolean { return ( error !== undefined && @@ -88,3 +88,31 @@ export function isCreateSessionPermissionError( error.message.includes('spanner.sessions.create') ); } + +/** + * Converts any value into an array. Acts as a replacement for `arrify`. + * If the value is null or undefined, returns an empty array. 
+ * If the value is already an array, returns is unchanges. + * Otherwise, wraps the value in a new array. + * @param value The value to convert into an array. + * @returns An array containing the value, or an empty array. + */ +export function toArray(value: any) { + if (value === null || value === undefined) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + if (typeof value === 'string') { + return [value]; + } + + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } + + return [value]; +} diff --git a/src/index.ts b/src/index.ts index 3be6d3688..39b4b437f 100644 --- a/src/index.ts +++ b/src/index.ts @@ -287,7 +287,7 @@ class Spanner extends GrpcService { endpointWithPort.startsWith('https:') ) { throw new GoogleError( - 'SPANNER_EMULATOR_HOST must not start with a protocol specification (http/https)' + 'SPANNER_EMULATOR_HOST must not start with a protocol specification (http/https)', ); } const index = endpointWithPort.indexOf(':'); @@ -336,7 +336,7 @@ class Spanner extends GrpcService { 'grpc.gcpApiConfig': grpcGcp.createGcpApiConfig(gcpApiConfig), grpc, }, - options || {} + options || {}, ) as {} as SpannerOptions; const directedReadOptions = options.directedReadOptions @@ -395,7 +395,7 @@ class Spanner extends GrpcService { this._observabilityOptions = options.observabilityOptions; this.commonHeaders_ = getCommonHeaders( this.projectFormattedName_, - this._observabilityOptions?.enableEndToEndTracing + this._observabilityOptions?.enableEndToEndTracing, ); ensureInitialContextManagerSet(); this._nthClientId = nextSpannerClientId(); @@ -419,7 +419,7 @@ class Spanner extends GrpcService { if (!this.clients_.has(clientName)) { this.clients_.set( clientName, - new v1[clientName](this.options as ClientOptions) + new v1[clientName](this.options as ClientOptions), ); } return this.clients_.get(clientName)! 
as v1.InstanceAdminClient; @@ -443,7 +443,7 @@ class Spanner extends GrpcService { if (!this.clients_.has(clientName)) { this.clients_.set( clientName, - new v1[clientName](this.options as ClientOptions) + new v1[clientName](this.options as ClientOptions), ); } return this.clients_.get(clientName)! as v1.DatabaseAdminClient; @@ -552,24 +552,24 @@ class Spanner extends GrpcService { */ createInstance( name: string, - config: CreateInstanceRequest + config: CreateInstanceRequest, ): Promise; createInstance( name: string, config: CreateInstanceRequest, - callback: CreateInstanceCallback + callback: CreateInstanceCallback, ): void; createInstance( name: string, config: CreateInstanceRequest, - callback?: CreateInstanceCallback + callback?: CreateInstanceCallback, ): void | Promise { if (!name) { throw new GoogleError('A name is required to create an instance.'); } if (!config) { throw new GoogleError( - ['A configuration object is required to create an instance.'].join('') + ['A configuration object is required to create an instance.'].join(''), ); } const formattedName = Instance.formatName_(this.projectId, name); @@ -584,13 +584,13 @@ class Spanner extends GrpcService { nodeCount: config.nodes, processingUnits: config.processingUnits, }, - config + config, ), }; if (reqOpts.instance.nodeCount && reqOpts.instance.processingUnits) { throw new GoogleError( - ['Only one of nodeCount or processingUnits can be specified.'].join('') + ['Only one of nodeCount or processingUnits can be specified.'].join(''), ); } if (!reqOpts.instance.nodeCount && !reqOpts.instance.processingUnits) { @@ -621,7 +621,7 @@ class Spanner extends GrpcService { const instance = this.instance(formattedName); instance._observabilityOptions = this._observabilityOptions; callback!(null, instance, operation, resp); - } + }, ); } @@ -713,11 +713,11 @@ class Spanner extends GrpcService { getInstances(callback: GetInstancesCallback): void; getInstances( query: GetInstancesOptions, - callback: 
GetInstancesCallback + callback: GetInstancesCallback, ): void; getInstances( optionsOrCallback?: GetInstancesOptions | GetInstancesCallback, - cb?: GetInstancesCallback + cb?: GetInstancesCallback, ): Promise | void { // eslint-disable-next-line @typescript-eslint/no-this-alias const self = this; @@ -745,7 +745,7 @@ class Spanner extends GrpcService { pageSize: (gaxOpts as GetInstancesOptions).pageSize, pageToken: (gaxOpts as GetInstancesOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetInstancesOptions).pageToken; delete (gaxOpts as GetInstancesOptions).pageSize; @@ -772,7 +772,7 @@ class Spanner extends GrpcService { ? extend({}, options, nextPageRequest!) : null; callback!(err, instanceInstances, nextQuery, ...args); - } + }, ); } @@ -830,7 +830,7 @@ class Spanner extends GrpcService { pageSize: (gaxOpts as GetInstancesOptions).pageSize, pageToken: (gaxOpts as GetInstancesOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetInstancesOptions).pageSize; delete (gaxOpts as GetInstancesOptions).pageToken; @@ -945,17 +945,17 @@ class Spanner extends GrpcService { */ createInstanceConfig( name: string, - config: CreateInstanceConfigRequest + config: CreateInstanceConfigRequest, ): Promise; createInstanceConfig( name: string, config: CreateInstanceConfigRequest, - callback: CreateInstanceConfigCallback + callback: CreateInstanceConfigCallback, ): void; createInstanceConfig( name: string, config: CreateInstanceConfigRequest, - callback?: CreateInstanceConfigCallback + callback?: CreateInstanceConfigCallback, ): void | Promise { if (!name) { throw new GoogleError('A name is required to create an instance config.'); @@ -964,14 +964,14 @@ class Spanner extends GrpcService { throw new GoogleError( [ 'A configuration object is required to create an instance config.', - ].join('') + ].join(''), ); } if (!config.baseConfig) { throw new GoogleError( ['Base instance config is required to create an instance config.'].join( - '' - ) + '', + ), 
); } const formattedName = InstanceConfig.formatName_(this.projectId, name); @@ -984,7 +984,7 @@ class Spanner extends GrpcService { name: formattedName, displayName, }, - config + config, ), validateOnly: config.validateOnly, }; @@ -1016,7 +1016,7 @@ class Spanner extends GrpcService { } const instanceConfig = this.instanceConfig(formattedName); callback!(null, instanceConfig, operation, resp); - } + }, ); } @@ -1105,16 +1105,16 @@ class Spanner extends GrpcService { * ``` */ getInstanceConfigs( - query?: GetInstanceConfigsOptions + query?: GetInstanceConfigsOptions, ): Promise; getInstanceConfigs(callback: GetInstanceConfigsCallback): void; getInstanceConfigs( query: GetInstanceConfigsOptions, - callback: GetInstanceConfigsCallback + callback: GetInstanceConfigsCallback, ): void; getInstanceConfigs( optionsOrCallback?: GetInstanceConfigsOptions | GetInstanceConfigsCallback, - cb?: GetInstanceConfigsCallback + cb?: GetInstanceConfigsCallback, ): Promise | void { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; @@ -1138,7 +1138,7 @@ class Spanner extends GrpcService { pageSize: (gaxOpts as GetInstanceConfigsOptions).pageSize, pageToken: (gaxOpts as GetInstanceConfigsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetInstanceConfigsOptions).pageSize; delete (gaxOpts as GetInstanceConfigsOptions).pageToken; @@ -1157,7 +1157,7 @@ class Spanner extends GrpcService { ? extend({}, options, nextPageRequest!) 
: null; callback!(err, instanceConfigs, nextQuery, ...args); - } + }, ); } @@ -1197,7 +1197,7 @@ class Spanner extends GrpcService { * ``` */ getInstanceConfigsStream( - options: GetInstanceConfigsOptions = {} + options: GetInstanceConfigsOptions = {}, ): NodeJS.ReadableStream { const gaxOpts = extend(true, {}, options.gaxOptions); @@ -1214,7 +1214,7 @@ class Spanner extends GrpcService { pageSize: (gaxOpts as GetInstancesOptions).pageSize, pageToken: (gaxOpts as GetInstancesOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetInstancesOptions).pageSize; delete (gaxOpts as GetInstancesOptions).pageToken; @@ -1287,18 +1287,18 @@ class Spanner extends GrpcService { getInstanceConfig(name: string): Promise; getInstanceConfig( name: string, - options: GetInstanceConfigOptions + options: GetInstanceConfigOptions, ): Promise; getInstanceConfig(name: string, callback: GetInstanceConfigCallback): void; getInstanceConfig( name: string, options: GetInstanceConfigOptions, - callback: GetInstanceConfigCallback + callback: GetInstanceConfigCallback, ): void; getInstanceConfig( name: string, optionsOrCallback?: GetInstanceConfigOptions | GetInstanceConfigCallback, - cb?: GetInstanceConfigCallback + cb?: GetInstanceConfigCallback, ): Promise | void { const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb; @@ -1311,7 +1311,7 @@ class Spanner extends GrpcService { {}, { name: 'projects/' + this.projectId + '/instanceConfigs/' + name, - } + }, ); const gaxOpts = extend({}, options.gaxOptions); @@ -1325,7 +1325,7 @@ class Spanner extends GrpcService { }, (err, instanceConfig) => { callback!(err, instanceConfig); - } + }, ); } @@ -1393,20 +1393,20 @@ class Spanner extends GrpcService { * ``` */ getInstanceConfigOperations( - options?: GetInstanceConfigOperationsOptions + options?: GetInstanceConfigOperationsOptions, ): Promise; getInstanceConfigOperations( - callback: GetInstanceConfigOperationsCallback + callback: GetInstanceConfigOperationsCallback, ): void; getInstanceConfigOperations( options: GetInstanceConfigOperationsOptions, - callback: GetInstanceConfigOperationsCallback + callback: GetInstanceConfigOperationsCallback, ): void; getInstanceConfigOperations( optionsOrCallback?: | GetInstanceConfigOperationsOptions | GetInstanceConfigOperationsCallback, - cb?: GetInstanceConfigOperationsCallback + cb?: GetInstanceConfigOperationsCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb!; @@ -1429,7 +1429,7 @@ class Spanner extends GrpcService { pageSize: (gaxOpts as GetInstanceConfigOperationsOptions).pageSize, pageToken: (gaxOpts as GetInstanceConfigOperationsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetInstanceConfigOperationsOptions).pageSize; delete (gaxOpts as GetInstanceConfigOperationsOptions).pageToken; @@ -1449,7 +1449,7 @@ class Spanner extends GrpcService { : null; callback!(err, operations, nextQuery, ...args); - } + }, ); } @@ -1497,7 +1497,7 @@ class Spanner extends GrpcService { instanceConfig(name: string): InstanceConfig { if (!name) { throw new GoogleError( - 'A name is required to access an InstanceConfig object.' 
+ 'A name is required to access an InstanceConfig object.', ); } const key = name.split('/').pop()!; @@ -1537,17 +1537,17 @@ class Spanner extends GrpcService { this.projectId = replaceProjectIdToken(this.projectId, projectId!); this.projectFormattedName_ = replaceProjectIdToken( this.projectFormattedName_, - projectId! + projectId!, ); this.instances_.forEach(instance => { instance.formattedName_ = replaceProjectIdToken( instance.formattedName_, - projectId! + projectId!, ); instance.databases_.forEach(database => { database.formattedName_ = replaceProjectIdToken( database.formattedName_, - projectId! + projectId!, ); }); }); @@ -1555,7 +1555,7 @@ class Spanner extends GrpcService { } config.headers[CLOUD_RESOURCE_HEADER] = replaceProjectIdToken( config.headers[CLOUD_RESOURCE_HEADER], - projectId! + projectId!, ); // Do context propagation propagation.inject(context.active(), config.headers, { @@ -1573,7 +1573,7 @@ class Spanner extends GrpcService { otherArgs: { headers: config.headers, }, - }) + }), ); // Wrap requestFn to inject the spanner request id into every returned error. 
@@ -1720,7 +1720,7 @@ class Spanner extends GrpcService { static date( dateStringOrYear?: string | number, month?: number, - date?: number + date?: number, ): SpannerDate { if (typeof dateStringOrYear === 'number') { return new codec.SpannerDate(dateStringOrYear, month!, date!); @@ -1768,7 +1768,7 @@ class Spanner extends GrpcService { * ``` */ static timestamp( - value?: string | number | p.ITimestamp | PreciseDate + value?: string | number | p.ITimestamp | PreciseDate, ): PreciseDate { value = value || Date.now(); if (value instanceof PreciseDate) { diff --git a/src/instance-config.ts b/src/instance-config.ts index bc069b5d7..626edee9d 100644 --- a/src/instance-config.ts +++ b/src/instance-config.ts @@ -17,7 +17,7 @@ import {ServiceObjectConfig} from '@google-cloud/common'; const common = require('./common-grpc/service-object'); import {google as instanceAdmin} from '../protos/protos'; -import {Operation as GaxOperation} from 'google-gax/build/src/longRunningCalls/longrunning'; +import {Operation as GaxOperation} from 'google-gax'; import snakeCase = require('lodash.snakecase'); import { CLOUD_RESOURCE_HEADER, @@ -64,7 +64,7 @@ export type IInstanceConfig = interface InstanceConfigRequest { ( config: RequestConfig, - callback: ResourceCallback + callback: ResourceCallback, ): void; (config: RequestConfig, callback: RequestCallback): void; (config: RequestConfig, callback: RequestCallback): void; @@ -166,7 +166,7 @@ class InstanceConfig extends common.GrpcServiceObject { createMethod( _: {}, options: CreateInstanceConfigRequest, - callback: CreateInstanceConfigCallback + callback: CreateInstanceConfigCallback, ): void { spanner.createInstanceConfig(formattedName_, options, callback); }, @@ -237,11 +237,11 @@ class InstanceConfig extends common.GrpcServiceObject { get(callback: GetInstanceConfigCallback): void; get( options: GetInstanceConfigOptions, - callback: GetInstanceConfigCallback + callback: GetInstanceConfigCallback, ): void; get( optionsOrCallback?: 
GetInstanceConfigOptions | GetInstanceConfigCallback, - cb?: GetInstanceConfigCallback + cb?: GetInstanceConfigCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb!; @@ -305,22 +305,22 @@ class InstanceConfig extends common.GrpcServiceObject { * ``` */ setMetadata( - config: SetInstanceConfigMetadataRequest + config: SetInstanceConfigMetadataRequest, ): Promise; setMetadata( config: SetInstanceConfigMetadataRequest, - callback: SetInstanceConfigMetadataCallback + callback: SetInstanceConfigMetadataCallback, ): void; setMetadata( config: SetInstanceConfigMetadataRequest, - callback?: SetInstanceConfigMetadataCallback + callback?: SetInstanceConfigMetadataCallback, ): void | Promise { const reqOpts = { instanceConfig: extend( { name: this.formattedName_, }, - config.instanceConfig + config.instanceConfig, ), updateMask: { paths: Object.keys(config.instanceConfig).map(snakeCase), @@ -343,7 +343,7 @@ class InstanceConfig extends common.GrpcServiceObject { : config.gaxOpts, headers: this.resourceHeader_, }, - callback! + callback!, ); } @@ -395,18 +395,18 @@ class InstanceConfig extends common.GrpcServiceObject { * ``` */ delete( - config?: DeleteInstanceConfigRequest + config?: DeleteInstanceConfigRequest, ): Promise; delete(callback: DeleteInstanceConfigCallback): void; delete( config: DeleteInstanceConfigRequest, - callback: DeleteInstanceConfigCallback + callback: DeleteInstanceConfigCallback, ): void; delete( optionsOrCallback?: | DeleteInstanceConfigRequest | DeleteInstanceConfigCallback, - cb?: DeleteInstanceConfigCallback + cb?: DeleteInstanceConfigCallback, ): void | Promise { const config = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -441,7 +441,7 @@ class InstanceConfig extends common.GrpcServiceObject { this.parent.instanceConfigs_.delete(this.id); } callback!(err, resp!); - } + }, ); } diff --git a/src/instance.ts b/src/instance.ts index 1504e5b51..f72ecd690 100644 --- a/src/instance.ts +++ b/src/instance.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import arrify = require('arrify'); +import {toArray} from './helper'; import {ServiceObjectConfig, GetConfig} from '@google-cloud/common'; // eslint-disable-next-line @typescript-eslint/no-var-requires const common = require('./common-grpc/service-object'); @@ -135,7 +135,7 @@ export interface GetInstanceConfig interface InstanceRequest { ( config: RequestConfig, - callback: ResourceCallback + callback: ResourceCallback, ): void; (config: RequestConfig, callback: RequestCallback): void; (config: RequestConfig, callback: RequestCallback): void; @@ -229,7 +229,7 @@ class Instance extends common.GrpcServiceObject { createMethod( _: {}, options: CreateInstanceRequest, - callback: CreateInstanceCallback + callback: CreateInstanceCallback, ): void { spanner.createInstance(formattedName_, options, callback); }, @@ -241,7 +241,7 @@ class Instance extends common.GrpcServiceObject { this._observabilityOptions = spanner._observabilityOptions; this.commonHeaders_ = getCommonHeaders( this.formattedName_, - this._observabilityOptions?.enableEndToEndTracing + this._observabilityOptions?.enableEndToEndTracing, ); } @@ -311,11 +311,11 @@ class Instance extends common.GrpcServiceObject { sourceBackupId: string, backupId: string, options: CopyBackupOptions, - callback?: CopyBackupCallback + callback?: CopyBackupCallback, ): Promise | void { if (!backupId || !sourceBackupId) { throw new GoogleError( - 'A backup ID and source backup ID is required to create a copy of the source backup.' 
+ 'A backup ID and source backup ID is required to create a copy of the source backup.', ); } const copyOfBackup = new Backup(this, backupId, sourceBackupId); @@ -395,7 +395,7 @@ class Instance extends common.GrpcServiceObject { getBackups(options: GetBackupsOptions, callback: GetBackupsCallback): void; getBackups( optionsOrCallback?: GetBackupsOptions | GetBackupsCallback, - cb?: GetBackupsCallback + cb?: GetBackupsCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb!; @@ -418,7 +418,7 @@ class Instance extends common.GrpcServiceObject { pageSize: (gaxOpts as GetBackupsOptions).pageSize, pageToken: (gaxOpts as GetBackupsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetBackupsOptions).pageSize; delete (gaxOpts as GetBackupsOptions).pageToken; @@ -449,7 +449,7 @@ class Instance extends common.GrpcServiceObject { : null; callback(err, backupInstances, nextQuery, ...args); - } + }, ); } @@ -509,7 +509,7 @@ class Instance extends common.GrpcServiceObject { pageSize: (gaxOpts as GetBackupsOptions).pageSize, pageToken: (gaxOpts as GetBackupsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetBackupsOptions).pageSize; delete (gaxOpts as GetBackupsOptions).pageToken; @@ -552,12 +552,12 @@ class Instance extends common.GrpcServiceObject { * @param {object} apiResponse The full API response. 
*/ getBackupOperations( - options?: GetBackupOperationsOptions + options?: GetBackupOperationsOptions, ): Promise; getBackupOperations(callback: GetBackupOperationsCallback): void; getBackupOperations( options: GetBackupOperationsOptions, - callback: GetBackupOperationsCallback + callback: GetBackupOperationsCallback, ): void; /** @@ -601,7 +601,7 @@ class Instance extends common.GrpcServiceObject { optionsOrCallback?: | GetBackupOperationsOptions | GetBackupOperationsCallback, - cb?: GetBackupOperationsCallback + cb?: GetBackupOperationsCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb!; @@ -624,7 +624,7 @@ class Instance extends common.GrpcServiceObject { pageSize: (gaxOpts as GetBackupsOptions).pageSize, pageToken: (gaxOpts as GetBackupsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetBackupsOptions).pageSize; delete (gaxOpts as GetBackupsOptions).pageToken; @@ -647,7 +647,7 @@ class Instance extends common.GrpcServiceObject { : null; callback!(err, operations, nextQuery, ...args); - } + }, ); } @@ -679,12 +679,12 @@ class Instance extends common.GrpcServiceObject { * @param {object} apiResponse The full API response. */ getDatabaseOperations( - options?: GetDatabaseOperationsOptions + options?: GetDatabaseOperationsOptions, ): Promise; getDatabaseOperations(callback: GetDatabaseOperationsCallback): void; getDatabaseOperations( options: GetDatabaseOperationsOptions, - callback: GetDatabaseOperationsCallback + callback: GetDatabaseOperationsCallback, ): void; /** @@ -729,7 +729,7 @@ class Instance extends common.GrpcServiceObject { optionsOrCallback?: | GetDatabaseOperationsOptions | GetDatabaseOperationsCallback, - cb?: GetDatabaseOperationsCallback + cb?: GetDatabaseOperationsCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb!; @@ -752,7 +752,7 @@ class Instance extends common.GrpcServiceObject { pageSize: (gaxOpts as GetBackupsOptions).pageSize, pageToken: (gaxOpts as GetBackupsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetBackupsOptions).pageSize; delete (gaxOpts as GetBackupsOptions).pageToken; @@ -775,7 +775,7 @@ class Instance extends common.GrpcServiceObject { : null; callback!(err, operations, nextQuery, ...args); - } + }, ); } @@ -867,18 +867,18 @@ class Instance extends common.GrpcServiceObject { */ createDatabase( name: string, - options?: CreateDatabaseOptions + options?: CreateDatabaseOptions, ): Promise; createDatabase(name: string, callback: CreateDatabaseCallback): void; createDatabase( name: string, options: CreateDatabaseOptions, - callback: CreateDatabaseCallback + callback: CreateDatabaseCallback, ): void; createDatabase( name: string, optionsOrCallback?: CreateDatabaseOptions | CreateDatabaseCallback, - cb?: CreateDatabaseCallback + cb?: CreateDatabaseCallback, ): void | Promise { if (!name) { throw new GoogleError('A name is required to create a database.'); @@ -904,7 +904,7 @@ class Instance extends common.GrpcServiceObject { parent: this.formattedName_, createStatement: createStatement, }, - options + options, ); delete reqOpts.poolOptions; @@ -912,7 +912,7 @@ class Instance extends common.GrpcServiceObject { delete reqOpts.gaxOptions; if (reqOpts.schema) { - reqOpts.extraStatements = arrify(reqOpts.schema); + reqOpts.extraStatements = toArray(reqOpts.schema); delete reqOpts.schema; } this.request( @@ -931,7 +931,7 @@ class Instance extends common.GrpcServiceObject { const database = this.database(name, poolOptions || poolCtor); database._observabilityOptions = this._observabilityOptions; callback(null, database, operation, resp); - } + }, ); } @@ -961,7 +961,7 @@ class Instance extends common.GrpcServiceObject { database( name: string, poolOptions?: SessionPoolOptions | SessionPoolConstructor, - queryOptions?: 
spannerClient.spanner.v1.ExecuteSqlRequest.IQueryOptions + queryOptions?: spannerClient.spanner.v1.ExecuteSqlRequest.IQueryOptions, ): Database { if (!name) { throw new GoogleError('A name is required to access a Database object.'); @@ -1040,7 +1040,7 @@ class Instance extends common.GrpcServiceObject { delete(gaxOptions: CallOptions, callback: DeleteInstanceCallback): void; delete( optionsOrCallback?: CallOptions | DeleteInstanceCallback, - cb?: DeleteInstanceCallback + cb?: DeleteInstanceCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1050,10 +1050,10 @@ class Instance extends common.GrpcServiceObject { const reqOpts = { name: this.formattedName_, }; - Promise.all( + void Promise.all( Array.from(this.databases_.values()).map(database => { return database.close(); - }) + }), ) .catch(() => {}) .then(() => { @@ -1071,7 +1071,7 @@ class Instance extends common.GrpcServiceObject { this.parent.instances_.delete(this.id); } callback!(err, resp!); - } + }, ); }); } @@ -1117,7 +1117,7 @@ class Instance extends common.GrpcServiceObject { exists(gaxOptions: CallOptions, callback: ExistsInstanceCallback): void; exists( optionsOrCallback?: CallOptions | ExistsInstanceCallback, - cb?: ExistsInstanceCallback + cb?: ExistsInstanceCallback, ): void | Promise { const gaxOptions = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1190,7 +1190,7 @@ class Instance extends common.GrpcServiceObject { get(options: GetInstanceConfig, callback: GetInstanceCallback): void; get( optionsOrCallback?: GetInstanceConfig | GetInstanceCallback, - cb?: GetInstanceCallback + cb?: GetInstanceCallback, ): void | Promise { const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb!; @@ -1218,7 +1218,7 @@ class Instance extends common.GrpcServiceObject { ( err: grpc.ServiceError | null, instance?: Instance, - operation?: GaxOperation | null + operation?: GaxOperation | null, ) => { if (err) { callback(err); @@ -1230,7 +1230,7 @@ class Instance extends common.GrpcServiceObject { this.metadata = metadata; callback(null, this, metadata); }); - } + }, ); return; } @@ -1315,11 +1315,11 @@ class Instance extends common.GrpcServiceObject { getDatabases(callback: GetDatabasesCallback): void; getDatabases( options: GetDatabasesOptions, - callback: GetDatabasesCallback + callback: GetDatabasesCallback, ): void; getDatabases( optionsOrCallback?: GetDatabasesOptions | GetDatabasesCallback, - cb?: GetDatabasesCallback + cb?: GetDatabasesCallback, ): void | Promise { // eslint-disable-next-line @typescript-eslint/no-this-alias const self = this; @@ -1345,7 +1345,7 @@ class Instance extends common.GrpcServiceObject { pageSize: (gaxOpts as GetBackupsOptions).pageSize, pageToken: (gaxOpts as GetBackupsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetBackupsOptions).pageSize; delete (gaxOpts as GetBackupsOptions).pageToken; @@ -1377,7 +1377,7 @@ class Instance extends common.GrpcServiceObject { : null; callback(err, databases, nextQuery, ...args); - } + }, ); } @@ -1437,7 +1437,7 @@ class Instance extends common.GrpcServiceObject { pageSize: (gaxOpts as GetBackupsOptions).pageSize, pageToken: (gaxOpts as GetBackupsOptions).pageToken, }, - reqOpts + reqOpts, ); delete (gaxOpts as GetBackupsOptions).pageSize; delete (gaxOpts as GetBackupsOptions).pageToken; @@ -1514,18 +1514,18 @@ class Instance extends common.GrpcServiceObject { * ``` */ getMetadata( - options?: GetInstanceMetadataOptions + options?: GetInstanceMetadataOptions, ): Promise; getMetadata(callback: GetInstanceMetadataCallback): void; getMetadata( options: GetInstanceMetadataOptions, - callback: GetInstanceMetadataCallback + callback: 
GetInstanceMetadataCallback, ): void; getMetadata( optionsOrCallback?: | GetInstanceMetadataOptions | GetInstanceMetadataCallback, - cb?: GetInstanceMetadataCallback + cb?: GetInstanceMetadataCallback, ): Promise | void { const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb!; @@ -1536,7 +1536,7 @@ class Instance extends common.GrpcServiceObject { }; if (options.fieldNames) { reqOpts['fieldMask'] = { - paths: arrify(options['fieldNames']!).map(snakeCase), + paths: toArray(options['fieldNames']!).map(snakeCase), }; } return this.request( @@ -1552,7 +1552,7 @@ class Instance extends common.GrpcServiceObject { this.metadata = resp; } callback!(err, resp); - } + }, ); } @@ -1606,18 +1606,18 @@ class Instance extends common.GrpcServiceObject { */ setMetadata( metadata: IInstance, - gaxOptions?: CallOptions + gaxOptions?: CallOptions, ): Promise; setMetadata(metadata: IInstance, callback: SetInstanceMetadataCallback): void; setMetadata( metadata: IInstance, gaxOptions: CallOptions, - callback: SetInstanceMetadataCallback + callback: SetInstanceMetadataCallback, ): void; setMetadata( metadata: IInstance, optionsOrCallback?: CallOptions | SetInstanceMetadataCallback, - cb?: SetInstanceMetadataCallback + cb?: SetInstanceMetadataCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1629,7 +1629,7 @@ class Instance extends common.GrpcServiceObject { { name: this.formattedName_, }, - metadata + metadata, ), fieldMask: { paths: Object.keys(metadata).map(snakeCase), @@ -1643,7 +1643,7 @@ class Instance extends common.GrpcServiceObject { gaxOpts, headers: this.commonHeaders_, }, - callback! 
+ callback!, ); } /** diff --git a/src/instrument.ts b/src/instrument.ts index 203aa85e1..9a07a8c0c 100644 --- a/src/instrument.ts +++ b/src/instrument.ts @@ -129,7 +129,7 @@ export {ensureInitialContextManagerSet}; export function startTrace( spanNameSuffix: string, config: traceConfig | undefined, - cb: (span: Span) => T + cb: (span: Span) => T, ): T { if (!config) { config = {} as traceConfig; @@ -180,7 +180,7 @@ export function startTrace( // Finally re-throw the exception. throw e; } - } + }, ); } @@ -220,7 +220,7 @@ export function setSpanError(span: Span, err: Error | String): boolean { */ export function setSpanErrorAndException( span: Span, - err: Error | String + err: Error | String, ): boolean { if (setSpanError(span, err)) { span.recordException(err as Error); diff --git a/src/multiplexed-session.ts b/src/multiplexed-session.ts index 4255fcba0..b44da17a7 100644 --- a/src/multiplexed-session.ts +++ b/src/multiplexed-session.ts @@ -127,7 +127,7 @@ export class MultiplexedSession }); this._multiplexedSession = createSessionResponse; span.addEvent( - `Created multiplexed session ${this._multiplexedSession.id}` + `Created multiplexed session ${this._multiplexedSession.id}`, ); this.emit(MUX_SESSION_AVAILABLE); } catch (e) { @@ -137,7 +137,7 @@ export class MultiplexedSession } finally { span.end(); } - } + }, ); } @@ -178,7 +178,7 @@ export class MultiplexedSession getSession(callback: GetSessionCallback): void { this._acquire().then( session => callback(null, session, session?.txn), - callback + callback, ); } @@ -195,7 +195,7 @@ export class MultiplexedSession const session = await this._getSession(); // Prepare a transaction for a session session!.txn = session!.transaction( - (session!.parent as Database).queryOptions_ + (session!.parent as Database).queryOptions_, ); return session; } @@ -234,7 +234,7 @@ export class MultiplexedSession removeErrorListener = this.removeListener.bind( this, MUX_SESSION_CREATE_ERROR, - reject + reject, ); }), new 
Promise(resolve => { @@ -242,7 +242,7 @@ export class MultiplexedSession removeAvailableListener = this.removeListener.bind( this, MUX_SESSION_AVAILABLE, - resolve + resolve, ); }), ]; diff --git a/src/partial-result-stream.ts b/src/partial-result-stream.ts index 69439f534..225b48f37 100644 --- a/src/partial-result-stream.ts +++ b/src/partial-result-stream.ts @@ -227,7 +227,7 @@ export class PartialResultStream extends Transform implements ResultEvents { _transform( chunk: google.spanner.v1.PartialResultSet, enc: string, - next: Function + next: Function, ): void { this.emit('response', chunk); @@ -274,8 +274,8 @@ export class PartialResultStream extends Transform implements ResultEvents { if (this._numPushFailed === this._options.maxResumeRetries) { this.destroy( new Error( - `Stream is still not ready to receive data after ${this._numPushFailed} attempts to resume.` - ) + `Stream is still not ready to receive data after ${this._numPushFailed} attempts to resume.`, + ), ); return; } @@ -311,7 +311,7 @@ export class PartialResultStream extends Transform implements ResultEvents { const merged = PartialResultStream.merge( field.type as google.spanner.v1.Type, this._pendingValue, - values.shift() + values.shift(), ); values.unshift(...merged); @@ -382,7 +382,7 @@ export class PartialResultStream extends Transform implements ResultEvents { value: codec.decode( value, type as google.spanner.v1.Type, - columnMetadata + columnMetadata, ), }; }); @@ -410,7 +410,7 @@ export class PartialResultStream extends Transform implements ResultEvents { static merge( type: google.spanner.v1.Type, head: Value, - tail: Value + tail: Value, ): Value[] { if ( type.code === google.spanner.v1.TypeCode.ARRAY || @@ -444,7 +444,7 @@ export class PartialResultStream extends Transform implements ResultEvents { static mergeLists( type: google.spanner.v1.Type, head: Value[], - tail: Value[] + tail: Value[], ): Value[] { let listType: google.spanner.v1.Type; @@ -461,7 +461,7 @@ export class 
PartialResultStream extends Transform implements ResultEvents { const merged = PartialResultStream.merge( listType, head.pop(), - tail.shift() + tail.shift(), ); return [...head, ...merged, ...tail]; @@ -486,7 +486,7 @@ export class PartialResultStream extends Transform implements ResultEvents { */ export function partialResultStream( requestFn: RequestFunction, - options?: RowOptions + options?: RowOptions, ): PartialResultStream { const retryableCodes = [grpc.status.UNAVAILABLE]; const maxQueued = 10; @@ -550,7 +550,7 @@ export function partialResultStream( // checkpoint stream has queued. After that, we will destroy the // user's stream with the Deadline exceeded error. setImmediate(() => - batchAndSplitOnTokenStream.destroy(new DeadlineError(err)) + batchAndSplitOnTokenStream.destroy(new DeadlineError(err)), ); return; } @@ -595,7 +595,7 @@ export function partialResultStream( (requestsStream as any).intercept('error', err => // Retry __after__ all pending data has been processed to ensure that the // checkpoint stream is reset at the correct position. 
- setImmediate(() => retry(err)) + setImmediate(() => retry(err)), ); return ( diff --git a/src/request_id_header.ts b/src/request_id_header.ts index d3f103f3f..99c081de6 100644 --- a/src/request_id_header.ts +++ b/src/request_id_header.ts @@ -29,7 +29,7 @@ class AtomicCounter { constructor(initialValue?: number) { this.backingBuffer = new Uint32Array( - new SharedArrayBuffer(Uint32Array.BYTES_PER_ELEMENT) + new SharedArrayBuffer(Uint32Array.BYTES_PER_ELEMENT), ); if (initialValue) { this.increment(initialValue); @@ -63,7 +63,7 @@ function craftRequestId( nthClientId: number, channelId: number, nthRequest: number, - attempt: number + attempt: number, ) { return `${REQUEST_HEADER_VERSION}.${randIdForProcess}.${nthClientId}.${channelId}.${nthRequest}.${attempt}`; } @@ -131,7 +131,7 @@ function injectRequestIDIntoHeaders( headers: {[k: string]: string}, session: any, nthRequest?: number, - attempt?: number + attempt?: number, ) { if (!session) { return headers; @@ -153,7 +153,7 @@ function _metadataWithRequestId( session: any, nthRequest: number, attempt: number, - priorMetadata?: {[k: string]: string} + priorMetadata?: {[k: string]: string}, ): {[k: string]: string} { if (!priorMetadata) { priorMetadata = {}; @@ -172,7 +172,7 @@ function _metadataWithRequestId( clientId, channelId, nthRequest, - attempt + attempt, ); return withReqId; } diff --git a/src/session-factory.ts b/src/session-factory.ts index 0ed540af0..068cac813 100644 --- a/src/session-factory.ts +++ b/src/session-factory.ts @@ -38,7 +38,7 @@ export interface GetSessionCallback { ( err: Error | null, session?: Session | null, - transaction?: Transaction | null + transaction?: Transaction | null, ): void; } @@ -109,7 +109,7 @@ export class SessionFactory constructor( database: Database, name: String, - poolOptions?: SessionPoolConstructor | SessionPoolOptions + poolOptions?: SessionPoolConstructor | SessionPoolOptions, ) { super({ parent: database, @@ -153,7 +153,7 @@ export class SessionFactory : 
this.pool_; sessionHandler!.getSession((err, session, transaction) => - callback(err, session, transaction) + callback(err, session, transaction), ); } diff --git a/src/session-pool.ts b/src/session-pool.ts index e684c043b..c64a20b49 100644 --- a/src/session-pool.ts +++ b/src/session-pool.ts @@ -55,7 +55,7 @@ export interface GetWriteSessionCallback { ( err: Error | null, session?: Session | null, - transaction?: Transaction | null + transaction?: Transaction | null, ): void; } @@ -226,7 +226,7 @@ export class SessionPoolExhaustedError extends GoogleError { * @return true if the error is a 'Session not found' error, and otherwise false. */ export function isSessionNotFoundError( - error: grpc.ServiceError | undefined + error: grpc.ServiceError | undefined, ): boolean { return ( error !== undefined && @@ -432,19 +432,22 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { sessions.forEach(session => this._destroy(session)); - this._requests.onIdle().then(() => { - const leaks = this._getLeaks(); - let error; + this._requests + .onIdle() + .then(() => { + const leaks = this._getLeaks(); + let error; - this._inventory.sessions = []; - this._inventory.borrowed.clear(); + this._inventory.sessions = []; + this._inventory.borrowed.clear(); - if (leaks.length) { - error = new SessionLeakError(leaks); - } + if (leaks.length) { + error = new SessionLeakError(leaks); + } - callback(error); - }); + callback(error); + }) + .catch(err => callback(err)); } /** @@ -455,7 +458,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { */ getReadSession(callback: GetReadSessionCallback): void { this.getSession((error, session) => - callback(error as ServiceError, session) + callback(error as ServiceError, session), ); } @@ -477,7 +480,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { getSession(callback: GetSessionCallback): void { this._acquire().then( session => callback(null, session, 
session.txn!), - callback + callback, ); } @@ -593,7 +596,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { 'Could not acquire session because it was invalid. Retrying', { 'session.id': session.id.toString(), - } + }, ); this._inventory.borrowed.delete(session); return getSession(); @@ -704,7 +707,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { this._pending -= amount; this.emit('createError', e); span.addEvent( - `Requested for ${nRequested} sessions returned ${nReturned}` + `Requested for ${nRequested} sessions returned ${nReturned}`, ); setSpanErrorAndException(span, e as Error); span.end(); @@ -721,7 +724,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { } span.addEvent( - `Requested for ${nRequested} sessions returned ${nReturned}` + `Requested for ${nRequested} sessions returned ${nReturned}`, ); span.end(); }); @@ -768,7 +771,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { const index = this._inventory.sessions.indexOf(session); this._inventory.sessions.splice(index, 1); - this._destroy(session); + void this._destroy(session); } } @@ -851,7 +854,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { removeOnceCloseListener = this.removeListener.bind( this, 'close', - onceCloseListener + onceCloseListener, ); }), new Promise(resolve => { @@ -859,7 +862,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { removeListener = this.removeListener.bind( this, availableEvent, - resolve + resolve, ); }), ]; @@ -876,10 +879,10 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { const error = new Error(errors.Timeout); const timeoutFunction = setTimeout( reject.bind(null, error), - remaining + remaining, ); removeTimeoutListener = () => clearTimeout(timeoutFunction); - }) + }), ); } @@ -904,7 +907,7 @@ export class SessionPool extends 
EventEmitter implements SessionPoolInterface { new Promise((_, reject) => { this._pending -= amount; this._createSessions(amount).catch(reject); - }) + }), ); } } @@ -916,9 +919,9 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { removeErrorListener = this.removeListener.bind( this, 'createError', - reject + reject, ); - }) + }), ); try { @@ -973,7 +976,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { this.release(session); } catch (e) { this._inventory.borrowed.delete(session); - this._destroy(session); + await this._destroy(session); } } @@ -1019,7 +1022,7 @@ export class SessionPool extends EventEmitter implements SessionPoolInterface { */ _prepareTransaction(session: Session): void { const transaction = session.transaction( - (session.parent as Database).queryOptions_ + (session.parent as Database).queryOptions_, ); session.txn = transaction; } diff --git a/src/session.ts b/src/session.ts index 374958fdc..b0dfa814b 100644 --- a/src/session.ts +++ b/src/session.ts @@ -113,7 +113,6 @@ export type DeleteSessionCallback = NormalCallback; * ``` */ export class Session extends common.GrpcServiceObject { - id!: string; formattedName_?: string; txn?: Transaction; lastUsed?: number; @@ -236,7 +235,7 @@ export class Session extends common.GrpcServiceObject { createMethod: ( _: {}, optionsOrCallback: CreateSessionOptions | CreateSessionCallback, - callback: CreateSessionCallback + callback: CreateSessionCallback, ) => { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -264,7 +263,7 @@ export class Session extends common.GrpcServiceObject { this._observabilityOptions = database._observabilityOptions; this.commonHeaders_ = getCommonHeaders( (this.parent as Database).formattedName_, - this._observabilityOptions?.enableEndToEndTracing + this._observabilityOptions?.enableEndToEndTracing, ); this.request = database.request; this.requestStream = database.requestStream; @@ -310,7 +309,7 @@ export class Session extends common.GrpcServiceObject { delete(gaxOptions: CallOptions, callback: DeleteSessionCallback): void; delete( optionsOrCallback?: CallOptions | DeleteSessionCallback, - cb?: DeleteSessionCallback + cb?: DeleteSessionCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -331,10 +330,10 @@ export class Session extends common.GrpcServiceObject { this.commonHeaders_, this, nextNthRequest(database), - 1 + 1, ), }, - callback! + callback!, ); } /** @@ -379,11 +378,11 @@ export class Session extends common.GrpcServiceObject { getMetadata(callback: GetSessionMetadataCallback): void; getMetadata( gaxOptions: CallOptions, - callback: GetSessionMetadataCallback + callback: GetSessionMetadataCallback, ): void; getMetadata( optionsOrCallback?: CallOptions | GetSessionMetadataCallback, - cb?: GetSessionMetadataCallback + cb?: GetSessionMetadataCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -409,7 +408,7 @@ export class Session extends common.GrpcServiceObject { headers, this.session, nextNthRequest(database), - 1 + 1, ), }, (err, resp) => { @@ -419,7 +418,7 @@ export class Session extends common.GrpcServiceObject { this.metadata = resp; } callback!(err, resp); - } + }, ); } /** @@ -445,7 +444,7 @@ export class Session extends common.GrpcServiceObject { keepAlive(gaxOptions: CallOptions, callback: KeepAliveCallback): void; keepAlive( optionsOrCallback?: CallOptions | KeepAliveCallback, - cb?: KeepAliveCallback + cb?: KeepAliveCallback, ): void | Promise { const gaxOpts = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -468,10 +467,10 @@ export class Session extends common.GrpcServiceObject { this.commonHeaders_, this, nextNthRequest(database), - 1 + 1, ), }, - callback! + callback!, ); } @@ -502,7 +501,7 @@ export class Session extends common.GrpcServiceObject { */ snapshot( options?: TimestampBounds, - queryOptions?: google.spanner.v1.ExecuteSqlRequest.IQueryOptions + queryOptions?: google.spanner.v1.ExecuteSqlRequest.IQueryOptions, ) { return new Snapshot(this, options, queryOptions); } @@ -519,7 +518,7 @@ export class Session extends common.GrpcServiceObject { */ transaction( queryOptions?: google.spanner.v1.ExecuteSqlRequest.IQueryOptions, - requestOptions?: Pick + requestOptions?: Pick, ) { return new Transaction(this, undefined, queryOptions, requestOptions); } diff --git a/src/table.ts b/src/table.ts index 1649aa66c..e02a58934 100644 --- a/src/table.ts +++ b/src/table.ts @@ -174,18 +174,18 @@ class Table { */ create( schema: Schema, - gaxOptions?: CallOptions + gaxOptions?: CallOptions, ): Promise; create(schema: Schema, callback: CreateTableCallback): void; create( schema: Schema, gaxOptions: CallOptions, - callback: CreateTableCallback + callback: CreateTableCallback, ): void; create( schema: Schema, gaxOptionsOrCallback?: CallOptions | CreateTableCallback, - cb?: CreateTableCallback + cb?: 
CreateTableCallback, ): Promise | void { const gaxOptions = typeof gaxOptionsOrCallback === 'object' ? gaxOptionsOrCallback : {}; @@ -266,7 +266,7 @@ class Table { */ createReadStream( request: ReadRequest, - options: TimestampBounds = {} + options: TimestampBounds = {}, ): PartialResultStream { const proxyStream = through.obj(); @@ -354,7 +354,7 @@ class Table { delete(gaxOptions: CallOptions, callback: DropTableCallback): void; delete( gaxOptionsOrCallback?: CallOptions | DropTableCallback, - cb?: DropTableCallback + cb?: DropTableCallback, ): Promise | void { const gaxOptions = typeof gaxOptionsOrCallback === 'object' ? gaxOptionsOrCallback : {}; @@ -390,7 +390,7 @@ class Table { if (!callback) { return performDelete() as Promise; } else { - performDelete(); + void performDelete(); } } /** @@ -464,18 +464,18 @@ class Table { */ deleteRows( keys: Key[], - options?: DeleteRowsOptions | CallOptions + options?: DeleteRowsOptions | CallOptions, ): Promise; deleteRows(keys: Key[], callback: DeleteRowsCallback): void; deleteRows( keys: Key[], options: DeleteRowsOptions | CallOptions, - callback: DeleteRowsCallback + callback: DeleteRowsCallback, ): void; deleteRows( keys: Key[], optionsOrCallback?: DeleteRowsOptions | CallOptions | DeleteRowsCallback, - cb?: DeleteRowsCallback + cb?: DeleteRowsCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -535,7 +535,7 @@ class Table { drop(gaxOptions: CallOptions, callback: DropTableCallback): void; drop( gaxOptionsOrCallback?: CallOptions | DropTableCallback, - cb?: DropTableCallback + cb?: DropTableCallback, ): Promise | void { const gaxOptions = typeof gaxOptionsOrCallback === 'object' ? 
gaxOptionsOrCallback : {}; @@ -626,18 +626,18 @@ class Table { */ insert( rows: object | object[], - options?: InsertRowsOptions | CallOptions + options?: InsertRowsOptions | CallOptions, ): Promise; insert(rows: object | object[], callback: InsertRowsCallback): void; insert( rows: object | object[], options: InsertRowsOptions | CallOptions, - callback: InsertRowsCallback + callback: InsertRowsCallback, ): void; insert( rows: object | object[], optionsOrCallback?: InsertRowsOptions | CallOptions | InsertRowsCallback, - cb?: InsertRowsCallback + cb?: InsertRowsCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -788,12 +788,12 @@ class Table { read( request: ReadRequest, options: TimestampBounds, - callback: ReadCallback + callback: ReadCallback, ): void; read( request: ReadRequest, optionsOrCallback?: TimestampBounds | ReadCallback, - cb?: ReadCallback + cb?: ReadCallback, ): Promise | void { const rows: Row[] = []; @@ -874,18 +874,18 @@ class Table { */ replace( rows: object | object[], - options?: ReplaceRowsOptions | CallOptions + options?: ReplaceRowsOptions | CallOptions, ): Promise; replace(rows: object | object[], callback: ReplaceRowsCallback): void; replace( rows: object | object[], options: ReplaceRowsOptions | CallOptions, - callback: ReplaceRowsCallback + callback: ReplaceRowsCallback, ): void; replace( rows: object | object[], optionsOrCallback?: ReplaceRowsOptions | CallOptions | ReplaceRowsCallback, - cb?: ReplaceRowsCallback + cb?: ReplaceRowsCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -963,18 +963,18 @@ class Table { */ update( rows: object | object[], - options?: UpdateRowsOptions | CallOptions + options?: UpdateRowsOptions | CallOptions, ): Promise; update(rows: object | object[], callback: UpdateRowsCallback): void; update( rows: object | object[], options: UpdateRowsOptions | CallOptions, - callback: UpdateRowsCallback + callback: UpdateRowsCallback, ): void; update( rows: object | object[], optionsOrCallback?: UpdateRowsOptions | CallOptions | UpdateRowsCallback, - cb?: UpdateRowsCallback + cb?: UpdateRowsCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1049,18 +1049,18 @@ class Table { */ upsert( rows: object | object[], - options?: UpsertRowsOptions | CallOptions + options?: UpsertRowsOptions | CallOptions, ): Promise; upsert(rows: object | object[], callback: UpsertRowsCallback): void; upsert( rows: object | object[], options: UpsertRowsOptions | CallOptions, - callback: UpsertRowsCallback + callback: UpsertRowsCallback, ): void; upsert( rows: object | object[], optionsOrCallback?: UpsertRowsOptions | CallOptions | UpsertRowsCallback, - cb?: UpsertRowsCallback + cb?: UpsertRowsCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -1086,7 +1086,7 @@ class Table { method: 'deleteRows' | 'insert' | 'replace' | 'update' | 'upsert', rows: object | object[], options: MutateRowsOptions | CallOptions = {}, - callback: CommitCallback + callback: CommitCallback, ): void { const traceConfig: traceConfig = { opts: this._observabilityOptions, @@ -1132,7 +1132,7 @@ class Table { span.end(); callback(err, resp); }); - } + }, ); }); } diff --git a/src/transaction-runner.ts b/src/transaction-runner.ts index cebef958b..ba37e851c 100644 --- a/src/transaction-runner.ts +++ b/src/transaction-runner.ts @@ -119,7 +119,7 @@ export abstract class Runner { constructor( session: Session, transaction: Transaction, - options?: RunTransactionOptions + options?: RunTransactionOptions, ) { this.attempts = 0; this.session = session; @@ -205,10 +205,10 @@ export abstract class Runner { } const transaction = this.session.transaction( - (this.session.parent as Database).queryOptions_ + (this.session.parent as Database).queryOptions_, ); transaction!.setReadWriteTransactionOptions( - this.options as RunTransactionOptions + this.options as RunTransactionOptions, ); if (this.attempts > 0) { await transaction.begin(); @@ -277,7 +277,7 @@ export class TransactionRunner extends Runner { session: Session, transaction: Transaction, runFn: RunTransactionCallback, - options?: RunTransactionOptions + options?: RunTransactionOptions, ) { super(session, transaction, options); this.runFn = runFn; @@ -294,7 +294,7 @@ export class TransactionRunner extends Runner { */ private _interceptErrors( transaction: Transaction, - reject: ErrorCallback + reject: ErrorCallback, ): void { const request = transaction.request; @@ -364,7 +364,7 @@ export class AsyncTransactionRunner extends Runner { session: Session, transaction: Transaction, runFn: AsyncRunTransactionCallback, - options?: RunTransactionOptions + options?: RunTransactionOptions, ) { super(session, transaction, options); this.runFn = runFn; @@ -391,7 +391,7 @@ 
export function isRetryableInternalError(err: grpc.ServiceError): boolean { return ( err.code === grpc.status.INTERNAL && (err.message.includes( - 'Received unexpected EOS on DATA frame from server' + 'Received unexpected EOS on DATA frame from server', ) || err.message.includes('RST_STREAM') || err.message.includes('HTTP/2 error code: INTERNAL_ERROR') || diff --git a/src/transaction.ts b/src/transaction.ts index 4e89fba36..eddac9c47 100644 --- a/src/transaction.ts +++ b/src/transaction.ts @@ -16,7 +16,7 @@ import {DateStruct, PreciseDate} from '@google-cloud/precise-date'; import {promisifyAll} from '@google-cloud/promisify'; -import arrify = require('arrify'); +import {toArray} from './helper'; import Long = require('long'); import {EventEmitter} from 'events'; import {grpc, CallOptions, ServiceError, Status, GoogleError} from 'google-gax'; @@ -200,7 +200,7 @@ export interface BatchUpdateCallback { ( err: null | BatchUpdateError, rowCounts: number[], - response?: spannerClient.spanner.v1.ExecuteBatchDmlResponse + response?: spannerClient.spanner.v1.ExecuteBatchDmlResponse, ): void; } export interface BatchUpdateOptions { @@ -215,7 +215,7 @@ export interface RunCallback { err: null | grpc.ServiceError, rows: Rows, stats: spannerClient.spanner.v1.ResultSetStats, - metadata?: spannerClient.spanner.v1.ResultSetMetadata + metadata?: spannerClient.spanner.v1.ResultSetMetadata, ): void; } @@ -347,7 +347,7 @@ export class Snapshot extends EventEmitter { constructor( session: Session, options?: TimestampBounds, - queryOptions?: IQueryOptions + queryOptions?: IQueryOptions, ) { super(); @@ -365,7 +365,7 @@ export class Snapshot extends EventEmitter { this._observabilityOptions = session._observabilityOptions; this.commonHeaders_ = getCommonHeaders( this._dbName, - this._observabilityOptions?.enableEndToEndTracing + this._observabilityOptions?.enableEndToEndTracing, ); this._traceConfig = { opts: this._observabilityOptions, @@ -422,7 +422,7 @@ export class Snapshot extends 
EventEmitter { begin(gaxOptions: CallOptions, callback: BeginTransactionCallback): void; begin( gaxOptionsOrCallback?: CallOptions | BeginTransactionCallback, - cb?: BeginTransactionCallback + cb?: BeginTransactionCallback, ): void | Promise { const gaxOpts = typeof gaxOptionsOrCallback === 'object' ? gaxOptionsOrCallback : {}; @@ -472,7 +472,7 @@ export class Snapshot extends EventEmitter { }, ( err: null | grpc.ServiceError, - resp: spannerClient.spanner.v1.ITransaction + resp: spannerClient.spanner.v1.ITransaction, ) => { if (err) { setSpanError(span, err); @@ -481,9 +481,9 @@ export class Snapshot extends EventEmitter { } span.end(); callback!(err, resp); - } + }, ); - } + }, ); } @@ -659,7 +659,7 @@ export class Snapshot extends EventEmitter { */ createReadStream( table: string, - request = {} as ReadRequest + request = {} as ReadRequest, ): PartialResultStream { const { gaxOptions, @@ -681,7 +681,7 @@ export class Snapshot extends EventEmitter { } const directedReadOptions = this._getDirectedReadOptions( - request.directedReadOptions + request.directedReadOptions, ); request = Object.assign({}, request); @@ -703,13 +703,13 @@ export class Snapshot extends EventEmitter { requestOptions: this.configureTagOptions( typeof transaction.singleUse !== 'undefined', this.requestOptions?.transactionTag ?? 
undefined, - requestOptions + requestOptions, ), directedReadOptions: directedReadOptions, transaction, table, keySet, - } + }, ); const headers = this.commonHeaders_; @@ -762,7 +762,7 @@ export class Snapshot extends EventEmitter { headers, this.session, nthRequest, - attempt + attempt, ), }); }; @@ -775,7 +775,7 @@ export class Snapshot extends EventEmitter { maxResumeRetries, columnsMetadata, gaxOptions, - } + }, ) ?.on('response', response => { if (response.metadata && response.metadata!.transaction && !this.id) { @@ -787,7 +787,7 @@ export class Snapshot extends EventEmitter { const wasAborted = isErrorAborted(err); if (!this.id && this._useInRunner && !wasAborted) { // TODO: resolve https://github.com/googleapis/nodejs-spanner/issues/2170 - this.begin(); + void this.begin(); } else { if (wasAborted) { span.addEvent('Stream broken. Not safe to retry', { @@ -994,7 +994,7 @@ export class Snapshot extends EventEmitter { read( table: string, requestOrCallback: ReadRequest | ReadCallback, - cb?: ReadCallback + cb?: ReadCallback, ): void | Promise { const rows: Rows = []; @@ -1028,7 +1028,7 @@ export class Snapshot extends EventEmitter { span.end(); callback!(null, rows); }); - } + }, ); } @@ -1113,7 +1113,7 @@ export class Snapshot extends EventEmitter { run(query: string | ExecuteSqlRequest, callback: RunCallback): void; run( query: string | ExecuteSqlRequest, - callback?: RunCallback + callback?: RunCallback, ): void | Promise { const rows: Rows = []; let stats: google.spanner.v1.ResultSetStats; @@ -1146,7 +1146,7 @@ export class Snapshot extends EventEmitter { span.end(); callback!(null, rows, stats, metadata); }); - } + }, ); } @@ -1257,7 +1257,7 @@ export class Snapshot extends EventEmitter { query = Object.assign({}, query) as ExecuteSqlRequest; query.queryOptions = Object.assign( Object.assign({}, this.queryOptions), - query.queryOptions + query.queryOptions, ); const { @@ -1271,7 +1271,7 @@ export class Snapshot extends EventEmitter { let reqOpts; const 
directedReadOptions = this._getDirectedReadOptions( - query.directedReadOptions + query.directedReadOptions, ); const sanitizeRequest = () => { @@ -1300,7 +1300,7 @@ export class Snapshot extends EventEmitter { requestOptions: this.configureTagOptions( typeof transaction.singleUse !== 'undefined', this.requestOptions?.transactionTag ?? undefined, - requestOptions + requestOptions, ), directedReadOptions: directedReadOptions, transaction, @@ -1365,7 +1365,7 @@ export class Snapshot extends EventEmitter { headers, this.session, nthRequest, - attempt + attempt, ), }); }; @@ -1378,7 +1378,7 @@ export class Snapshot extends EventEmitter { maxResumeRetries, columnsMetadata, gaxOptions, - } + }, ) .on('response', response => { if (response.metadata && response.metadata!.transaction && !this.id) { @@ -1391,7 +1391,7 @@ export class Snapshot extends EventEmitter { if (!this.id && this._useInRunner && !wasAborted) { span.addEvent('Stream broken. Safe to retry'); // TODO: resolve https://github.com/googleapis/nodejs-spanner/issues/2170 - this.begin(); + void this.begin(); } else { if (wasAborted) { span.addEvent('Stream broken. 
Not safe to retry', { @@ -1428,7 +1428,7 @@ export class Snapshot extends EventEmitter { configureTagOptions( singleUse?: boolean, transactionTag?: string, - requestOptions = {} + requestOptions = {}, ): IRequestOptions | null { if (!singleUse && transactionTag) { (requestOptions as IRequestOptions).transactionTag = transactionTag; @@ -1450,13 +1450,13 @@ export class Snapshot extends EventEmitter { const keySet: spannerClient.spanner.v1.IKeySet = request.keySet || {}; if (request.keys) { - keySet.keys = arrify(request.keys as string[]).map( - codec.convertToListValue + keySet.keys = toArray(request.keys as string[]).map( + codec.convertToListValue, ); } if (request.ranges) { - keySet.ranges = arrify(request.ranges).map(range => { + keySet.ranges = toArray(request.ranges).map(range => { const encodedRange: spannerClient.spanner.v1.IKeyRange = {}; Object.keys(range).forEach(bound => { @@ -1484,7 +1484,7 @@ export class Snapshot extends EventEmitter { * @returns {object} */ static encodeTimestampBounds( - options: TimestampBounds + options: TimestampBounds, ): spannerClient.spanner.v1.TransactionOptions.IReadOnly { const readOnly: spannerClient.spanner.v1.TransactionOptions.IReadOnly = {}; const {returnReadTimestamp = true} = options; @@ -1503,13 +1503,13 @@ export class Snapshot extends EventEmitter { if (typeof options.maxStaleness === 'number') { readOnly.maxStaleness = codec.convertMsToProtoTimestamp( - options.maxStaleness as number + options.maxStaleness as number, ); } if (typeof options.exactStaleness === 'number') { readOnly.exactStaleness = codec.convertMsToProtoTimestamp( - options.exactStaleness as number + options.exactStaleness as number, ); } @@ -1573,7 +1573,7 @@ export class Snapshot extends EventEmitter { directedReadOptions: | google.spanner.v1.IDirectedReadOptions | null - | undefined + | undefined, ) { if ( !directedReadOptions && @@ -1617,7 +1617,7 @@ export class Snapshot extends EventEmitter { * @private */ private _wrapWithIdWaiter( - 
makeRequest: (resumeToken?: ResumeToken) => Readable + makeRequest: (resumeToken?: ResumeToken) => Readable, ): (resumeToken?: ResumeToken) => Readable { if (this.id || !this._options.readWrite) { return makeRequest; @@ -1707,11 +1707,11 @@ export class Dml extends Snapshot { runUpdate(query: string | ExecuteSqlRequest): Promise; runUpdate( query: string | ExecuteSqlRequest, - callback: RunUpdateCallback + callback: RunUpdateCallback, ): void; runUpdate( query: string | ExecuteSqlRequest, - callback?: RunUpdateCallback + callback?: RunUpdateCallback, ): void | Promise { if (typeof query === 'string') { query = {sql: query} as ExecuteSqlRequest; @@ -1731,7 +1731,7 @@ export class Dml extends Snapshot { ( err: null | grpc.ServiceError, rows: Rows, - stats: spannerClient.spanner.v1.ResultSetStats + stats: spannerClient.spanner.v1.ResultSetStats, ) => { let rowCount = 0; @@ -1745,9 +1745,9 @@ export class Dml extends Snapshot { span.end(); callback!(err, rowCount); - } + }, ); - } + }, ); } } @@ -1853,7 +1853,7 @@ export class Transaction extends Dml { session: Session, options = {} as spannerClient.spanner.v1.TransactionOptions.ReadWrite, queryOptions?: IQueryOptions, - requestOptions?: Pick + requestOptions?: Pick, ) { super(session, undefined, queryOptions); @@ -1934,21 +1934,21 @@ export class Transaction extends Dml { */ batchUpdate( queries: Array, - options?: BatchUpdateOptions | CallOptions + options?: BatchUpdateOptions | CallOptions, ): Promise; batchUpdate( queries: Array, - callback: BatchUpdateCallback + callback: BatchUpdateCallback, ): void; batchUpdate( queries: Array, options: BatchUpdateOptions | CallOptions, - callback: BatchUpdateCallback + callback: BatchUpdateCallback, ): void; batchUpdate( queries: Array, optionsOrCallback?: BatchUpdateOptions | CallOptions | BatchUpdateCallback, - cb?: BatchUpdateCallback + cb?: BatchUpdateCallback, ): Promise | void { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -1990,7 +1990,7 @@ export class Transaction extends Dml { const requestOptionsWithTag = this.configureTagOptions( false, this.requestOptions?.transactionTag ?? undefined, - (options as BatchUpdateOptions).requestOptions + (options as BatchUpdateOptions).requestOptions, ); const reqOpts: spannerClient.spanner.v1.ExecuteBatchDmlRequest = { session: this.session.formattedName_!, @@ -2005,7 +2005,7 @@ export class Transaction extends Dml { this.commonHeaders_, this.session, nextNthRequest(database), - 1 + 1, ); if (this._getSpanner().routeToLeaderEnabled) { addLeaderAwareRoutingHeader(headers); @@ -2027,7 +2027,7 @@ export class Transaction extends Dml { }, ( err: null | grpc.ServiceError, - resp: spannerClient.spanner.v1.ExecuteBatchDmlResponse + resp: spannerClient.spanner.v1.ExecuteBatchDmlResponse, ) => { let batchUpdateError: BatchUpdateError; @@ -2052,7 +2052,7 @@ export class Transaction extends Dml { Number( stats[ (stats as spannerClient.spanner.v1.ResultSetStats).rowCount! - ] + ], )) || 0 ); @@ -2070,13 +2070,13 @@ export class Transaction extends Dml { span.end(); callback!(batchUpdateError!, rowCounts, resp); - } + }, ); }); } private static extractKnownMetadata( - details: IAny[] + details: IAny[], ): grpc.Metadata | undefined { if (details && typeof details[Symbol.iterator] === 'function') { const metadata = new grpc.Metadata(); @@ -2171,7 +2171,7 @@ export class Transaction extends Dml { commit(options: CommitOptions | CallOptions, callback: CommitCallback): void; commit( optionsOrCallback?: CommitOptions | CallOptions | CommitCallback, - cb?: CommitCallback + cb?: CommitCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -2211,7 +2211,7 @@ export class Transaction extends Dml { setSpanError(span, err); span.end(); callback(err, null); - } + }, ); return; } @@ -2232,7 +2232,7 @@ export class Transaction extends Dml { } reqOpts.requestOptions = Object.assign( requestOptions || {}, - this.requestOptions + this.requestOptions, ); const headers = this.commonHeaders_; @@ -2253,12 +2253,12 @@ export class Transaction extends Dml { headers, this.session, nextNthRequest(database), - 1 + 1, ), }, ( err: null | Error, - resp: spannerClient.spanner.v1.ICommitResponse + resp: spannerClient.spanner.v1.ICommitResponse, ) => { this.end(); @@ -2272,19 +2272,19 @@ export class Transaction extends Dml { if (resp && resp.commitTimestamp) { this.commitTimestampProto = resp.commitTimestamp; this.commitTimestamp = new PreciseDate( - resp.commitTimestamp as DateStruct + resp.commitTimestamp as DateStruct, ); } err = Transaction.decorateCommitError( err as ServiceError, - mutations + mutations, ); span.end(); callback!(err as ServiceError | null, resp); - } + }, ); - } + }, ); } @@ -2297,7 +2297,7 @@ export class Transaction extends Dml { */ private static decorateCommitError( err: null | ServiceError, - mutations: spannerClient.spanner.v1.Mutation[] + mutations: spannerClient.spanner.v1.Mutation[], ): null | Error { if (!err) { return err; @@ -2305,7 +2305,7 @@ export class Transaction extends Dml { if (err.code === Status.FAILED_PRECONDITION) { const mismatchErr = Transaction.decoratePossibleJsonMismatchError( err, - mutations + mutations, ); if (mismatchErr) { return mismatchErr; @@ -2327,7 +2327,7 @@ export class Transaction extends Dml { */ private static decoratePossibleJsonMismatchError( err: ServiceError, - mutations: spannerClient.spanner.v1.Mutation[] + mutations: spannerClient.spanner.v1.Mutation[], ): null | ServiceError { const errorMessage = /Invalid value for column (?.+) in table (?.+): Expected JSON./; @@ -2563,13 +2563,13 @@ export class Transaction extends Dml 
{ rollback(callback: spannerClient.spanner.v1.Spanner.RollbackCallback): void; rollback( gaxOptions: CallOptions, - callback: spannerClient.spanner.v1.Spanner.RollbackCallback + callback: spannerClient.spanner.v1.Spanner.RollbackCallback, ): void; rollback( gaxOptionsOrCallback?: | CallOptions | spannerClient.spanner.v1.Spanner.RollbackCallback, - cb?: spannerClient.spanner.v1.Spanner.RollbackCallback + cb?: spannerClient.spanner.v1.Spanner.RollbackCallback, ): void | Promise { const gaxOpts = typeof gaxOptionsOrCallback === 'object' ? gaxOptionsOrCallback : {}; @@ -2611,7 +2611,7 @@ export class Transaction extends Dml { span.end(); this.end(); callback!(err); - } + }, ); }); } @@ -2706,7 +2706,7 @@ export class Transaction extends Dml { private _mutate( method: string, table: string, - keyVals: object | object[] + keyVals: object | object[], ): void { this._queuedMutations.push(buildMutation(method, table, keyVals)); } @@ -2803,9 +2803,9 @@ promisifyAll(Transaction, { function buildMutation( method: string, table: string, - keyVals: object | object[] + keyVals: object | object[], ): spannerClient.spanner.v1.Mutation { - const rows: object[] = arrify(keyVals); + const rows: object[] = toArray(keyVals); const columns = Transaction.getUniqueKeys(rows); const values = rows.map((row, index) => { @@ -2817,7 +2817,7 @@ function buildMutation( [ `Row at index ${index} does not contain the correct number of columns.`, `Missing columns: ${JSON.stringify(missingColumns)}`, - ].join('\n\n') + ].join('\n\n'), ); } @@ -2840,10 +2840,10 @@ function buildMutation( */ function buildDeleteMutation( table: string, - keys: Key[] + keys: Key[], ): spannerClient.spanner.v1.Mutation { const keySet: spannerClient.spanner.v1.IKeySet = { - keys: arrify(keys).map(codec.convertToListValue), + keys: toArray(keys).map(codec.convertToListValue), }; const mutation: spannerClient.spanner.v1.IMutation = { delete: {table, keySet}, @@ -3036,7 +3036,7 @@ export class MutationGroup { export class 
PartitionedDml extends Dml { constructor( session: Session, - options = {} as spannerClient.spanner.v1.TransactionOptions.PartitionedDml + options = {} as spannerClient.spanner.v1.TransactionOptions.PartitionedDml, ) { super(session); this._options = {partitionedDml: options}; @@ -3081,11 +3081,11 @@ export class PartitionedDml extends Dml { runUpdate(query: string | ExecuteSqlRequest): Promise; runUpdate( query: string | ExecuteSqlRequest, - callback: RunUpdateCallback + callback: RunUpdateCallback, ): void; runUpdate( query: string | ExecuteSqlRequest, - callback?: RunUpdateCallback + callback?: RunUpdateCallback, ): void | Promise { return startTrace( 'PartitionedDml.runUpdate', @@ -3102,7 +3102,7 @@ export class PartitionedDml extends Dml { span.end(); callback!(err, count); }); - } + }, ); } } diff --git a/src/v1/database_admin_client.ts b/src/v1/database_admin_client.ts index b93d58e1f..4572ad618 100644 --- a/src/v1/database_admin_client.ts +++ b/src/v1/database_admin_client.ts @@ -31,6 +31,7 @@ import type { import {Transform} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -61,6 +62,8 @@ export class DatabaseAdminClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('spanner'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -96,7 +99,7 @@ export class DatabaseAdminClient { * Developer's Console, e.g. 'grape-spaceship-123'. We will also check * the environment variable GCLOUD_PROJECT for your project ID. 
If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. @@ -115,7 +118,7 @@ export class DatabaseAdminClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof DatabaseAdminClient; @@ -125,7 +128,7 @@ export class DatabaseAdminClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' + 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -209,22 +212,22 @@ export class DatabaseAdminClient { // Create useful helper objects for these. 
this.pathTemplates = { backupPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/backups/{backup}' + 'projects/{project}/instances/{instance}/backups/{backup}', ), backupSchedulePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}' + 'projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}', ), cryptoKeyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}' + 'projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}', ), databasePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}' + 'projects/{project}/instances/{instance}/databases/{database}', ), databaseRolePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}' + 'projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}', ), instancePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}' + 'projects/{project}/instances/{instance}', ), }; @@ -235,32 +238,32 @@ export class DatabaseAdminClient { listDatabases: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'databases' + 'databases', ), listBackups: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'backups' + 'backups', ), listDatabaseOperations: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'operations' + 'operations', ), listBackupOperations: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'operations' + 'operations', ), listDatabaseRoles: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'databaseRoles' + 'databaseRoles', ), listBackupSchedules: new this._gaxModule.PageDescriptor( 'pageToken', 
'nextPageToken', - 'backupSchedules' + 'backupSchedules', ), }; @@ -324,72 +327,72 @@ export class DatabaseAdminClient { .lro(lroOptions) .operationsClient(opts); const createDatabaseResponse = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.Database' + '.google.spanner.admin.database.v1.Database', ) as gax.protobuf.Type; const createDatabaseMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.CreateDatabaseMetadata' + '.google.spanner.admin.database.v1.CreateDatabaseMetadata', ) as gax.protobuf.Type; const updateDatabaseResponse = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.Database' + '.google.spanner.admin.database.v1.Database', ) as gax.protobuf.Type; const updateDatabaseMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.UpdateDatabaseMetadata' + '.google.spanner.admin.database.v1.UpdateDatabaseMetadata', ) as gax.protobuf.Type; const updateDatabaseDdlResponse = protoFilesRoot.lookup( - '.google.protobuf.Empty' + '.google.protobuf.Empty', ) as gax.protobuf.Type; const updateDatabaseDdlMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata' + '.google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata', ) as gax.protobuf.Type; const createBackupResponse = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.Backup' + '.google.spanner.admin.database.v1.Backup', ) as gax.protobuf.Type; const createBackupMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.CreateBackupMetadata' + '.google.spanner.admin.database.v1.CreateBackupMetadata', ) as gax.protobuf.Type; const copyBackupResponse = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.Backup' + '.google.spanner.admin.database.v1.Backup', ) as gax.protobuf.Type; const copyBackupMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.CopyBackupMetadata' + '.google.spanner.admin.database.v1.CopyBackupMetadata', ) as gax.protobuf.Type; const 
restoreDatabaseResponse = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.Database' + '.google.spanner.admin.database.v1.Database', ) as gax.protobuf.Type; const restoreDatabaseMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.database.v1.RestoreDatabaseMetadata' + '.google.spanner.admin.database.v1.RestoreDatabaseMetadata', ) as gax.protobuf.Type; this.descriptors.longrunning = { createDatabase: new this._gaxModule.LongrunningDescriptor( this.operationsClient, createDatabaseResponse.decode.bind(createDatabaseResponse), - createDatabaseMetadata.decode.bind(createDatabaseMetadata) + createDatabaseMetadata.decode.bind(createDatabaseMetadata), ), updateDatabase: new this._gaxModule.LongrunningDescriptor( this.operationsClient, updateDatabaseResponse.decode.bind(updateDatabaseResponse), - updateDatabaseMetadata.decode.bind(updateDatabaseMetadata) + updateDatabaseMetadata.decode.bind(updateDatabaseMetadata), ), updateDatabaseDdl: new this._gaxModule.LongrunningDescriptor( this.operationsClient, updateDatabaseDdlResponse.decode.bind(updateDatabaseDdlResponse), - updateDatabaseDdlMetadata.decode.bind(updateDatabaseDdlMetadata) + updateDatabaseDdlMetadata.decode.bind(updateDatabaseDdlMetadata), ), createBackup: new this._gaxModule.LongrunningDescriptor( this.operationsClient, createBackupResponse.decode.bind(createBackupResponse), - createBackupMetadata.decode.bind(createBackupMetadata) + createBackupMetadata.decode.bind(createBackupMetadata), ), copyBackup: new this._gaxModule.LongrunningDescriptor( this.operationsClient, copyBackupResponse.decode.bind(copyBackupResponse), - copyBackupMetadata.decode.bind(copyBackupMetadata) + copyBackupMetadata.decode.bind(copyBackupMetadata), ), restoreDatabase: new this._gaxModule.LongrunningDescriptor( this.operationsClient, restoreDatabaseResponse.decode.bind(restoreDatabaseResponse), - restoreDatabaseMetadata.decode.bind(restoreDatabaseMetadata) + restoreDatabaseMetadata.decode.bind(restoreDatabaseMetadata), ), 
}; @@ -398,7 +401,7 @@ export class DatabaseAdminClient { 'google.spanner.admin.database.v1.DatabaseAdmin', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -432,12 +435,12 @@ export class DatabaseAdminClient { this.databaseAdminStub = this._gaxGrpc.createStub( this._opts.fallback ? (this._protos as protobuf.Root).lookupService( - 'google.spanner.admin.database.v1.DatabaseAdmin' + 'google.spanner.admin.database.v1.DatabaseAdmin', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.spanner.admin.database.v1.DatabaseAdmin, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -482,7 +485,7 @@ export class DatabaseAdminClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = @@ -493,7 +496,7 @@ export class DatabaseAdminClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -514,7 +517,7 @@ export class DatabaseAdminClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner.googleapis.com'; @@ -532,7 +535,7 @@ export class DatabaseAdminClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner.googleapis.com'; @@ -577,7 +580,7 @@ export class DatabaseAdminClient { * @returns {Promise} A promise that resolves to string containing the project ID. 
*/ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -606,7 +609,7 @@ export class DatabaseAdminClient { */ getDatabase( request?: protos.google.spanner.admin.database.v1.IGetDatabaseRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IDatabase, @@ -623,7 +626,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getDatabase( request: protos.google.spanner.admin.database.v1.IGetDatabaseRequest, @@ -633,7 +636,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getDatabase( request?: protos.google.spanner.admin.database.v1.IGetDatabaseRequest, @@ -652,7 +655,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IDatabase, @@ -675,8 +678,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.getDatabase(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getDatabase request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IDatabase, + | protos.google.spanner.admin.database.v1.IGetDatabaseRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getDatabase response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .getDatabase(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IDatabase, + ( + | protos.google.spanner.admin.database.v1.IGetDatabaseRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('getDatabase response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Drops (aka deletes) a Cloud Spanner database. @@ -698,7 +732,7 @@ export class DatabaseAdminClient { */ dropDatabase( request?: protos.google.spanner.admin.database.v1.IDropDatabaseRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -715,7 +749,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; dropDatabase( request: protos.google.spanner.admin.database.v1.IDropDatabaseRequest, @@ -725,7 +759,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; dropDatabase( request?: protos.google.spanner.admin.database.v1.IDropDatabaseRequest, @@ -744,7 +778,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -767,8 +801,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ database: request.database ?? '', }); - this.initialize(); - return this.innerApiCalls.dropDatabase(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('dropDatabase request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.spanner.admin.database.v1.IDropDatabaseRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('dropDatabase response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .dropDatabase(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.spanner.admin.database.v1.IDropDatabaseRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('dropDatabase response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Returns the schema of a Cloud Spanner database as a list of formatted @@ -790,7 +855,7 @@ export class DatabaseAdminClient { */ getDatabaseDdl( request?: protos.google.spanner.admin.database.v1.IGetDatabaseDdlRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IGetDatabaseDdlResponse, @@ -810,7 +875,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getDatabaseDdl( request: protos.google.spanner.admin.database.v1.IGetDatabaseDdlRequest, @@ -820,7 +885,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getDatabaseDdl( request?: protos.google.spanner.admin.database.v1.IGetDatabaseDdlRequest, @@ -839,7 +904,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IGetDatabaseDdlResponse, @@ -865,8 +930,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ database: request.database ?? 
'', }); - this.initialize(); - return this.innerApiCalls.getDatabaseDdl(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getDatabaseDdl request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IGetDatabaseDdlResponse, + | protos.google.spanner.admin.database.v1.IGetDatabaseDdlRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getDatabaseDdl response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getDatabaseDdl(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IGetDatabaseDdlResponse, + ( + | protos.google.spanner.admin.database.v1.IGetDatabaseDdlRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('getDatabaseDdl response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Sets the access control policy on a database or backup resource. 
@@ -902,7 +998,7 @@ export class DatabaseAdminClient { */ setIamPolicy( request?: protos.google.iam.v1.ISetIamPolicyRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -917,7 +1013,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; setIamPolicy( request: protos.google.iam.v1.ISetIamPolicyRequest, @@ -925,7 +1021,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; setIamPolicy( request?: protos.google.iam.v1.ISetIamPolicyRequest, @@ -940,7 +1036,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -963,8 +1059,34 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ resource: request.resource ?? '', }); - this.initialize(); - return this.innerApiCalls.setIamPolicy(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('setIamPolicy request %j', request); + const wrappedCallback: + | Callback< + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('setIamPolicy response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .setIamPolicy(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.ISetIamPolicyRequest | undefined, + {} | undefined, + ]) => { + this._log.info('setIamPolicy response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets the access control policy for a database or backup resource. @@ -993,7 +1115,7 @@ export class DatabaseAdminClient { */ getIamPolicy( request?: protos.google.iam.v1.IGetIamPolicyRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -1008,7 +1130,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; getIamPolicy( request: protos.google.iam.v1.IGetIamPolicyRequest, @@ -1016,7 +1138,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; getIamPolicy( request?: protos.google.iam.v1.IGetIamPolicyRequest, @@ -1031,7 +1153,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -1054,8 +1176,34 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ resource: request.resource ?? '', }); - this.initialize(); - return this.innerApiCalls.getIamPolicy(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getIamPolicy request %j', request); + const wrappedCallback: + | Callback< + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('getIamPolicy response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getIamPolicy(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.IGetIamPolicyRequest | undefined, + {} | undefined, + ]) => { + this._log.info('getIamPolicy response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Returns permissions that the caller has on the specified database or backup @@ -1088,7 +1236,7 @@ export class DatabaseAdminClient { */ testIamPermissions( request?: protos.google.iam.v1.ITestIamPermissionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.iam.v1.ITestIamPermissionsResponse, @@ -1103,7 +1251,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.ITestIamPermissionsResponse, protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, {} | null | undefined - > + >, ): void; testIamPermissions( request: protos.google.iam.v1.ITestIamPermissionsRequest, @@ -1111,7 +1259,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.ITestIamPermissionsResponse, protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, {} | null | undefined - > + >, ): void; testIamPermissions( request?: protos.google.iam.v1.ITestIamPermissionsRequest, @@ -1126,7 +1274,7 @@ export class DatabaseAdminClient { protos.google.iam.v1.ITestIamPermissionsResponse, protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.iam.v1.ITestIamPermissionsResponse, @@ -1149,8 +1297,34 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ resource: request.resource ?? 
'', }); - this.initialize(); - return this.innerApiCalls.testIamPermissions(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('testIamPermissions request %j', request); + const wrappedCallback: + | Callback< + protos.google.iam.v1.ITestIamPermissionsResponse, + protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('testIamPermissions response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .testIamPermissions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.iam.v1.ITestIamPermissionsResponse, + protos.google.iam.v1.ITestIamPermissionsRequest | undefined, + {} | undefined, + ]) => { + this._log.info('testIamPermissions response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets metadata on a pending or completed @@ -1171,7 +1345,7 @@ export class DatabaseAdminClient { */ getBackup( request?: protos.google.spanner.admin.database.v1.IGetBackupRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IBackup, @@ -1188,7 +1362,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getBackup( request: protos.google.spanner.admin.database.v1.IGetBackupRequest, @@ -1198,7 +1372,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getBackup( request?: protos.google.spanner.admin.database.v1.IGetBackupRequest, @@ -1217,7 +1391,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IBackup, @@ -1240,8 +1414,36 @@ export class DatabaseAdminClient { 
this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.getBackup(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getBackup request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IBackup, + | protos.google.spanner.admin.database.v1.IGetBackupRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getBackup response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getBackup(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IBackup, + protos.google.spanner.admin.database.v1.IGetBackupRequest | undefined, + {} | undefined, + ]) => { + this._log.info('getBackup response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Updates a pending or completed @@ -1269,7 +1471,7 @@ export class DatabaseAdminClient { */ updateBackup( request?: protos.google.spanner.admin.database.v1.IUpdateBackupRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IBackup, @@ -1286,7 +1488,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; updateBackup( request: protos.google.spanner.admin.database.v1.IUpdateBackupRequest, @@ -1296,7 +1498,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; updateBackup( request?: protos.google.spanner.admin.database.v1.IUpdateBackupRequest, @@ -1315,7 +1517,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IBackup, @@ -1338,8 +1540,39 @@ export class 
DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ 'backup.name': request.backup!.name ?? '', }); - this.initialize(); - return this.innerApiCalls.updateBackup(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('updateBackup request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IBackup, + | protos.google.spanner.admin.database.v1.IUpdateBackupRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('updateBackup response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .updateBackup(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IBackup, + ( + | protos.google.spanner.admin.database.v1.IUpdateBackupRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('updateBackup response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Deletes a pending or completed @@ -1360,7 +1593,7 @@ export class DatabaseAdminClient { */ deleteBackup( request?: protos.google.spanner.admin.database.v1.IDeleteBackupRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -1377,7 +1610,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteBackup( request: protos.google.spanner.admin.database.v1.IDeleteBackupRequest, @@ -1387,7 +1620,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteBackup( request?: protos.google.spanner.admin.database.v1.IDeleteBackupRequest, @@ -1406,7 +1639,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ 
-1429,8 +1662,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.deleteBackup(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('deleteBackup request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.spanner.admin.database.v1.IDeleteBackupRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('deleteBackup response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .deleteBackup(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.spanner.admin.database.v1.IDeleteBackupRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('deleteBackup response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Adds split points to specified tables, indexes of a database. 
@@ -1458,7 +1722,7 @@ export class DatabaseAdminClient { */ addSplitPoints( request?: protos.google.spanner.admin.database.v1.IAddSplitPointsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IAddSplitPointsResponse, @@ -1478,7 +1742,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; addSplitPoints( request: protos.google.spanner.admin.database.v1.IAddSplitPointsRequest, @@ -1488,7 +1752,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; addSplitPoints( request?: protos.google.spanner.admin.database.v1.IAddSplitPointsRequest, @@ -1507,7 +1771,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IAddSplitPointsResponse, @@ -1533,8 +1797,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ database: request.database ?? '', }); - this.initialize(); - return this.innerApiCalls.addSplitPoints(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('addSplitPoints request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IAddSplitPointsResponse, + | protos.google.spanner.admin.database.v1.IAddSplitPointsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('addSplitPoints response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .addSplitPoints(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IAddSplitPointsResponse, + ( + | protos.google.spanner.admin.database.v1.IAddSplitPointsRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('addSplitPoints response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Creates a new backup schedule. @@ -1558,7 +1853,7 @@ export class DatabaseAdminClient { */ createBackupSchedule( request?: protos.google.spanner.admin.database.v1.ICreateBackupScheduleRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule, @@ -1578,7 +1873,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; createBackupSchedule( request: protos.google.spanner.admin.database.v1.ICreateBackupScheduleRequest, @@ -1588,7 +1883,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; createBackupSchedule( request?: protos.google.spanner.admin.database.v1.ICreateBackupScheduleRequest, @@ -1607,7 +1902,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule, @@ -1633,8 +1928,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? '', }); - this.initialize(); - return this.innerApiCalls.createBackupSchedule(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('createBackupSchedule request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IBackupSchedule, + | protos.google.spanner.admin.database.v1.ICreateBackupScheduleRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('createBackupSchedule response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .createBackupSchedule(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IBackupSchedule, + ( + | protos.google.spanner.admin.database.v1.ICreateBackupScheduleRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('createBackupSchedule response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets backup schedule for the input schedule name. @@ -1654,7 +1980,7 @@ export class DatabaseAdminClient { */ getBackupSchedule( request?: protos.google.spanner.admin.database.v1.IGetBackupScheduleRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule, @@ -1674,7 +2000,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getBackupSchedule( request: protos.google.spanner.admin.database.v1.IGetBackupScheduleRequest, @@ -1684,7 +2010,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getBackupSchedule( request?: protos.google.spanner.admin.database.v1.IGetBackupScheduleRequest, @@ -1703,7 +2029,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule, @@ -1729,8 +2055,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.getBackupSchedule(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getBackupSchedule request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IBackupSchedule, + | protos.google.spanner.admin.database.v1.IGetBackupScheduleRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getBackupSchedule response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getBackupSchedule(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IBackupSchedule, + ( + | protos.google.spanner.admin.database.v1.IGetBackupScheduleRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('getBackupSchedule response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Updates a backup schedule. 
@@ -1756,7 +2113,7 @@ export class DatabaseAdminClient { */ updateBackupSchedule( request?: protos.google.spanner.admin.database.v1.IUpdateBackupScheduleRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule, @@ -1776,7 +2133,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; updateBackupSchedule( request: protos.google.spanner.admin.database.v1.IUpdateBackupScheduleRequest, @@ -1786,7 +2143,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; updateBackupSchedule( request?: protos.google.spanner.admin.database.v1.IUpdateBackupScheduleRequest, @@ -1805,7 +2162,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule, @@ -1831,8 +2188,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ 'backup_schedule.name': request.backupSchedule!.name ?? '', }); - this.initialize(); - return this.innerApiCalls.updateBackupSchedule(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('updateBackupSchedule request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.database.v1.IBackupSchedule, + | protos.google.spanner.admin.database.v1.IUpdateBackupScheduleRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('updateBackupSchedule response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .updateBackupSchedule(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.database.v1.IBackupSchedule, + ( + | protos.google.spanner.admin.database.v1.IUpdateBackupScheduleRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('updateBackupSchedule response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Deletes a backup schedule. @@ -1852,7 +2240,7 @@ export class DatabaseAdminClient { */ deleteBackupSchedule( request?: protos.google.spanner.admin.database.v1.IDeleteBackupScheduleRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -1872,7 +2260,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteBackupSchedule( request: protos.google.spanner.admin.database.v1.IDeleteBackupScheduleRequest, @@ -1882,7 +2270,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteBackupSchedule( request?: protos.google.spanner.admin.database.v1.IDeleteBackupScheduleRequest, @@ -1901,7 +2289,7 @@ export class DatabaseAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -1927,8 +2315,39 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.deleteBackupSchedule(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('deleteBackupSchedule request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.spanner.admin.database.v1.IDeleteBackupScheduleRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('deleteBackupSchedule response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .deleteBackupSchedule(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.spanner.admin.database.v1.IDeleteBackupScheduleRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('deleteBackupSchedule response %j', response); + return [response, options, rawResponse]; + }, + ); } /** @@ -1990,7 +2409,7 @@ export class DatabaseAdminClient { */ createDatabase( request?: protos.google.spanner.admin.database.v1.ICreateDatabaseRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2011,7 +2430,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createDatabase( request: protos.google.spanner.admin.database.v1.ICreateDatabaseRequest, @@ -2022,7 +2441,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createDatabase( request?: protos.google.spanner.admin.database.v1.ICreateDatabaseRequest, @@ -2043,7 +2462,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2069,8 +2488,40 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.createDatabase(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.database.v1.IDatabase, + protos.google.spanner.admin.database.v1.ICreateDatabaseMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('createDatabase response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('createDatabase request %j', request); + return this.innerApiCalls + .createDatabase(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.database.v1.IDatabase, + protos.google.spanner.admin.database.v1.ICreateDatabaseMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('createDatabase response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `createDatabase()`. @@ -2082,22 +2533,23 @@ export class DatabaseAdminClient { * for more details and examples. 
*/ async checkCreateDatabaseProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.database.v1.Database, protos.google.spanner.admin.database.v1.CreateDatabaseMetadata > > { + this._log.info('createDatabase long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.createDatabase, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.database.v1.Database, @@ -2162,7 +2614,7 @@ export class DatabaseAdminClient { */ updateDatabase( request?: protos.google.spanner.admin.database.v1.IUpdateDatabaseRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2183,7 +2635,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateDatabase( request: protos.google.spanner.admin.database.v1.IUpdateDatabaseRequest, @@ -2194,7 +2646,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateDatabase( request?: protos.google.spanner.admin.database.v1.IUpdateDatabaseRequest, @@ -2215,7 +2667,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2241,8 +2693,40 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ 'database.name': request.database!.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.updateDatabase(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.database.v1.IDatabase, + protos.google.spanner.admin.database.v1.IUpdateDatabaseMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('updateDatabase response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('updateDatabase request %j', request); + return this.innerApiCalls + .updateDatabase(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.database.v1.IDatabase, + protos.google.spanner.admin.database.v1.IUpdateDatabaseMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('updateDatabase response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `updateDatabase()`. @@ -2254,22 +2738,23 @@ export class DatabaseAdminClient { * for more details and examples. 
*/ async checkUpdateDatabaseProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.database.v1.Database, protos.google.spanner.admin.database.v1.UpdateDatabaseMetadata > > { + this._log.info('updateDatabase long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.updateDatabase, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.database.v1.Database, @@ -2340,7 +2825,7 @@ export class DatabaseAdminClient { */ updateDatabaseDdl( request?: protos.google.spanner.admin.database.v1.IUpdateDatabaseDdlRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2361,7 +2846,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateDatabaseDdl( request: protos.google.spanner.admin.database.v1.IUpdateDatabaseDdlRequest, @@ -2372,7 +2857,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateDatabaseDdl( request?: protos.google.spanner.admin.database.v1.IUpdateDatabaseDdlRequest, @@ -2393,7 +2878,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2419,8 +2904,40 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ database: request.database ?? 
'', }); - this.initialize(); - return this.innerApiCalls.updateDatabaseDdl(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.protobuf.IEmpty, + protos.google.spanner.admin.database.v1.IUpdateDatabaseDdlMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('updateDatabaseDdl response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('updateDatabaseDdl request %j', request); + return this.innerApiCalls + .updateDatabaseDdl(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.protobuf.IEmpty, + protos.google.spanner.admin.database.v1.IUpdateDatabaseDdlMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('updateDatabaseDdl response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `updateDatabaseDdl()`. @@ -2432,22 +2949,23 @@ export class DatabaseAdminClient { * for more details and examples. 
*/ async checkUpdateDatabaseDdlProgress( - name: string + name: string, ): Promise< LROperation< protos.google.protobuf.Empty, protos.google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata > > { + this._log.info('updateDatabaseDdl long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.updateDatabaseDdl, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.protobuf.Empty, @@ -2500,7 +3018,7 @@ export class DatabaseAdminClient { */ createBackup( request?: protos.google.spanner.admin.database.v1.ICreateBackupRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2521,7 +3039,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createBackup( request: protos.google.spanner.admin.database.v1.ICreateBackupRequest, @@ -2532,7 +3050,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createBackup( request?: protos.google.spanner.admin.database.v1.ICreateBackupRequest, @@ -2553,7 +3071,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2579,8 +3097,40 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.createBackup(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.database.v1.IBackup, + protos.google.spanner.admin.database.v1.ICreateBackupMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('createBackup response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('createBackup request %j', request); + return this.innerApiCalls + .createBackup(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.database.v1.IBackup, + protos.google.spanner.admin.database.v1.ICreateBackupMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('createBackup response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `createBackup()`. @@ -2592,22 +3142,23 @@ export class DatabaseAdminClient { * for more details and examples. 
*/ async checkCreateBackupProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.database.v1.Backup, protos.google.spanner.admin.database.v1.CreateBackupMetadata > > { + this._log.info('createBackup long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.createBackup, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.database.v1.Backup, @@ -2668,7 +3219,7 @@ export class DatabaseAdminClient { */ copyBackup( request?: protos.google.spanner.admin.database.v1.ICopyBackupRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2689,7 +3240,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; copyBackup( request: protos.google.spanner.admin.database.v1.ICopyBackupRequest, @@ -2700,7 +3251,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; copyBackup( request?: protos.google.spanner.admin.database.v1.ICopyBackupRequest, @@ -2721,7 +3272,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2747,8 +3298,40 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.copyBackup(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.database.v1.IBackup, + protos.google.spanner.admin.database.v1.ICopyBackupMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('copyBackup response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('copyBackup request %j', request); + return this.innerApiCalls + .copyBackup(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.database.v1.IBackup, + protos.google.spanner.admin.database.v1.ICopyBackupMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('copyBackup response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `copyBackup()`. @@ -2760,22 +3343,23 @@ export class DatabaseAdminClient { * for more details and examples. 
*/ async checkCopyBackupProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.database.v1.Backup, protos.google.spanner.admin.database.v1.CopyBackupMetadata > > { + this._log.info('copyBackup long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.copyBackup, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.database.v1.Backup, @@ -2835,7 +3419,7 @@ export class DatabaseAdminClient { */ restoreDatabase( request?: protos.google.spanner.admin.database.v1.IRestoreDatabaseRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2856,7 +3440,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; restoreDatabase( request: protos.google.spanner.admin.database.v1.IRestoreDatabaseRequest, @@ -2867,7 +3451,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; restoreDatabase( request?: protos.google.spanner.admin.database.v1.IRestoreDatabaseRequest, @@ -2888,7 +3472,7 @@ export class DatabaseAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2914,8 +3498,40 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.restoreDatabase(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.database.v1.IDatabase, + protos.google.spanner.admin.database.v1.IRestoreDatabaseMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('restoreDatabase response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('restoreDatabase request %j', request); + return this.innerApiCalls + .restoreDatabase(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.database.v1.IDatabase, + protos.google.spanner.admin.database.v1.IRestoreDatabaseMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('restoreDatabase response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `restoreDatabase()`. @@ -2927,22 +3543,23 @@ export class DatabaseAdminClient { * for more details and examples. 
*/ async checkRestoreDatabaseProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.database.v1.Database, protos.google.spanner.admin.database.v1.RestoreDatabaseMetadata > > { + this._log.info('restoreDatabase long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.restoreDatabase, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.database.v1.Database, @@ -2979,7 +3596,7 @@ export class DatabaseAdminClient { */ listDatabases( request?: protos.google.spanner.admin.database.v1.IListDatabasesRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IDatabase[], @@ -2996,7 +3613,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IDatabase - > + >, ): void; listDatabases( request: protos.google.spanner.admin.database.v1.IListDatabasesRequest, @@ -3006,7 +3623,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IDatabase - > + >, ): void; listDatabases( request?: protos.google.spanner.admin.database.v1.IListDatabasesRequest, @@ -3025,7 +3642,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IDatabase - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IDatabase[], @@ -3048,8 +3665,36 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listDatabases(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.database.v1.IListDatabasesRequest, + | protos.google.spanner.admin.database.v1.IListDatabasesResponse + | null + | undefined, + protos.google.spanner.admin.database.v1.IDatabase + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listDatabases values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listDatabases request %j', request); + return this.innerApiCalls + .listDatabases(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.admin.database.v1.IDatabase[], + protos.google.spanner.admin.database.v1.IListDatabasesRequest | null, + protos.google.spanner.admin.database.v1.IListDatabasesResponse, + ]) => { + this._log.info('listDatabases values %j', response); + return [response, input, output]; + }, + ); } /** @@ -3080,7 +3725,7 @@ export class DatabaseAdminClient { */ listDatabasesStream( request?: protos.google.spanner.admin.database.v1.IListDatabasesRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -3092,11 +3737,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listDatabases']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listDatabases stream %j', request); return this.descriptors.page.listDatabases.createStream( this.innerApiCalls.listDatabases as GaxCall, request, - callSettings + callSettings, ); } @@ -3129,7 +3777,7 @@ export class DatabaseAdminClient { */ listDatabasesAsync( request?: 
protos.google.spanner.admin.database.v1.IListDatabasesRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -3141,11 +3789,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listDatabases']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listDatabases iterate %j', request); return this.descriptors.page.listDatabases.asyncIterate( this.innerApiCalls['listDatabases'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -3222,7 +3873,7 @@ export class DatabaseAdminClient { */ listBackups( request?: protos.google.spanner.admin.database.v1.IListBackupsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IBackup[], @@ -3239,7 +3890,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IBackup - > + >, ): void; listBackups( request: protos.google.spanner.admin.database.v1.IListBackupsRequest, @@ -3249,7 +3900,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IBackup - > + >, ): void; listBackups( request?: protos.google.spanner.admin.database.v1.IListBackupsRequest, @@ -3268,7 +3919,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IBackup - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IBackup[], @@ -3291,8 +3942,36 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listBackups(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.database.v1.IListBackupsRequest, + | protos.google.spanner.admin.database.v1.IListBackupsResponse + | null + | undefined, + protos.google.spanner.admin.database.v1.IBackup + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listBackups values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listBackups request %j', request); + return this.innerApiCalls + .listBackups(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.admin.database.v1.IBackup[], + protos.google.spanner.admin.database.v1.IListBackupsRequest | null, + protos.google.spanner.admin.database.v1.IListBackupsResponse, + ]) => { + this._log.info('listBackups values %j', response); + return [response, input, output]; + }, + ); } /** @@ -3365,7 +4044,7 @@ export class DatabaseAdminClient { */ listBackupsStream( request?: protos.google.spanner.admin.database.v1.IListBackupsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -3377,11 +4056,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listBackups']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listBackups stream %j', request); return this.descriptors.page.listBackups.createStream( this.innerApiCalls.listBackups as GaxCall, request, - callSettings + callSettings, ); } @@ -3456,7 +4138,7 @@ export class DatabaseAdminClient { */ listBackupsAsync( request?: protos.google.spanner.admin.database.v1.IListBackupsRequest, - options?: 
CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -3468,11 +4150,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listBackups']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listBackups iterate %j', request); return this.descriptors.page.listBackups.asyncIterate( this.innerApiCalls['listBackups'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -3559,7 +4244,7 @@ export class DatabaseAdminClient { */ listDatabaseOperations( request?: protos.google.spanner.admin.database.v1.IListDatabaseOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.longrunning.IOperation[], @@ -3576,7 +4261,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listDatabaseOperations( request: protos.google.spanner.admin.database.v1.IListDatabaseOperationsRequest, @@ -3586,7 +4271,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listDatabaseOperations( request?: protos.google.spanner.admin.database.v1.IListDatabaseOperationsRequest, @@ -3605,7 +4290,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): Promise< [ protos.google.longrunning.IOperation[], @@ -3628,12 +4313,36 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listDatabaseOperations( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.database.v1.IListDatabaseOperationsRequest, + | protos.google.spanner.admin.database.v1.IListDatabaseOperationsResponse + | null + | undefined, + protos.google.longrunning.IOperation + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listDatabaseOperations values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listDatabaseOperations request %j', request); + return this.innerApiCalls + .listDatabaseOperations(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.longrunning.IOperation[], + protos.google.spanner.admin.database.v1.IListDatabaseOperationsRequest | null, + protos.google.spanner.admin.database.v1.IListDatabaseOperationsResponse, + ]) => { + this._log.info('listDatabaseOperations values %j', response); + return [response, input, output]; + }, + ); } /** @@ -3711,7 +4420,7 @@ export class DatabaseAdminClient { */ listDatabaseOperationsStream( request?: protos.google.spanner.admin.database.v1.IListDatabaseOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -3723,11 +4432,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listDatabaseOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listDatabaseOperations stream %j', request); return this.descriptors.page.listDatabaseOperations.createStream( this.innerApiCalls.listDatabaseOperations as GaxCall, request, - callSettings + callSettings, ); } @@ -3807,7 
+4519,7 @@ export class DatabaseAdminClient { */ listDatabaseOperationsAsync( request?: protos.google.spanner.admin.database.v1.IListDatabaseOperationsRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -3819,11 +4531,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listDatabaseOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listDatabaseOperations iterate %j', request); return this.descriptors.page.listDatabaseOperations.asyncIterate( this.innerApiCalls['listDatabaseOperations'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -3937,7 +4652,7 @@ export class DatabaseAdminClient { */ listBackupOperations( request?: protos.google.spanner.admin.database.v1.IListBackupOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.longrunning.IOperation[], @@ -3954,7 +4669,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listBackupOperations( request: protos.google.spanner.admin.database.v1.IListBackupOperationsRequest, @@ -3964,7 +4679,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listBackupOperations( request?: protos.google.spanner.admin.database.v1.IListBackupOperationsRequest, @@ -3983,7 +4698,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): Promise< [ protos.google.longrunning.IOperation[], @@ -4006,8 +4721,36 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listBackupOperations(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.database.v1.IListBackupOperationsRequest, + | protos.google.spanner.admin.database.v1.IListBackupOperationsResponse + | null + | undefined, + protos.google.longrunning.IOperation + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listBackupOperations values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listBackupOperations request %j', request); + return this.innerApiCalls + .listBackupOperations(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.longrunning.IOperation[], + protos.google.spanner.admin.database.v1.IListBackupOperationsRequest | null, + protos.google.spanner.admin.database.v1.IListBackupOperationsResponse, + ]) => { + this._log.info('listBackupOperations values %j', response); + return [response, input, output]; + }, + ); } /** @@ -4110,7 +4853,7 @@ export class DatabaseAdminClient { */ listBackupOperationsStream( request?: protos.google.spanner.admin.database.v1.IListBackupOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -4122,11 +4865,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listBackupOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listBackupOperations stream %j', request); return this.descriptors.page.listBackupOperations.createStream( this.innerApiCalls.listBackupOperations as GaxCall, request, - callSettings + callSettings, ); } @@ -4231,7 +4977,7 @@ export class 
DatabaseAdminClient { */ listBackupOperationsAsync( request?: protos.google.spanner.admin.database.v1.IListBackupOperationsRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -4243,11 +4989,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listBackupOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listBackupOperations iterate %j', request); return this.descriptors.page.listBackupOperations.asyncIterate( this.innerApiCalls['listBackupOperations'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -4281,7 +5030,7 @@ export class DatabaseAdminClient { */ listDatabaseRoles( request?: protos.google.spanner.admin.database.v1.IListDatabaseRolesRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IDatabaseRole[], @@ -4298,7 +5047,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IDatabaseRole - > + >, ): void; listDatabaseRoles( request: protos.google.spanner.admin.database.v1.IListDatabaseRolesRequest, @@ -4308,7 +5057,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IDatabaseRole - > + >, ): void; listDatabaseRoles( request?: protos.google.spanner.admin.database.v1.IListDatabaseRolesRequest, @@ -4327,7 +5076,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IDatabaseRole - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IDatabaseRole[], @@ -4350,8 +5099,36 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listDatabaseRoles(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.database.v1.IListDatabaseRolesRequest, + | protos.google.spanner.admin.database.v1.IListDatabaseRolesResponse + | null + | undefined, + protos.google.spanner.admin.database.v1.IDatabaseRole + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listDatabaseRoles values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listDatabaseRoles request %j', request); + return this.innerApiCalls + .listDatabaseRoles(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.admin.database.v1.IDatabaseRole[], + protos.google.spanner.admin.database.v1.IListDatabaseRolesRequest | null, + protos.google.spanner.admin.database.v1.IListDatabaseRolesResponse, + ]) => { + this._log.info('listDatabaseRoles values %j', response); + return [response, input, output]; + }, + ); } /** @@ -4383,7 +5160,7 @@ export class DatabaseAdminClient { */ listDatabaseRolesStream( request?: protos.google.spanner.admin.database.v1.IListDatabaseRolesRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -4395,11 +5172,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listDatabaseRoles']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listDatabaseRoles stream %j', request); return this.descriptors.page.listDatabaseRoles.createStream( this.innerApiCalls.listDatabaseRoles as GaxCall, request, - callSettings + callSettings, ); } @@ -4433,7 +5213,7 @@ export class DatabaseAdminClient { */ 
listDatabaseRolesAsync( request?: protos.google.spanner.admin.database.v1.IListDatabaseRolesRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -4445,11 +5225,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listDatabaseRoles']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listDatabaseRoles iterate %j', request); return this.descriptors.page.listDatabaseRoles.asyncIterate( this.innerApiCalls['listDatabaseRoles'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -4484,7 +5267,7 @@ export class DatabaseAdminClient { */ listBackupSchedules( request?: protos.google.spanner.admin.database.v1.IListBackupSchedulesRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule[], @@ -4501,7 +5284,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IBackupSchedule - > + >, ): void; listBackupSchedules( request: protos.google.spanner.admin.database.v1.IListBackupSchedulesRequest, @@ -4511,7 +5294,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IBackupSchedule - > + >, ): void; listBackupSchedules( request?: protos.google.spanner.admin.database.v1.IListBackupSchedulesRequest, @@ -4530,7 +5313,7 @@ export class DatabaseAdminClient { | null | undefined, protos.google.spanner.admin.database.v1.IBackupSchedule - > + >, ): Promise< [ protos.google.spanner.admin.database.v1.IBackupSchedule[], @@ -4553,8 +5336,36 @@ export class DatabaseAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listBackupSchedules(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.database.v1.IListBackupSchedulesRequest, + | protos.google.spanner.admin.database.v1.IListBackupSchedulesResponse + | null + | undefined, + protos.google.spanner.admin.database.v1.IBackupSchedule + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listBackupSchedules values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listBackupSchedules request %j', request); + return this.innerApiCalls + .listBackupSchedules(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.admin.database.v1.IBackupSchedule[], + protos.google.spanner.admin.database.v1.IListBackupSchedulesRequest | null, + protos.google.spanner.admin.database.v1.IListBackupSchedulesResponse, + ]) => { + this._log.info('listBackupSchedules values %j', response); + return [response, input, output]; + }, + ); } /** @@ -4587,7 +5398,7 @@ export class DatabaseAdminClient { */ listBackupSchedulesStream( request?: protos.google.spanner.admin.database.v1.IListBackupSchedulesRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -4599,11 +5410,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listBackupSchedules']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listBackupSchedules stream %j', request); return this.descriptors.page.listBackupSchedules.createStream( this.innerApiCalls.listBackupSchedules as GaxCall, request, - callSettings + callSettings, ); } @@ -4638,7 +5452,7 @@ export 
class DatabaseAdminClient { */ listBackupSchedulesAsync( request?: protos.google.spanner.admin.database.v1.IListBackupSchedulesRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -4650,11 +5464,14 @@ export class DatabaseAdminClient { }); const defaultCallSettings = this._defaults['listBackupSchedules']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listBackupSchedules iterate %j', request); return this.descriptors.page.listBackupSchedules.asyncIterate( this.innerApiCalls['listBackupSchedules'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -4700,7 +5517,7 @@ export class DatabaseAdminClient { protos.google.longrunning.Operation, protos.google.longrunning.GetOperationRequest, {} | null | undefined - > + >, ): Promise<[protos.google.longrunning.Operation]> { let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { @@ -4750,8 +5567,8 @@ export class DatabaseAdminClient { */ listOperationsAsync( request: protos.google.longrunning.ListOperationsRequest, - options?: gax.CallOptions - ): AsyncIterable { + options?: gax.CallOptions, + ): AsyncIterable { options = options || {}; options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; @@ -4805,7 +5622,7 @@ export class DatabaseAdminClient { protos.google.longrunning.CancelOperationRequest, protos.google.protobuf.Empty, {} | undefined | null - > + >, ): Promise { let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { @@ -4862,7 +5679,7 @@ export class DatabaseAdminClient { protos.google.protobuf.Empty, protos.google.longrunning.DeleteOperationRequest, {} | null | undefined - > + >, ): Promise { let options: gax.CallOptions; if (typeof optionsOrCallback === 
'function' && callback === undefined) { @@ -4947,7 +5764,7 @@ export class DatabaseAdminClient { project: string, instance: string, database: string, - schedule: string + schedule: string, ) { return this.pathTemplates.backupSchedulePathTemplate.render({ project: project, @@ -4966,7 +5783,7 @@ export class DatabaseAdminClient { */ matchProjectFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).project; } @@ -4979,7 +5796,7 @@ export class DatabaseAdminClient { */ matchInstanceFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).instance; } @@ -4992,7 +5809,7 @@ export class DatabaseAdminClient { */ matchDatabaseFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).database; } @@ -5005,7 +5822,7 @@ export class DatabaseAdminClient { */ matchScheduleFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).schedule; } @@ -5022,7 +5839,7 @@ export class DatabaseAdminClient { project: string, location: string, keyRing: string, - cryptoKey: string + cryptoKey: string, ) { return this.pathTemplates.cryptoKeyPathTemplate.render({ project: project, @@ -5142,7 +5959,7 @@ export class DatabaseAdminClient { project: string, instance: string, database: string, - role: string + role: string, ) { return this.pathTemplates.databaseRolePathTemplate.render({ project: project, @@ -5245,9 +6062,10 @@ export class DatabaseAdminClient { close(): Promise { if (this.databaseAdminStub && !this._terminated) { return this.databaseAdminStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); - this.operationsClient.close(); + void 
this.operationsClient.close(); }); } return Promise.resolve(); diff --git a/src/v1/instance_admin_client.ts b/src/v1/instance_admin_client.ts index 2375fb0d7..4ec8d9979 100644 --- a/src/v1/instance_admin_client.ts +++ b/src/v1/instance_admin_client.ts @@ -31,6 +31,7 @@ import type { import {Transform} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -75,6 +76,8 @@ export class InstanceAdminClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('spanner'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -110,7 +113,7 @@ export class InstanceAdminClient { * Developer's Console, e.g. 'grape-spaceship-123'. We will also check * the environment variable GCLOUD_PROJECT for your project ID. If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. @@ -129,7 +132,7 @@ export class InstanceAdminClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof InstanceAdminClient; @@ -139,7 +142,7 @@ export class InstanceAdminClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' 
+ 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -223,16 +226,16 @@ export class InstanceAdminClient { // Create useful helper objects for these. this.pathTemplates = { instancePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}' + 'projects/{project}/instances/{instance}', ), instanceConfigPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instanceConfigs/{instance_config}' + 'projects/{project}/instanceConfigs/{instance_config}', ), instancePartitionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/instancePartitions/{instance_partition}' + 'projects/{project}/instances/{instance}/instancePartitions/{instance_partition}', ), projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' + 'projects/{project}', ), }; @@ -243,27 +246,27 @@ export class InstanceAdminClient { listInstanceConfigs: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'instanceConfigs' + 'instanceConfigs', ), listInstanceConfigOperations: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'operations' + 'operations', ), listInstances: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'instances' + 'instances', ), listInstancePartitions: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'instancePartitions' + 'instancePartitions', ), listInstancePartitionOperations: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'operations' + 'operations', ), }; @@ -312,91 +315,91 @@ export class InstanceAdminClient { .lro(lroOptions) .operationsClient(opts); const createInstanceConfigResponse = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.InstanceConfig' + '.google.spanner.admin.instance.v1.InstanceConfig', ) as gax.protobuf.Type; const createInstanceConfigMetadata = protoFilesRoot.lookup( - 
'.google.spanner.admin.instance.v1.CreateInstanceConfigMetadata' + '.google.spanner.admin.instance.v1.CreateInstanceConfigMetadata', ) as gax.protobuf.Type; const updateInstanceConfigResponse = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.InstanceConfig' + '.google.spanner.admin.instance.v1.InstanceConfig', ) as gax.protobuf.Type; const updateInstanceConfigMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata' + '.google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata', ) as gax.protobuf.Type; const createInstanceResponse = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.Instance' + '.google.spanner.admin.instance.v1.Instance', ) as gax.protobuf.Type; const createInstanceMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.CreateInstanceMetadata' + '.google.spanner.admin.instance.v1.CreateInstanceMetadata', ) as gax.protobuf.Type; const updateInstanceResponse = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.Instance' + '.google.spanner.admin.instance.v1.Instance', ) as gax.protobuf.Type; const updateInstanceMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.UpdateInstanceMetadata' + '.google.spanner.admin.instance.v1.UpdateInstanceMetadata', ) as gax.protobuf.Type; const createInstancePartitionResponse = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.InstancePartition' + '.google.spanner.admin.instance.v1.InstancePartition', ) as gax.protobuf.Type; const createInstancePartitionMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.CreateInstancePartitionMetadata' + '.google.spanner.admin.instance.v1.CreateInstancePartitionMetadata', ) as gax.protobuf.Type; const updateInstancePartitionResponse = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.InstancePartition' + '.google.spanner.admin.instance.v1.InstancePartition', ) as gax.protobuf.Type; const updateInstancePartitionMetadata = 
protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata' + '.google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata', ) as gax.protobuf.Type; const moveInstanceResponse = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.MoveInstanceResponse' + '.google.spanner.admin.instance.v1.MoveInstanceResponse', ) as gax.protobuf.Type; const moveInstanceMetadata = protoFilesRoot.lookup( - '.google.spanner.admin.instance.v1.MoveInstanceMetadata' + '.google.spanner.admin.instance.v1.MoveInstanceMetadata', ) as gax.protobuf.Type; this.descriptors.longrunning = { createInstanceConfig: new this._gaxModule.LongrunningDescriptor( this.operationsClient, createInstanceConfigResponse.decode.bind(createInstanceConfigResponse), - createInstanceConfigMetadata.decode.bind(createInstanceConfigMetadata) + createInstanceConfigMetadata.decode.bind(createInstanceConfigMetadata), ), updateInstanceConfig: new this._gaxModule.LongrunningDescriptor( this.operationsClient, updateInstanceConfigResponse.decode.bind(updateInstanceConfigResponse), - updateInstanceConfigMetadata.decode.bind(updateInstanceConfigMetadata) + updateInstanceConfigMetadata.decode.bind(updateInstanceConfigMetadata), ), createInstance: new this._gaxModule.LongrunningDescriptor( this.operationsClient, createInstanceResponse.decode.bind(createInstanceResponse), - createInstanceMetadata.decode.bind(createInstanceMetadata) + createInstanceMetadata.decode.bind(createInstanceMetadata), ), updateInstance: new this._gaxModule.LongrunningDescriptor( this.operationsClient, updateInstanceResponse.decode.bind(updateInstanceResponse), - updateInstanceMetadata.decode.bind(updateInstanceMetadata) + updateInstanceMetadata.decode.bind(updateInstanceMetadata), ), createInstancePartition: new this._gaxModule.LongrunningDescriptor( this.operationsClient, createInstancePartitionResponse.decode.bind( - createInstancePartitionResponse + createInstancePartitionResponse, ), 
createInstancePartitionMetadata.decode.bind( - createInstancePartitionMetadata - ) + createInstancePartitionMetadata, + ), ), updateInstancePartition: new this._gaxModule.LongrunningDescriptor( this.operationsClient, updateInstancePartitionResponse.decode.bind( - updateInstancePartitionResponse + updateInstancePartitionResponse, ), updateInstancePartitionMetadata.decode.bind( - updateInstancePartitionMetadata - ) + updateInstancePartitionMetadata, + ), ), moveInstance: new this._gaxModule.LongrunningDescriptor( this.operationsClient, moveInstanceResponse.decode.bind(moveInstanceResponse), - moveInstanceMetadata.decode.bind(moveInstanceMetadata) + moveInstanceMetadata.decode.bind(moveInstanceMetadata), ), }; @@ -405,7 +408,7 @@ export class InstanceAdminClient { 'google.spanner.admin.instance.v1.InstanceAdmin', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -439,12 +442,12 @@ export class InstanceAdminClient { this.instanceAdminStub = this._gaxGrpc.createStub( this._opts.fallback ? 
(this._protos as protobuf.Root).lookupService( - 'google.spanner.admin.instance.v1.InstanceAdmin' + 'google.spanner.admin.instance.v1.InstanceAdmin', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.spanner.admin.instance.v1.InstanceAdmin, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -484,7 +487,7 @@ export class InstanceAdminClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = @@ -495,7 +498,7 @@ export class InstanceAdminClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -516,7 +519,7 @@ export class InstanceAdminClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner.googleapis.com'; @@ -534,7 +537,7 @@ export class InstanceAdminClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner.googleapis.com'; @@ -579,7 +582,7 @@ export class InstanceAdminClient { * @returns {Promise} A promise that resolves to string containing the project ID. 
*/ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -608,7 +611,7 @@ export class InstanceAdminClient { */ getInstanceConfig( request?: protos.google.spanner.admin.instance.v1.IGetInstanceConfigRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstanceConfig, @@ -628,7 +631,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getInstanceConfig( request: protos.google.spanner.admin.instance.v1.IGetInstanceConfigRequest, @@ -638,7 +641,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getInstanceConfig( request?: protos.google.spanner.admin.instance.v1.IGetInstanceConfigRequest, @@ -657,7 +660,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstanceConfig, @@ -683,8 +686,39 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.getInstanceConfig(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getInstanceConfig request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.instance.v1.IInstanceConfig, + | protos.google.spanner.admin.instance.v1.IGetInstanceConfigRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getInstanceConfig response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .getInstanceConfig(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.instance.v1.IInstanceConfig, + ( + | protos.google.spanner.admin.instance.v1.IGetInstanceConfigRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('getInstanceConfig response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Deletes the instance configuration. Deletion is only allowed when no @@ -722,7 +756,7 @@ export class InstanceAdminClient { */ deleteInstanceConfig( request?: protos.google.spanner.admin.instance.v1.IDeleteInstanceConfigRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -742,7 +776,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteInstanceConfig( request: protos.google.spanner.admin.instance.v1.IDeleteInstanceConfigRequest, @@ -752,7 +786,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteInstanceConfig( request?: protos.google.spanner.admin.instance.v1.IDeleteInstanceConfigRequest, @@ -771,7 +805,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -797,8 +831,39 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.deleteInstanceConfig(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('deleteInstanceConfig request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.spanner.admin.instance.v1.IDeleteInstanceConfigRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('deleteInstanceConfig response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .deleteInstanceConfig(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.spanner.admin.instance.v1.IDeleteInstanceConfigRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('deleteInstanceConfig response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets information about a particular instance. @@ -822,7 +887,7 @@ export class InstanceAdminClient { */ getInstance( request?: protos.google.spanner.admin.instance.v1.IGetInstanceRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstance, @@ -839,7 +904,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getInstance( request: protos.google.spanner.admin.instance.v1.IGetInstanceRequest, @@ -849,7 +914,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getInstance( request?: protos.google.spanner.admin.instance.v1.IGetInstanceRequest, @@ -868,7 +933,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstance, @@ -891,8 +956,39 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.getInstance(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getInstance request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.instance.v1.IInstance, + | protos.google.spanner.admin.instance.v1.IGetInstanceRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getInstance response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getInstance(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.instance.v1.IInstance, + ( + | protos.google.spanner.admin.instance.v1.IGetInstanceRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('getInstance response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Deletes an instance. 
@@ -921,7 +1017,7 @@ export class InstanceAdminClient { */ deleteInstance( request?: protos.google.spanner.admin.instance.v1.IDeleteInstanceRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -941,7 +1037,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteInstance( request: protos.google.spanner.admin.instance.v1.IDeleteInstanceRequest, @@ -951,7 +1047,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteInstance( request?: protos.google.spanner.admin.instance.v1.IDeleteInstanceRequest, @@ -970,7 +1066,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -996,8 +1092,39 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.deleteInstance(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('deleteInstance request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.spanner.admin.instance.v1.IDeleteInstanceRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('deleteInstance response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .deleteInstance(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.spanner.admin.instance.v1.IDeleteInstanceRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('deleteInstance response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Sets the access control policy on an instance resource. Replaces any @@ -1031,7 +1158,7 @@ export class InstanceAdminClient { */ setIamPolicy( request?: protos.google.iam.v1.ISetIamPolicyRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -1046,7 +1173,7 @@ export class InstanceAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; setIamPolicy( request: protos.google.iam.v1.ISetIamPolicyRequest, @@ -1054,7 +1181,7 @@ export class InstanceAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; setIamPolicy( request?: protos.google.iam.v1.ISetIamPolicyRequest, @@ -1069,7 +1196,7 @@ export class InstanceAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -1092,8 +1219,34 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ resource: request.resource ?? '', }); - this.initialize(); - return this.innerApiCalls.setIamPolicy(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('setIamPolicy request %j', request); + const wrappedCallback: + | Callback< + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.ISetIamPolicyRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('setIamPolicy response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .setIamPolicy(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.ISetIamPolicyRequest | undefined, + {} | undefined, + ]) => { + this._log.info('setIamPolicy response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets the access control policy for an instance resource. Returns an empty @@ -1119,7 +1272,7 @@ export class InstanceAdminClient { */ getIamPolicy( request?: protos.google.iam.v1.IGetIamPolicyRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -1134,7 +1287,7 @@ export class InstanceAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; getIamPolicy( request: protos.google.iam.v1.IGetIamPolicyRequest, @@ -1142,7 +1295,7 @@ export class InstanceAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): void; getIamPolicy( request?: protos.google.iam.v1.IGetIamPolicyRequest, @@ -1157,7 +1310,7 @@ export class InstanceAdminClient { protos.google.iam.v1.IPolicy, protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.iam.v1.IPolicy, @@ -1180,8 +1333,34 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ resource: request.resource ?? 
'', }); - this.initialize(); - return this.innerApiCalls.getIamPolicy(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getIamPolicy request %j', request); + const wrappedCallback: + | Callback< + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.IGetIamPolicyRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getIamPolicy response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getIamPolicy(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.iam.v1.IPolicy, + protos.google.iam.v1.IGetIamPolicyRequest | undefined, + {} | undefined, + ]) => { + this._log.info('getIamPolicy response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Returns permissions that the caller has on the specified instance resource. 
@@ -1210,7 +1389,7 @@ export class InstanceAdminClient { */ testIamPermissions( request?: protos.google.iam.v1.ITestIamPermissionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.iam.v1.ITestIamPermissionsResponse, @@ -1225,7 +1404,7 @@ export class InstanceAdminClient { protos.google.iam.v1.ITestIamPermissionsResponse, protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, {} | null | undefined - > + >, ): void; testIamPermissions( request: protos.google.iam.v1.ITestIamPermissionsRequest, @@ -1233,7 +1412,7 @@ export class InstanceAdminClient { protos.google.iam.v1.ITestIamPermissionsResponse, protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, {} | null | undefined - > + >, ): void; testIamPermissions( request?: protos.google.iam.v1.ITestIamPermissionsRequest, @@ -1248,7 +1427,7 @@ export class InstanceAdminClient { protos.google.iam.v1.ITestIamPermissionsResponse, protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.iam.v1.ITestIamPermissionsResponse, @@ -1271,8 +1450,34 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ resource: request.resource ?? '', }); - this.initialize(); - return this.innerApiCalls.testIamPermissions(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('testIamPermissions request %j', request); + const wrappedCallback: + | Callback< + protos.google.iam.v1.ITestIamPermissionsResponse, + protos.google.iam.v1.ITestIamPermissionsRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('testIamPermissions response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .testIamPermissions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.iam.v1.ITestIamPermissionsResponse, + protos.google.iam.v1.ITestIamPermissionsRequest | undefined, + {} | undefined, + ]) => { + this._log.info('testIamPermissions response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets information about a particular instance partition. @@ -1292,7 +1497,7 @@ export class InstanceAdminClient { */ getInstancePartition( request?: protos.google.spanner.admin.instance.v1.IGetInstancePartitionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstancePartition, @@ -1312,7 +1517,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getInstancePartition( request: protos.google.spanner.admin.instance.v1.IGetInstancePartitionRequest, @@ -1322,7 +1527,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; getInstancePartition( request?: protos.google.spanner.admin.instance.v1.IGetInstancePartitionRequest, @@ -1341,7 +1546,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstancePartition, @@ -1367,8 +1572,39 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.getInstancePartition(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getInstancePartition request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.admin.instance.v1.IInstancePartition, + | protos.google.spanner.admin.instance.v1.IGetInstancePartitionRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('getInstancePartition response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .getInstancePartition(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.admin.instance.v1.IInstancePartition, + ( + | protos.google.spanner.admin.instance.v1.IGetInstancePartitionRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('getInstancePartition response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Deletes an existing instance partition. Requires that the @@ -1399,7 +1635,7 @@ export class InstanceAdminClient { */ deleteInstancePartition( request?: protos.google.spanner.admin.instance.v1.IDeleteInstancePartitionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -1419,7 +1655,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteInstancePartition( request: protos.google.spanner.admin.instance.v1.IDeleteInstancePartitionRequest, @@ -1429,7 +1665,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): void; deleteInstancePartition( request?: protos.google.spanner.admin.instance.v1.IDeleteInstancePartitionRequest, @@ -1448,7 +1684,7 @@ export class InstanceAdminClient { | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -1474,12 +1710,39 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.deleteInstancePartition( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + this._log.info('deleteInstancePartition request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + | protos.google.spanner.admin.instance.v1.IDeleteInstancePartitionRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('deleteInstancePartition response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .deleteInstancePartition(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + ( + | protos.google.spanner.admin.instance.v1.IDeleteInstancePartitionRequest + | undefined + ), + {} | undefined, + ]) => { + this._log.info('deleteInstancePartition response %j', response); + return [response, options, rawResponse]; + }, + ); } /** @@ -1555,7 +1818,7 @@ export class InstanceAdminClient { */ createInstanceConfig( request?: protos.google.spanner.admin.instance.v1.ICreateInstanceConfigRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -1576,7 +1839,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createInstanceConfig( request: protos.google.spanner.admin.instance.v1.ICreateInstanceConfigRequest, @@ -1587,7 +1850,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createInstanceConfig( request?: protos.google.spanner.admin.instance.v1.ICreateInstanceConfigRequest, @@ -1608,7 +1871,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | 
undefined - > + >, ): Promise< [ LROperation< @@ -1634,8 +1897,40 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? '', }); - this.initialize(); - return this.innerApiCalls.createInstanceConfig(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.instance.v1.IInstanceConfig, + protos.google.spanner.admin.instance.v1.ICreateInstanceConfigMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('createInstanceConfig response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('createInstanceConfig request %j', request); + return this.innerApiCalls + .createInstanceConfig(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.instance.v1.IInstanceConfig, + protos.google.spanner.admin.instance.v1.ICreateInstanceConfigMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('createInstanceConfig response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `createInstanceConfig()`. @@ -1647,22 +1942,23 @@ export class InstanceAdminClient { * for more details and examples. 
*/ async checkCreateInstanceConfigProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.instance.v1.InstanceConfig, protos.google.spanner.admin.instance.v1.CreateInstanceConfigMetadata > > { + this._log.info('createInstanceConfig long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.createInstanceConfig, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.instance.v1.InstanceConfig, @@ -1746,7 +2042,7 @@ export class InstanceAdminClient { */ updateInstanceConfig( request?: protos.google.spanner.admin.instance.v1.IUpdateInstanceConfigRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -1767,7 +2063,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateInstanceConfig( request: protos.google.spanner.admin.instance.v1.IUpdateInstanceConfigRequest, @@ -1778,7 +2074,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateInstanceConfig( request?: protos.google.spanner.admin.instance.v1.IUpdateInstanceConfigRequest, @@ -1799,7 +2095,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -1825,8 +2121,40 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ 'instance_config.name': request.instanceConfig!.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.updateInstanceConfig(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.instance.v1.IInstanceConfig, + protos.google.spanner.admin.instance.v1.IUpdateInstanceConfigMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('updateInstanceConfig response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('updateInstanceConfig request %j', request); + return this.innerApiCalls + .updateInstanceConfig(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.instance.v1.IInstanceConfig, + protos.google.spanner.admin.instance.v1.IUpdateInstanceConfigMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('updateInstanceConfig response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `updateInstanceConfig()`. @@ -1838,22 +2166,23 @@ export class InstanceAdminClient { * for more details and examples. 
*/ async checkUpdateInstanceConfigProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.instance.v1.InstanceConfig, protos.google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata > > { + this._log.info('updateInstanceConfig long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.updateInstanceConfig, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.instance.v1.InstanceConfig, @@ -1919,7 +2248,7 @@ export class InstanceAdminClient { */ createInstance( request?: protos.google.spanner.admin.instance.v1.ICreateInstanceRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -1940,7 +2269,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createInstance( request: protos.google.spanner.admin.instance.v1.ICreateInstanceRequest, @@ -1951,7 +2280,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createInstance( request?: protos.google.spanner.admin.instance.v1.ICreateInstanceRequest, @@ -1972,7 +2301,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -1998,8 +2327,40 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.createInstance(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.instance.v1.IInstance, + protos.google.spanner.admin.instance.v1.ICreateInstanceMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('createInstance response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('createInstance request %j', request); + return this.innerApiCalls + .createInstance(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.instance.v1.IInstance, + protos.google.spanner.admin.instance.v1.ICreateInstanceMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('createInstance response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `createInstance()`. @@ -2011,22 +2372,23 @@ export class InstanceAdminClient { * for more details and examples. 
*/ async checkCreateInstanceProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.instance.v1.Instance, protos.google.spanner.admin.instance.v1.CreateInstanceMetadata > > { + this._log.info('createInstance long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.createInstance, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.instance.v1.Instance, @@ -2098,7 +2460,7 @@ export class InstanceAdminClient { */ updateInstance( request?: protos.google.spanner.admin.instance.v1.IUpdateInstanceRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2119,7 +2481,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateInstance( request: protos.google.spanner.admin.instance.v1.IUpdateInstanceRequest, @@ -2130,7 +2492,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateInstance( request?: protos.google.spanner.admin.instance.v1.IUpdateInstanceRequest, @@ -2151,7 +2513,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2177,8 +2539,40 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ 'instance.name': request.instance!.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.updateInstance(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.instance.v1.IInstance, + protos.google.spanner.admin.instance.v1.IUpdateInstanceMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('updateInstance response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('updateInstance request %j', request); + return this.innerApiCalls + .updateInstance(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.instance.v1.IInstance, + protos.google.spanner.admin.instance.v1.IUpdateInstanceMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('updateInstance response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `updateInstance()`. @@ -2190,22 +2584,23 @@ export class InstanceAdminClient { * for more details and examples. 
*/ async checkUpdateInstanceProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.instance.v1.Instance, protos.google.spanner.admin.instance.v1.UpdateInstanceMetadata > > { + this._log.info('updateInstance long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.updateInstance, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.instance.v1.Instance, @@ -2276,7 +2671,7 @@ export class InstanceAdminClient { */ createInstancePartition( request?: protos.google.spanner.admin.instance.v1.ICreateInstancePartitionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2297,7 +2692,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createInstancePartition( request: protos.google.spanner.admin.instance.v1.ICreateInstancePartitionRequest, @@ -2308,7 +2703,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; createInstancePartition( request?: protos.google.spanner.admin.instance.v1.ICreateInstancePartitionRequest, @@ -2329,7 +2724,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2355,12 +2750,40 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.createInstancePartition( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.instance.v1.IInstancePartition, + protos.google.spanner.admin.instance.v1.ICreateInstancePartitionMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('createInstancePartition response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('createInstancePartition request %j', request); + return this.innerApiCalls + .createInstancePartition(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.instance.v1.IInstancePartition, + protos.google.spanner.admin.instance.v1.ICreateInstancePartitionMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('createInstancePartition response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `createInstancePartition()`. @@ -2372,22 +2795,23 @@ export class InstanceAdminClient { * for more details and examples. 
*/ async checkCreateInstancePartitionProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.instance.v1.InstancePartition, protos.google.spanner.admin.instance.v1.CreateInstancePartitionMetadata > > { + this._log.info('createInstancePartition long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.createInstancePartition, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.instance.v1.InstancePartition, @@ -2464,7 +2888,7 @@ export class InstanceAdminClient { */ updateInstancePartition( request?: protos.google.spanner.admin.instance.v1.IUpdateInstancePartitionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2485,7 +2909,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateInstancePartition( request: protos.google.spanner.admin.instance.v1.IUpdateInstancePartitionRequest, @@ -2496,7 +2920,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; updateInstancePartition( request?: protos.google.spanner.admin.instance.v1.IUpdateInstancePartitionRequest, @@ -2517,7 +2941,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2543,12 +2967,40 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ 'instance_partition.name': request.instancePartition!.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.updateInstancePartition( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.instance.v1.IInstancePartition, + protos.google.spanner.admin.instance.v1.IUpdateInstancePartitionMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('updateInstancePartition response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('updateInstancePartition request %j', request); + return this.innerApiCalls + .updateInstancePartition(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.instance.v1.IInstancePartition, + protos.google.spanner.admin.instance.v1.IUpdateInstancePartitionMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('updateInstancePartition response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `updateInstancePartition()`. @@ -2560,22 +3012,23 @@ export class InstanceAdminClient { * for more details and examples. 
*/ async checkUpdateInstancePartitionProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.instance.v1.InstancePartition, protos.google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata > > { + this._log.info('updateInstancePartition long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.updateInstancePartition, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.instance.v1.InstancePartition, @@ -2664,7 +3117,7 @@ export class InstanceAdminClient { */ moveInstance( request?: protos.google.spanner.admin.instance.v1.IMoveInstanceRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ LROperation< @@ -2685,7 +3138,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; moveInstance( request: protos.google.spanner.admin.instance.v1.IMoveInstanceRequest, @@ -2696,7 +3149,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): void; moveInstance( request?: protos.google.spanner.admin.instance.v1.IMoveInstanceRequest, @@ -2717,7 +3170,7 @@ export class InstanceAdminClient { >, protos.google.longrunning.IOperation | null | undefined, {} | null | undefined - > + >, ): Promise< [ LROperation< @@ -2743,8 +3196,40 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? 
'', }); - this.initialize(); - return this.innerApiCalls.moveInstance(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | Callback< + LROperation< + protos.google.spanner.admin.instance.v1.IMoveInstanceResponse, + protos.google.spanner.admin.instance.v1.IMoveInstanceMetadata + >, + protos.google.longrunning.IOperation | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, rawResponse, _) => { + this._log.info('moveInstance response %j', rawResponse); + callback!(error, response, rawResponse, _); // We verified callback above. + } + : undefined; + this._log.info('moveInstance request %j', request); + return this.innerApiCalls + .moveInstance(request, options, wrappedCallback) + ?.then( + ([response, rawResponse, _]: [ + LROperation< + protos.google.spanner.admin.instance.v1.IMoveInstanceResponse, + protos.google.spanner.admin.instance.v1.IMoveInstanceMetadata + >, + protos.google.longrunning.IOperation | undefined, + {} | undefined, + ]) => { + this._log.info('moveInstance response %j', rawResponse); + return [response, rawResponse, _]; + }, + ); } /** * Check the status of the long running operation returned by `moveInstance()`. @@ -2756,22 +3241,23 @@ export class InstanceAdminClient { * for more details and examples. 
*/ async checkMoveInstanceProgress( - name: string + name: string, ): Promise< LROperation< protos.google.spanner.admin.instance.v1.MoveInstanceResponse, protos.google.spanner.admin.instance.v1.MoveInstanceMetadata > > { + this._log.info('moveInstance long-running'); const request = new this._gaxModule.operationsProtos.google.longrunning.GetOperationRequest( - {name} + {name}, ); const [operation] = await this.operationsClient.getOperation(request); const decodeOperation = new this._gaxModule.Operation( operation, this.descriptors.longrunning.moveInstance, - this._gaxModule.createDefaultBackoffSettings() + this._gaxModule.createDefaultBackoffSettings(), ); return decodeOperation as LROperation< protos.google.spanner.admin.instance.v1.MoveInstanceResponse, @@ -2812,7 +3298,7 @@ export class InstanceAdminClient { */ listInstanceConfigs( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstanceConfig[], @@ -2829,7 +3315,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstanceConfig - > + >, ): void; listInstanceConfigs( request: protos.google.spanner.admin.instance.v1.IListInstanceConfigsRequest, @@ -2839,7 +3325,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstanceConfig - > + >, ): void; listInstanceConfigs( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigsRequest, @@ -2858,7 +3344,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstanceConfig - > + >, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstanceConfig[], @@ -2881,8 +3367,36 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listInstanceConfigs(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.instance.v1.IListInstanceConfigsRequest, + | protos.google.spanner.admin.instance.v1.IListInstanceConfigsResponse + | null + | undefined, + protos.google.spanner.admin.instance.v1.IInstanceConfig + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listInstanceConfigs values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listInstanceConfigs request %j', request); + return this.innerApiCalls + .listInstanceConfigs(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.admin.instance.v1.IInstanceConfig[], + protos.google.spanner.admin.instance.v1.IListInstanceConfigsRequest | null, + protos.google.spanner.admin.instance.v1.IListInstanceConfigsResponse, + ]) => { + this._log.info('listInstanceConfigs values %j', response); + return [response, input, output]; + }, + ); } /** @@ -2914,7 +3428,7 @@ export class InstanceAdminClient { */ listInstanceConfigsStream( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -2926,11 +3440,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstanceConfigs']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstanceConfigs stream %j', request); return this.descriptors.page.listInstanceConfigs.createStream( this.innerApiCalls.listInstanceConfigs as GaxCall, request, - callSettings + callSettings, ); } @@ -2964,7 +3481,7 @@ export 
class InstanceAdminClient { */ listInstanceConfigsAsync( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigsRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -2976,11 +3493,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstanceConfigs']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstanceConfigs iterate %j', request); return this.descriptors.page.listInstanceConfigs.asyncIterate( this.innerApiCalls['listInstanceConfigs'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -3067,7 +3587,7 @@ export class InstanceAdminClient { */ listInstanceConfigOperations( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.longrunning.IOperation[], @@ -3084,7 +3604,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listInstanceConfigOperations( request: protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsRequest, @@ -3094,7 +3614,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listInstanceConfigOperations( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsRequest, @@ -3113,7 +3633,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): Promise< [ protos.google.longrunning.IOperation[], @@ -3136,12 +3656,36 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listInstanceConfigOperations( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsRequest, + | protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsResponse + | null + | undefined, + protos.google.longrunning.IOperation + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listInstanceConfigOperations values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listInstanceConfigOperations request %j', request); + return this.innerApiCalls + .listInstanceConfigOperations(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.longrunning.IOperation[], + protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsRequest | null, + protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsResponse, + ]) => { + this._log.info('listInstanceConfigOperations values %j', response); + return [response, input, output]; + }, + ); } /** @@ -3216,7 +3760,7 @@ export class InstanceAdminClient { */ listInstanceConfigOperationsStream( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -3228,11 +3772,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstanceConfigOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstanceConfigOperations stream %j', request); return this.descriptors.page.listInstanceConfigOperations.createStream( 
this.innerApiCalls.listInstanceConfigOperations as GaxCall, request, - callSettings + callSettings, ); } @@ -3309,7 +3856,7 @@ export class InstanceAdminClient { */ listInstanceConfigOperationsAsync( request?: protos.google.spanner.admin.instance.v1.IListInstanceConfigOperationsRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -3321,11 +3868,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstanceConfigOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstanceConfigOperations iterate %j', request); return this.descriptors.page.listInstanceConfigOperations.asyncIterate( this.innerApiCalls['listInstanceConfigOperations'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -3385,7 +3935,7 @@ export class InstanceAdminClient { */ listInstances( request?: protos.google.spanner.admin.instance.v1.IListInstancesRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstance[], @@ -3402,7 +3952,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstance - > + >, ): void; listInstances( request: protos.google.spanner.admin.instance.v1.IListInstancesRequest, @@ -3412,7 +3962,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstance - > + >, ): void; listInstances( request?: protos.google.spanner.admin.instance.v1.IListInstancesRequest, @@ -3431,7 +3981,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstance - > + >, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstance[], @@ -3454,8 +4004,36 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: 
request.parent ?? '', }); - this.initialize(); - return this.innerApiCalls.listInstances(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.instance.v1.IListInstancesRequest, + | protos.google.spanner.admin.instance.v1.IListInstancesResponse + | null + | undefined, + protos.google.spanner.admin.instance.v1.IInstance + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listInstances values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listInstances request %j', request); + return this.innerApiCalls + .listInstances(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.admin.instance.v1.IInstance[], + protos.google.spanner.admin.instance.v1.IListInstancesRequest | null, + protos.google.spanner.admin.instance.v1.IListInstancesResponse, + ]) => { + this._log.info('listInstances values %j', response); + return [response, input, output]; + }, + ); } /** @@ -3513,7 +4091,7 @@ export class InstanceAdminClient { */ listInstancesStream( request?: protos.google.spanner.admin.instance.v1.IListInstancesRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -3525,11 +4103,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstances']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstances stream %j', request); return this.descriptors.page.listInstances.createStream( this.innerApiCalls.listInstances as GaxCall, request, - callSettings + callSettings, ); } @@ -3589,7 +4170,7 @@ export class InstanceAdminClient { */ listInstancesAsync( request?: 
protos.google.spanner.admin.instance.v1.IListInstancesRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -3601,11 +4182,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstances']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstances iterate %j', request); return this.descriptors.page.listInstances.asyncIterate( this.innerApiCalls['listInstances'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -3647,7 +4231,7 @@ export class InstanceAdminClient { */ listInstancePartitions( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstancePartition[], @@ -3664,7 +4248,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstancePartition - > + >, ): void; listInstancePartitions( request: protos.google.spanner.admin.instance.v1.IListInstancePartitionsRequest, @@ -3674,7 +4258,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstancePartition - > + >, ): void; listInstancePartitions( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionsRequest, @@ -3693,7 +4277,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.spanner.admin.instance.v1.IInstancePartition - > + >, ): Promise< [ protos.google.spanner.admin.instance.v1.IInstancePartition[], @@ -3716,12 +4300,36 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listInstancePartitions( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.instance.v1.IListInstancePartitionsRequest, + | protos.google.spanner.admin.instance.v1.IListInstancePartitionsResponse + | null + | undefined, + protos.google.spanner.admin.instance.v1.IInstancePartition + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listInstancePartitions values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listInstancePartitions request %j', request); + return this.innerApiCalls + .listInstancePartitions(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.admin.instance.v1.IInstancePartition[], + protos.google.spanner.admin.instance.v1.IListInstancePartitionsRequest | null, + protos.google.spanner.admin.instance.v1.IListInstancePartitionsResponse, + ]) => { + this._log.info('listInstancePartitions values %j', response); + return [response, input, output]; + }, + ); } /** @@ -3761,7 +4369,7 @@ export class InstanceAdminClient { */ listInstancePartitionsStream( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -3773,11 +4381,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstancePartitions']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstancePartitions stream %j', request); return this.descriptors.page.listInstancePartitions.createStream( this.innerApiCalls.listInstancePartitions as GaxCall, request, - 
callSettings + callSettings, ); } @@ -3819,7 +4430,7 @@ export class InstanceAdminClient { */ listInstancePartitionsAsync( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionsRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -3831,11 +4442,14 @@ export class InstanceAdminClient { }); const defaultCallSettings = this._defaults['listInstancePartitions']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstancePartitions iterate %j', request); return this.descriptors.page.listInstancePartitions.asyncIterate( this.innerApiCalls['listInstancePartitions'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } /** @@ -3932,7 +4546,7 @@ export class InstanceAdminClient { */ listInstancePartitionOperations( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.longrunning.IOperation[], @@ -3949,7 +4563,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listInstancePartitionOperations( request: protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsRequest, @@ -3959,7 +4573,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): void; listInstancePartitionOperations( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsRequest, @@ -3978,7 +4592,7 @@ export class InstanceAdminClient { | null | undefined, protos.google.longrunning.IOperation - > + >, ): Promise< [ protos.google.longrunning.IOperation[], @@ -4001,12 +4615,36 @@ export class InstanceAdminClient { this._gaxModule.routingHeader.fromParams({ parent: request.parent ?? 
'', }); - this.initialize(); - return this.innerApiCalls.listInstancePartitionOperations( - request, - options, - callback - ); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsRequest, + | protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsResponse + | null + | undefined, + protos.google.longrunning.IOperation + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listInstancePartitionOperations values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. + } + : undefined; + this._log.info('listInstancePartitionOperations request %j', request); + return this.innerApiCalls + .listInstancePartitionOperations(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.longrunning.IOperation[], + protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsRequest | null, + protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsResponse, + ]) => { + this._log.info('listInstancePartitionOperations values %j', response); + return [response, input, output]; + }, + ); } /** @@ -4088,7 +4726,7 @@ export class InstanceAdminClient { */ listInstancePartitionOperationsStream( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -4101,11 +4739,14 @@ export class InstanceAdminClient { const defaultCallSettings = this._defaults['listInstancePartitionOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstancePartitionOperations stream %j', request); return 
this.descriptors.page.listInstancePartitionOperations.createStream( this.innerApiCalls.listInstancePartitionOperations as GaxCall, request, - callSettings + callSettings, ); } @@ -4189,7 +4830,7 @@ export class InstanceAdminClient { */ listInstancePartitionOperationsAsync( request?: protos.google.spanner.admin.instance.v1.IListInstancePartitionOperationsRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -4202,11 +4843,14 @@ export class InstanceAdminClient { const defaultCallSettings = this._defaults['listInstancePartitionOperations']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listInstancePartitionOperations iterate %j', request); return this.descriptors.page.listInstancePartitionOperations.asyncIterate( this.innerApiCalls['listInstancePartitionOperations'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } // -------------------- @@ -4272,7 +4916,7 @@ export class InstanceAdminClient { */ matchProjectFromInstanceConfigName(instanceConfigName: string) { return this.pathTemplates.instanceConfigPathTemplate.match( - instanceConfigName + instanceConfigName, ).project; } @@ -4285,7 +4929,7 @@ export class InstanceAdminClient { */ matchInstanceConfigFromInstanceConfigName(instanceConfigName: string) { return this.pathTemplates.instanceConfigPathTemplate.match( - instanceConfigName + instanceConfigName, ).instance_config; } @@ -4300,7 +4944,7 @@ export class InstanceAdminClient { instancePartitionPath( project: string, instance: string, - instancePartition: string + instancePartition: string, ) { return this.pathTemplates.instancePartitionPathTemplate.render({ project: project, @@ -4318,7 +4962,7 @@ export class InstanceAdminClient { */ matchProjectFromInstancePartitionName(instancePartitionName: string) { return 
this.pathTemplates.instancePartitionPathTemplate.match( - instancePartitionName + instancePartitionName, ).project; } @@ -4331,7 +4975,7 @@ export class InstanceAdminClient { */ matchInstanceFromInstancePartitionName(instancePartitionName: string) { return this.pathTemplates.instancePartitionPathTemplate.match( - instancePartitionName + instancePartitionName, ).instance; } @@ -4343,10 +4987,10 @@ export class InstanceAdminClient { * @returns {string} A string representing the instance_partition. */ matchInstancePartitionFromInstancePartitionName( - instancePartitionName: string + instancePartitionName: string, ) { return this.pathTemplates.instancePartitionPathTemplate.match( - instancePartitionName + instancePartitionName, ).instance_partition; } @@ -4382,9 +5026,10 @@ export class InstanceAdminClient { close(): Promise { if (this.instanceAdminStub && !this._terminated) { return this.instanceAdminStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); - this.operationsClient.close(); + void this.operationsClient.close(); }); } return Promise.resolve(); diff --git a/src/v1/spanner_client.ts b/src/v1/spanner_client.ts index b6a3a726d..7af869322 100644 --- a/src/v1/spanner_client.ts +++ b/src/v1/spanner_client.ts @@ -29,6 +29,7 @@ import type { import {Transform, PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -56,6 +57,8 @@ export class SpannerClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('spanner'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -90,7 +93,7 @@ export class SpannerClient { * Developer's Console, e.g. 'grape-spaceship-123'. 
We will also check * the environment variable GCLOUD_PROJECT for your project ID. If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. @@ -109,7 +112,7 @@ export class SpannerClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof SpannerClient; @@ -119,7 +122,7 @@ export class SpannerClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' + 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -203,10 +206,10 @@ export class SpannerClient { // Create useful helper objects for these. 
this.pathTemplates = { databasePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}' + 'projects/{project}/instances/{instance}/databases/{database}', ), sessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}/sessions/{session}' + 'projects/{project}/instances/{instance}/databases/{database}/sessions/{session}', ), }; @@ -217,7 +220,7 @@ export class SpannerClient { listSessions: new this._gaxModule.PageDescriptor( 'pageToken', 'nextPageToken', - 'sessions' + 'sessions', ), }; @@ -227,17 +230,17 @@ export class SpannerClient { executeStreamingSql: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), streamingRead: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), batchWrite: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.SERVER_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), }; @@ -246,7 +249,7 @@ export class SpannerClient { 'google.spanner.v1.Spanner', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -280,12 +283,12 @@ export class SpannerClient { this.spannerStub = this._gaxGrpc.createStub( this._opts.fallback ? 
(this._protos as protobuf.Root).lookupService( - 'google.spanner.v1.Spanner' + 'google.spanner.v1.Spanner', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.spanner.v1.Spanner, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -319,8 +322,8 @@ export class SpannerClient { stream.emit( 'error', new this._gaxModule.GoogleError( - 'The client has already been closed.' - ) + 'The client has already been closed.', + ), ); }); return stream; @@ -332,7 +335,7 @@ export class SpannerClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = @@ -343,7 +346,7 @@ export class SpannerClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -364,7 +367,7 @@ export class SpannerClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner.googleapis.com'; @@ -382,7 +385,7 @@ export class SpannerClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner.googleapis.com'; @@ -427,7 +430,7 @@ export class SpannerClient { * @returns {Promise} A promise that resolves to string containing the project ID. 
*/ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -475,7 +478,7 @@ export class SpannerClient { */ createSession( request?: protos.google.spanner.v1.ICreateSessionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.ISession, @@ -490,7 +493,7 @@ export class SpannerClient { protos.google.spanner.v1.ISession, protos.google.spanner.v1.ICreateSessionRequest | null | undefined, {} | null | undefined - > + >, ): void; createSession( request: protos.google.spanner.v1.ICreateSessionRequest, @@ -498,7 +501,7 @@ export class SpannerClient { protos.google.spanner.v1.ISession, protos.google.spanner.v1.ICreateSessionRequest | null | undefined, {} | null | undefined - > + >, ): void; createSession( request?: protos.google.spanner.v1.ICreateSessionRequest, @@ -513,7 +516,7 @@ export class SpannerClient { protos.google.spanner.v1.ISession, protos.google.spanner.v1.ICreateSessionRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.ISession, @@ -536,8 +539,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ database: request.database ?? '', }); - this.initialize(); - return this.innerApiCalls.createSession(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('createSession request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.ISession, + protos.google.spanner.v1.ICreateSessionRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('createSession response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .createSession(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.ISession, + protos.google.spanner.v1.ICreateSessionRequest | undefined, + {} | undefined, + ]) => { + this._log.info('createSession response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Creates multiple new sessions. @@ -567,7 +596,7 @@ export class SpannerClient { */ batchCreateSessions( request?: protos.google.spanner.v1.IBatchCreateSessionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.IBatchCreateSessionsResponse, @@ -582,7 +611,7 @@ export class SpannerClient { protos.google.spanner.v1.IBatchCreateSessionsResponse, protos.google.spanner.v1.IBatchCreateSessionsRequest | null | undefined, {} | null | undefined - > + >, ): void; batchCreateSessions( request: protos.google.spanner.v1.IBatchCreateSessionsRequest, @@ -590,7 +619,7 @@ export class SpannerClient { protos.google.spanner.v1.IBatchCreateSessionsResponse, protos.google.spanner.v1.IBatchCreateSessionsRequest | null | undefined, {} | null | undefined - > + >, ): void; batchCreateSessions( request?: protos.google.spanner.v1.IBatchCreateSessionsRequest, @@ -607,7 +636,7 @@ export class SpannerClient { protos.google.spanner.v1.IBatchCreateSessionsResponse, protos.google.spanner.v1.IBatchCreateSessionsRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.IBatchCreateSessionsResponse, @@ -630,8 +659,36 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ database: request.database ?? 
'', }); - this.initialize(); - return this.innerApiCalls.batchCreateSessions(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchCreateSessions request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.IBatchCreateSessionsResponse, + | protos.google.spanner.v1.IBatchCreateSessionsRequest + | null + | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('batchCreateSessions response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .batchCreateSessions(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.IBatchCreateSessionsResponse, + protos.google.spanner.v1.IBatchCreateSessionsRequest | undefined, + {} | undefined, + ]) => { + this._log.info('batchCreateSessions response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Gets a session. Returns `NOT_FOUND` if the session does not exist. 
@@ -651,7 +708,7 @@ export class SpannerClient { */ getSession( request?: protos.google.spanner.v1.IGetSessionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.ISession, @@ -666,7 +723,7 @@ export class SpannerClient { protos.google.spanner.v1.ISession, protos.google.spanner.v1.IGetSessionRequest | null | undefined, {} | null | undefined - > + >, ): void; getSession( request: protos.google.spanner.v1.IGetSessionRequest, @@ -674,7 +731,7 @@ export class SpannerClient { protos.google.spanner.v1.ISession, protos.google.spanner.v1.IGetSessionRequest | null | undefined, {} | null | undefined - > + >, ): void; getSession( request?: protos.google.spanner.v1.IGetSessionRequest, @@ -689,7 +746,7 @@ export class SpannerClient { protos.google.spanner.v1.ISession, protos.google.spanner.v1.IGetSessionRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.ISession, @@ -712,8 +769,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.getSession(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('getSession request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.ISession, + protos.google.spanner.v1.IGetSessionRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('getSession response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .getSession(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.ISession, + protos.google.spanner.v1.IGetSessionRequest | undefined, + {} | undefined, + ]) => { + this._log.info('getSession response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Ends a session, releasing server resources associated with it. This will @@ -733,7 +816,7 @@ export class SpannerClient { */ deleteSession( request?: protos.google.spanner.v1.IDeleteSessionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -748,7 +831,7 @@ export class SpannerClient { protos.google.protobuf.IEmpty, protos.google.spanner.v1.IDeleteSessionRequest | null | undefined, {} | null | undefined - > + >, ): void; deleteSession( request: protos.google.spanner.v1.IDeleteSessionRequest, @@ -756,7 +839,7 @@ export class SpannerClient { protos.google.protobuf.IEmpty, protos.google.spanner.v1.IDeleteSessionRequest | null | undefined, {} | null | undefined - > + >, ): void; deleteSession( request?: protos.google.spanner.v1.IDeleteSessionRequest, @@ -771,7 +854,7 @@ export class SpannerClient { protos.google.protobuf.IEmpty, protos.google.spanner.v1.IDeleteSessionRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -794,8 +877,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ name: request.name ?? '', }); - this.initialize(); - return this.innerApiCalls.deleteSession(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('deleteSession request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + protos.google.spanner.v1.IDeleteSessionRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('deleteSession response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .deleteSession(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + protos.google.spanner.v1.IDeleteSessionRequest | undefined, + {} | undefined, + ]) => { + this._log.info('deleteSession response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Executes an SQL statement, returning all results in a single reply. This @@ -914,7 +1023,7 @@ export class SpannerClient { */ executeSql( request?: protos.google.spanner.v1.IExecuteSqlRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.IResultSet, @@ -929,7 +1038,7 @@ export class SpannerClient { protos.google.spanner.v1.IResultSet, protos.google.spanner.v1.IExecuteSqlRequest | null | undefined, {} | null | undefined - > + >, ): void; executeSql( request: protos.google.spanner.v1.IExecuteSqlRequest, @@ -937,7 +1046,7 @@ export class SpannerClient { protos.google.spanner.v1.IResultSet, protos.google.spanner.v1.IExecuteSqlRequest | null | undefined, {} | null | undefined - > + >, ): void; executeSql( request?: protos.google.spanner.v1.IExecuteSqlRequest, @@ -952,7 +1061,7 @@ export class SpannerClient { protos.google.spanner.v1.IResultSet, protos.google.spanner.v1.IExecuteSqlRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.IResultSet, @@ -975,8 +1084,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? 
'', }); - this.initialize(); - return this.innerApiCalls.executeSql(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('executeSql request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.IResultSet, + protos.google.spanner.v1.IExecuteSqlRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('executeSql response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .executeSql(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.IResultSet, + protos.google.spanner.v1.IExecuteSqlRequest | undefined, + {} | undefined, + ]) => { + this._log.info('executeSql response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Executes a batch of SQL DML statements. 
This method allows many statements @@ -1039,7 +1174,7 @@ export class SpannerClient { */ executeBatchDml( request?: protos.google.spanner.v1.IExecuteBatchDmlRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.IExecuteBatchDmlResponse, @@ -1054,7 +1189,7 @@ export class SpannerClient { protos.google.spanner.v1.IExecuteBatchDmlResponse, protos.google.spanner.v1.IExecuteBatchDmlRequest | null | undefined, {} | null | undefined - > + >, ): void; executeBatchDml( request: protos.google.spanner.v1.IExecuteBatchDmlRequest, @@ -1062,7 +1197,7 @@ export class SpannerClient { protos.google.spanner.v1.IExecuteBatchDmlResponse, protos.google.spanner.v1.IExecuteBatchDmlRequest | null | undefined, {} | null | undefined - > + >, ): void; executeBatchDml( request?: protos.google.spanner.v1.IExecuteBatchDmlRequest, @@ -1077,7 +1212,7 @@ export class SpannerClient { protos.google.spanner.v1.IExecuteBatchDmlResponse, protos.google.spanner.v1.IExecuteBatchDmlRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.IExecuteBatchDmlResponse, @@ -1100,8 +1235,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? '', }); - this.initialize(); - return this.innerApiCalls.executeBatchDml(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('executeBatchDml request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.IExecuteBatchDmlResponse, + protos.google.spanner.v1.IExecuteBatchDmlRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('executeBatchDml response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .executeBatchDml(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.IExecuteBatchDmlResponse, + protos.google.spanner.v1.IExecuteBatchDmlRequest | undefined, + {} | undefined, + ]) => { + this._log.info('executeBatchDml response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Reads rows from the database using key lookups and scans, as a @@ -1201,7 +1362,7 @@ export class SpannerClient { */ read( request?: protos.google.spanner.v1.IReadRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.IResultSet, @@ -1216,7 +1377,7 @@ export class SpannerClient { protos.google.spanner.v1.IResultSet, protos.google.spanner.v1.IReadRequest | null | undefined, {} | null | undefined - > + >, ): void; read( request: protos.google.spanner.v1.IReadRequest, @@ -1224,7 +1385,7 @@ export class SpannerClient { protos.google.spanner.v1.IResultSet, protos.google.spanner.v1.IReadRequest | null | undefined, {} | null | undefined - > + >, ): void; read( request?: protos.google.spanner.v1.IReadRequest, @@ -1239,7 +1400,7 @@ export class SpannerClient { protos.google.spanner.v1.IResultSet, protos.google.spanner.v1.IReadRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.IResultSet, @@ -1262,8 +1423,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? '', }); - this.initialize(); - return this.innerApiCalls.read(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('read request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.IResultSet, + protos.google.spanner.v1.IReadRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('read response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .read(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.IResultSet, + protos.google.spanner.v1.IReadRequest | undefined, + {} | undefined, + ]) => { + this._log.info('read response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Begins a new transaction. This step can often be skipped: @@ -1300,7 +1487,7 @@ export class SpannerClient { */ beginTransaction( request?: protos.google.spanner.v1.IBeginTransactionRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.ITransaction, @@ -1315,7 +1502,7 @@ export class SpannerClient { protos.google.spanner.v1.ITransaction, protos.google.spanner.v1.IBeginTransactionRequest | null | undefined, {} | null | undefined - > + >, ): void; beginTransaction( request: protos.google.spanner.v1.IBeginTransactionRequest, @@ -1323,7 +1510,7 @@ export class SpannerClient { protos.google.spanner.v1.ITransaction, protos.google.spanner.v1.IBeginTransactionRequest | null | undefined, {} | null | undefined - > + >, ): void; beginTransaction( request?: protos.google.spanner.v1.IBeginTransactionRequest, @@ -1338,7 +1525,7 @@ export class SpannerClient { protos.google.spanner.v1.ITransaction, protos.google.spanner.v1.IBeginTransactionRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.ITransaction, @@ -1361,8 +1548,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? 
'', }); - this.initialize(); - return this.innerApiCalls.beginTransaction(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('beginTransaction request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.ITransaction, + protos.google.spanner.v1.IBeginTransactionRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('beginTransaction response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .beginTransaction(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.ITransaction, + protos.google.spanner.v1.IBeginTransactionRequest | undefined, + {} | undefined, + ]) => { + this._log.info('beginTransaction response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Commits a transaction. 
The request includes the mutations to be @@ -1428,7 +1641,7 @@ export class SpannerClient { */ commit( request?: protos.google.spanner.v1.ICommitRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.ICommitResponse, @@ -1443,7 +1656,7 @@ export class SpannerClient { protos.google.spanner.v1.ICommitResponse, protos.google.spanner.v1.ICommitRequest | null | undefined, {} | null | undefined - > + >, ): void; commit( request: protos.google.spanner.v1.ICommitRequest, @@ -1451,7 +1664,7 @@ export class SpannerClient { protos.google.spanner.v1.ICommitResponse, protos.google.spanner.v1.ICommitRequest | null | undefined, {} | null | undefined - > + >, ): void; commit( request?: protos.google.spanner.v1.ICommitRequest, @@ -1466,7 +1679,7 @@ export class SpannerClient { protos.google.spanner.v1.ICommitResponse, protos.google.spanner.v1.ICommitRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.ICommitResponse, @@ -1489,8 +1702,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? '', }); - this.initialize(); - return this.innerApiCalls.commit(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('commit request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.ICommitResponse, + protos.google.spanner.v1.ICommitRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('commit response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. 
+ } + : undefined; + return this.innerApiCalls + .commit(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.ICommitResponse, + protos.google.spanner.v1.ICommitRequest | undefined, + {} | undefined, + ]) => { + this._log.info('commit response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Rolls back a transaction, releasing any locks it holds. It is a good @@ -1518,7 +1757,7 @@ export class SpannerClient { */ rollback( request?: protos.google.spanner.v1.IRollbackRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.protobuf.IEmpty, @@ -1533,7 +1772,7 @@ export class SpannerClient { protos.google.protobuf.IEmpty, protos.google.spanner.v1.IRollbackRequest | null | undefined, {} | null | undefined - > + >, ): void; rollback( request: protos.google.spanner.v1.IRollbackRequest, @@ -1541,7 +1780,7 @@ export class SpannerClient { protos.google.protobuf.IEmpty, protos.google.spanner.v1.IRollbackRequest | null | undefined, {} | null | undefined - > + >, ): void; rollback( request?: protos.google.spanner.v1.IRollbackRequest, @@ -1556,7 +1795,7 @@ export class SpannerClient { protos.google.protobuf.IEmpty, protos.google.spanner.v1.IRollbackRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.protobuf.IEmpty, @@ -1579,8 +1818,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? '', }); - this.initialize(); - return this.innerApiCalls.rollback(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('rollback request %j', request); + const wrappedCallback: + | Callback< + protos.google.protobuf.IEmpty, + protos.google.spanner.v1.IRollbackRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? 
(error, response, options, rawResponse) => { + this._log.info('rollback response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .rollback(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.protobuf.IEmpty, + protos.google.spanner.v1.IRollbackRequest | undefined, + {} | undefined, + ]) => { + this._log.info('rollback response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Creates a set of partition tokens that can be used to execute a query @@ -1650,7 +1915,7 @@ export class SpannerClient { */ partitionQuery( request?: protos.google.spanner.v1.IPartitionQueryRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.IPartitionResponse, @@ -1665,7 +1930,7 @@ export class SpannerClient { protos.google.spanner.v1.IPartitionResponse, protos.google.spanner.v1.IPartitionQueryRequest | null | undefined, {} | null | undefined - > + >, ): void; partitionQuery( request: protos.google.spanner.v1.IPartitionQueryRequest, @@ -1673,7 +1938,7 @@ export class SpannerClient { protos.google.spanner.v1.IPartitionResponse, protos.google.spanner.v1.IPartitionQueryRequest | null | undefined, {} | null | undefined - > + >, ): void; partitionQuery( request?: protos.google.spanner.v1.IPartitionQueryRequest, @@ -1688,7 +1953,7 @@ export class SpannerClient { protos.google.spanner.v1.IPartitionResponse, protos.google.spanner.v1.IPartitionQueryRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.IPartitionResponse, @@ -1711,8 +1976,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? 
'', }); - this.initialize(); - return this.innerApiCalls.partitionQuery(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('partitionQuery request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.IPartitionResponse, + protos.google.spanner.v1.IPartitionQueryRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('partitionQuery response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .partitionQuery(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.IPartitionResponse, + protos.google.spanner.v1.IPartitionQueryRequest | undefined, + {} | undefined, + ]) => { + this._log.info('partitionQuery response %j', response); + return [response, options, rawResponse]; + }, + ); } /** * Creates a set of partition tokens that can be used to execute a read @@ -1771,7 +2062,7 @@ export class SpannerClient { */ partitionRead( request?: protos.google.spanner.v1.IPartitionReadRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.IPartitionResponse, @@ -1786,7 +2077,7 @@ export class SpannerClient { protos.google.spanner.v1.IPartitionResponse, protos.google.spanner.v1.IPartitionReadRequest | null | undefined, {} | null | undefined - > + >, ): void; partitionRead( request: protos.google.spanner.v1.IPartitionReadRequest, @@ -1794,7 +2085,7 @@ export class SpannerClient { protos.google.spanner.v1.IPartitionResponse, protos.google.spanner.v1.IPartitionReadRequest | null | undefined, {} | null | undefined - > + >, ): void; partitionRead( request?: protos.google.spanner.v1.IPartitionReadRequest, @@ -1809,7 +2100,7 @@ export class SpannerClient { protos.google.spanner.v1.IPartitionResponse, 
protos.google.spanner.v1.IPartitionReadRequest | null | undefined, {} | null | undefined - > + >, ): Promise< [ protos.google.spanner.v1.IPartitionResponse, @@ -1832,8 +2123,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? '', }); - this.initialize(); - return this.innerApiCalls.partitionRead(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + this._log.info('partitionRead request %j', request); + const wrappedCallback: + | Callback< + protos.google.spanner.v1.IPartitionResponse, + protos.google.spanner.v1.IPartitionReadRequest | null | undefined, + {} | null | undefined + > + | undefined = callback + ? (error, response, options, rawResponse) => { + this._log.info('partitionRead response %j', response); + callback!(error, response, options, rawResponse); // We verified callback above. + } + : undefined; + return this.innerApiCalls + .partitionRead(request, options, wrappedCallback) + ?.then( + ([response, options, rawResponse]: [ + protos.google.spanner.v1.IPartitionResponse, + protos.google.spanner.v1.IPartitionReadRequest | undefined, + {} | undefined, + ]) => { + this._log.info('partitionRead response %j', response); + return [response, options, rawResponse]; + }, + ); } /** @@ -1945,7 +2262,7 @@ export class SpannerClient { */ executeStreamingSql( request?: protos.google.spanner.v1.IExecuteSqlRequest, - options?: CallOptions + options?: CallOptions, ): gax.CancellableStream { request = request || {}; options = options || {}; @@ -1955,7 +2272,10 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? 
'', }); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('executeStreamingSql stream %j', options); return this.innerApiCalls.executeStreamingSql(request, options); } @@ -2048,7 +2368,7 @@ export class SpannerClient { */ streamingRead( request?: protos.google.spanner.v1.IReadRequest, - options?: CallOptions + options?: CallOptions, ): gax.CancellableStream { request = request || {}; options = options || {}; @@ -2058,7 +2378,10 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? '', }); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('streamingRead stream %j', options); return this.innerApiCalls.streamingRead(request, options); } @@ -2109,7 +2432,7 @@ export class SpannerClient { */ batchWrite( request?: protos.google.spanner.v1.IBatchWriteRequest, - options?: CallOptions + options?: CallOptions, ): gax.CancellableStream { request = request || {}; options = options || {}; @@ -2119,7 +2442,10 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ session: request.session ?? 
'', }); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('batchWrite stream %j', options); return this.innerApiCalls.batchWrite(request, options); } @@ -2163,7 +2489,7 @@ export class SpannerClient { */ listSessions( request?: protos.google.spanner.v1.IListSessionsRequest, - options?: CallOptions + options?: CallOptions, ): Promise< [ protos.google.spanner.v1.ISession[], @@ -2178,7 +2504,7 @@ export class SpannerClient { protos.google.spanner.v1.IListSessionsRequest, protos.google.spanner.v1.IListSessionsResponse | null | undefined, protos.google.spanner.v1.ISession - > + >, ): void; listSessions( request: protos.google.spanner.v1.IListSessionsRequest, @@ -2186,7 +2512,7 @@ export class SpannerClient { protos.google.spanner.v1.IListSessionsRequest, protos.google.spanner.v1.IListSessionsResponse | null | undefined, protos.google.spanner.v1.ISession - > + >, ): void; listSessions( request?: protos.google.spanner.v1.IListSessionsRequest, @@ -2201,7 +2527,7 @@ export class SpannerClient { protos.google.spanner.v1.IListSessionsRequest, protos.google.spanner.v1.IListSessionsResponse | null | undefined, protos.google.spanner.v1.ISession - > + >, ): Promise< [ protos.google.spanner.v1.ISession[], @@ -2224,8 +2550,34 @@ export class SpannerClient { this._gaxModule.routingHeader.fromParams({ database: request.database ?? '', }); - this.initialize(); - return this.innerApiCalls.listSessions(request, options, callback); + this.initialize().catch(err => { + throw err; + }); + const wrappedCallback: + | PaginationCallback< + protos.google.spanner.v1.IListSessionsRequest, + protos.google.spanner.v1.IListSessionsResponse | null | undefined, + protos.google.spanner.v1.ISession + > + | undefined = callback + ? (error, values, nextPageRequest, rawResponse) => { + this._log.info('listSessions values %j', values); + callback!(error, values, nextPageRequest, rawResponse); // We verified callback above. 
+ } + : undefined; + this._log.info('listSessions request %j', request); + return this.innerApiCalls + .listSessions(request, options, wrappedCallback) + ?.then( + ([response, input, output]: [ + protos.google.spanner.v1.ISession[], + protos.google.spanner.v1.IListSessionsRequest | null, + protos.google.spanner.v1.IListSessionsResponse, + ]) => { + this._log.info('listSessions values %j', response); + return [response, input, output]; + }, + ); } /** @@ -2266,7 +2618,7 @@ export class SpannerClient { */ listSessionsStream( request?: protos.google.spanner.v1.IListSessionsRequest, - options?: CallOptions + options?: CallOptions, ): Transform { request = request || {}; options = options || {}; @@ -2278,11 +2630,14 @@ export class SpannerClient { }); const defaultCallSettings = this._defaults['listSessions']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listSessions stream %j', request); return this.descriptors.page.listSessions.createStream( this.innerApiCalls.listSessions as GaxCall, request, - callSettings + callSettings, ); } @@ -2325,7 +2680,7 @@ export class SpannerClient { */ listSessionsAsync( request?: protos.google.spanner.v1.IListSessionsRequest, - options?: CallOptions + options?: CallOptions, ): AsyncIterable { request = request || {}; options = options || {}; @@ -2337,11 +2692,14 @@ export class SpannerClient { }); const defaultCallSettings = this._defaults['listSessions']; const callSettings = defaultCallSettings.merge(options); - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('listSessions iterate %j', request); return this.descriptors.page.listSessions.asyncIterate( this.innerApiCalls['listSessions'] as GaxCall, request as {}, - callSettings + callSettings, ) as AsyncIterable; } // -------------------- @@ -2410,7 +2768,7 @@ export class SpannerClient { project: string, instance: string, database: string, - 
session: string + session: string, ) { return this.pathTemplates.sessionPathTemplate.render({ project: project, @@ -2473,6 +2831,7 @@ export class SpannerClient { close(): Promise { if (this.spannerStub && !this._terminated) { return this.spannerStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); }); diff --git a/src/v1/spanner_executor_proxy_client.ts b/src/v1/spanner_executor_proxy_client.ts index b5749792d..e381a7e45 100644 --- a/src/v1/spanner_executor_proxy_client.ts +++ b/src/v1/spanner_executor_proxy_client.ts @@ -27,6 +27,7 @@ import type { import {PassThrough} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); +import {loggingUtils as logging} from 'google-gax'; /** * Client JSON configuration object, loaded from @@ -51,6 +52,8 @@ export class SpannerExecutorProxyClient { private _defaults: {[method: string]: gax.CallSettings}; private _universeDomain: string; private _servicePath: string; + private _log = logging.log('executor'); + auth: gax.GoogleAuth; descriptors: Descriptors = { page: {}, @@ -85,7 +88,7 @@ export class SpannerExecutorProxyClient { * Developer's Console, e.g. 'grape-spaceship-123'. We will also check * the environment variable GCLOUD_PROJECT for your project ID. If your * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * {@link https://cloud.google.com/docs/authentication/application-default-credentials Application Default Credentials}, * your project ID will be detected automatically. * @param {string} [options.apiEndpoint] - The domain name of the * API remote host. 
@@ -104,7 +107,7 @@ export class SpannerExecutorProxyClient { */ constructor( opts?: ClientOptions, - gaxInstance?: typeof gax | typeof gax.fallback + gaxInstance?: typeof gax | typeof gax.fallback, ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof SpannerExecutorProxyClient; @@ -114,7 +117,7 @@ export class SpannerExecutorProxyClient { opts?.universe_domain !== opts?.universeDomain ) { throw new Error( - 'Please set either universe_domain or universeDomain, but not both.' + 'Please set either universe_domain or universeDomain, but not both.', ); } const universeDomainEnvVar = @@ -195,28 +198,28 @@ export class SpannerExecutorProxyClient { // Create useful helper objects for these. this.pathTemplates = { backupPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/backups/{backup}' + 'projects/{project}/instances/{instance}/backups/{backup}', ), backupSchedulePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}' + 'projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}', ), databasePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}' + 'projects/{project}/instances/{instance}/databases/{database}', ), databaseRolePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}' + 'projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}', ), instancePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}' + 'projects/{project}/instances/{instance}', ), instanceConfigPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instanceConfigs/{instance_config}' + 'projects/{project}/instanceConfigs/{instance_config}', ), instancePartitionPathTemplate: new 
this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/instancePartitions/{instance_partition}' + 'projects/{project}/instances/{instance}/instancePartitions/{instance_partition}', ), sessionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/instances/{instance}/databases/{database}/sessions/{session}' + 'projects/{project}/instances/{instance}/databases/{database}/sessions/{session}', ), }; @@ -226,7 +229,7 @@ export class SpannerExecutorProxyClient { executeActionAsync: new this._gaxModule.StreamDescriptor( this._gaxModule.StreamType.BIDI_STREAMING, !!opts.fallback, - !!opts.gaxServerStreamingRetries + !!opts.gaxServerStreamingRetries, ), }; @@ -235,7 +238,7 @@ export class SpannerExecutorProxyClient { 'google.spanner.executor.v1.SpannerExecutorProxy', gapicConfig as gax.ClientConfig, opts.clientConfig || {}, - {'x-goog-api-client': clientHeader.join(' ')} + {'x-goog-api-client': clientHeader.join(' ')}, ); // Set up a dictionary of "inner API calls"; the core implementation @@ -269,12 +272,12 @@ export class SpannerExecutorProxyClient { this.spannerExecutorProxyStub = this._gaxGrpc.createStub( this._opts.fallback ? (this._protos as protobuf.Root).lookupService( - 'google.spanner.executor.v1.SpannerExecutorProxy' + 'google.spanner.executor.v1.SpannerExecutorProxy', ) : // eslint-disable-next-line @typescript-eslint/no-explicit-any (this._protos as any).google.spanner.executor.v1.SpannerExecutorProxy, this._opts, - this._providedCustomServicePath + this._providedCustomServicePath, ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides @@ -291,8 +294,8 @@ export class SpannerExecutorProxyClient { stream.emit( 'error', new this._gaxModule.GoogleError( - 'The client has already been closed.' 
- ) + 'The client has already been closed.', + ), ); }); return stream; @@ -304,7 +307,7 @@ export class SpannerExecutorProxyClient { }, (err: Error | null | undefined) => () => { throw err; - } + }, ); const descriptor = this.descriptors.stream[methodName] || undefined; @@ -312,7 +315,7 @@ export class SpannerExecutorProxyClient { callPromise, this._defaults[methodName], descriptor, - this._opts.fallback + this._opts.fallback, ); this.innerApiCalls[methodName] = apiCall; @@ -333,7 +336,7 @@ export class SpannerExecutorProxyClient { ) { process.emitWarning( 'Static servicePath is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner-cloud-executor.googleapis.com'; @@ -351,7 +354,7 @@ export class SpannerExecutorProxyClient { ) { process.emitWarning( 'Static apiEndpoint is deprecated, please use the instance method instead.', - 'DeprecationWarning' + 'DeprecationWarning', ); } return 'spanner-cloud-executor.googleapis.com'; @@ -393,7 +396,7 @@ export class SpannerExecutorProxyClient { * @returns {Promise} A promise that resolves to string containing the project ID. 
*/ getProjectId( - callback?: Callback + callback?: Callback, ): Promise | void { if (callback) { this.auth.getProjectId(callback); @@ -429,7 +432,10 @@ export class SpannerExecutorProxyClient { * region_tag:spanner-cloud-executor_v1_generated_SpannerExecutorProxy_ExecuteActionAsync_async */ executeActionAsync(options?: CallOptions): gax.CancellableStream { - this.initialize(); + this.initialize().catch(err => { + throw err; + }); + this._log.info('executeActionAsync stream %j', options); return this.innerApiCalls.executeActionAsync(null, options); } @@ -499,7 +505,7 @@ export class SpannerExecutorProxyClient { project: string, instance: string, database: string, - schedule: string + schedule: string, ) { return this.pathTemplates.backupSchedulePathTemplate.render({ project: project, @@ -518,7 +524,7 @@ export class SpannerExecutorProxyClient { */ matchProjectFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).project; } @@ -531,7 +537,7 @@ export class SpannerExecutorProxyClient { */ matchInstanceFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).instance; } @@ -544,7 +550,7 @@ export class SpannerExecutorProxyClient { */ matchDatabaseFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).database; } @@ -557,7 +563,7 @@ export class SpannerExecutorProxyClient { */ matchScheduleFromBackupScheduleName(backupScheduleName: string) { return this.pathTemplates.backupSchedulePathTemplate.match( - backupScheduleName + backupScheduleName, ).schedule; } @@ -623,7 +629,7 @@ export class SpannerExecutorProxyClient { project: string, instance: string, database: string, - role: string + role: string, ) { return this.pathTemplates.databaseRolePathTemplate.render({ 
project: project, @@ -740,7 +746,7 @@ export class SpannerExecutorProxyClient { */ matchProjectFromInstanceConfigName(instanceConfigName: string) { return this.pathTemplates.instanceConfigPathTemplate.match( - instanceConfigName + instanceConfigName, ).project; } @@ -753,7 +759,7 @@ export class SpannerExecutorProxyClient { */ matchInstanceConfigFromInstanceConfigName(instanceConfigName: string) { return this.pathTemplates.instanceConfigPathTemplate.match( - instanceConfigName + instanceConfigName, ).instance_config; } @@ -768,7 +774,7 @@ export class SpannerExecutorProxyClient { instancePartitionPath( project: string, instance: string, - instancePartition: string + instancePartition: string, ) { return this.pathTemplates.instancePartitionPathTemplate.render({ project: project, @@ -786,7 +792,7 @@ export class SpannerExecutorProxyClient { */ matchProjectFromInstancePartitionName(instancePartitionName: string) { return this.pathTemplates.instancePartitionPathTemplate.match( - instancePartitionName + instancePartitionName, ).project; } @@ -799,7 +805,7 @@ export class SpannerExecutorProxyClient { */ matchInstanceFromInstancePartitionName(instancePartitionName: string) { return this.pathTemplates.instancePartitionPathTemplate.match( - instancePartitionName + instancePartitionName, ).instance; } @@ -811,10 +817,10 @@ export class SpannerExecutorProxyClient { * @returns {string} A string representing the instance_partition. 
*/ matchInstancePartitionFromInstancePartitionName( - instancePartitionName: string + instancePartitionName: string, ) { return this.pathTemplates.instancePartitionPathTemplate.match( - instancePartitionName + instancePartitionName, ).instance_partition; } @@ -831,7 +837,7 @@ export class SpannerExecutorProxyClient { project: string, instance: string, database: string, - session: string + session: string, ) { return this.pathTemplates.sessionPathTemplate.render({ project: project, @@ -894,6 +900,7 @@ export class SpannerExecutorProxyClient { close(): Promise { if (this.spannerExecutorProxyStub && !this._terminated) { return this.spannerExecutorProxyStub.then(stub => { + this._log.info('ending gRPC channel'); this._terminated = true; stub.close(); }); diff --git a/system-test/spanner.ts b/system-test/spanner.ts index ac75b3163..e07ee86f4 100644 --- a/system-test/spanner.ts +++ b/system-test/spanner.ts @@ -40,6 +40,8 @@ import { ExecuteSqlRequest, TimestampBounds, MutationGroup, + ReadResponse, + RunResponse, } from '../src/transaction'; import {Row} from '../src/partial-result-stream'; import {GetDatabaseConfig} from '../src/database'; @@ -52,9 +54,14 @@ const singer = require('../test/data/singer'); const music = singer.examples.spanner.music; import {util} from 'protobufjs'; import Long = util.Long; +import { + CreateQueryPartitionsResponse, + CreateReadPartitionsResponse, +} from '../src/batch-transaction'; const fs = require('fs'); const SKIP_BACKUPS = process.env.SKIP_BACKUPS; +const KOKORO_JOB_NAME = process.env.KOKORO_JOB_NAME; const SKIP_FGAC_TESTS = (process.env.SKIP_FGAC_TESTS || 'false').toLowerCase(); const IAM_MEMBER = process.env.IAM_MEMBER; @@ -127,7 +134,7 @@ describe('Spanner', () => { instance: { config: instanceAdminClient.instanceConfigPath( projectId!, - INSTANCE_CONFIG.config + INSTANCE_CONFIG.config, ), nodeCount: 1, displayName: 'Test name for instance.', @@ -189,7 +196,7 @@ describe('Spanner', () => { database: 
databaseAdminClient.databasePath( projectId!, instanceId!, - pgdatabaseId + pgdatabaseId, ), statements: statements, }); @@ -211,7 +218,7 @@ describe('Spanner', () => { if ('database' in operation.metadata!) { assert.strictEqual( operation.metadata!.name, - `${instance.formattedName_}/backups/${backupId}` + `${instance.formattedName_}/backups/${backupId}`, ); } @@ -231,7 +238,7 @@ describe('Spanner', () => { } else { instance = spanner.instance(envInstanceName); console.log( - `Not creating temp instance, using + ${instance.formattedName_}...` + `Not creating temp instance, using + ${instance.formattedName_}...`, ); } if (IS_EMULATOR_ENABLED) { @@ -266,8 +273,8 @@ describe('Spanner', () => { // Deleting all backups before an instance can be deleted. await Promise.all( RESOURCES_TO_CLEAN.filter(resource => resource instanceof Backup).map( - backup => backup.delete(GAX_OPTIONS) - ) + backup => backup.delete(GAX_OPTIONS), + ), ); /** * Deleting instances created during this test. @@ -276,8 +283,8 @@ describe('Spanner', () => { */ await Promise.all( RESOURCES_TO_CLEAN.filter( - resource => resource instanceof Instance - ).map(instance => instance.delete(GAX_OPTIONS)) + resource => resource instanceof Instance, + ).map(instance => instance.delete(GAX_OPTIONS)), ); } else { /** @@ -288,8 +295,8 @@ describe('Spanner', () => { const limit = pLimit(5); await Promise.all( RESOURCES_TO_CLEAN.map(resource => - limit(() => resource.delete(GAX_OPTIONS)) - ) + limit(() => resource.delete(GAX_OPTIONS)), + ), ); } } catch (err) { @@ -310,7 +317,7 @@ describe('Spanner', () => { }); assert.strictEqual( metadata!.name, - instanceAdminClient.instancePath(projectId, instanceId) + instanceAdminClient.instancePath(projectId, instanceId), ); } catch (err) { if (!err) { @@ -335,18 +342,18 @@ describe('Spanner', () => { name: databaseAdminClient.databasePath( projectId, instanceId, - database + database, ), }); assert.strictEqual( metadata!.name, - databaseAdminClient.databasePath(projectId, 
instanceId, database) + databaseAdminClient.databasePath(projectId, instanceId, database), ); assert.strictEqual(metadata!.state, 'READY'); if (IS_EMULATOR_ENABLED) { assert.strictEqual( metadata!.databaseDialect, - 'DATABASE_DIALECT_UNSPECIFIED' + 'DATABASE_DIALECT_UNSPECIFIED', ); } else { assert.strictEqual(metadata!.databaseDialect, dialect); @@ -354,7 +361,7 @@ describe('Spanner', () => { } it('GOOGLE_STANDARD_SQL should have created the database', async () => { - createDatabase(DATABASE, 'GOOGLE_STANDARD_SQL'); + void createDatabase(DATABASE, 'GOOGLE_STANDARD_SQL'); }); }); }); @@ -393,7 +400,7 @@ describe('Spanner', () => { TimestampArray ARRAY< TIMESTAMP >, CommitTimestamp TIMESTAMP OPTIONS (allow_commit_timestamp= true) ) PRIMARY KEY (Key) - ` + `, ); await googleSqlOperationUpdateDDL.promise(); const [postgreSqlOperationUpdateDDL] = await PG_DATABASE.updateSchema( @@ -423,7 +430,7 @@ describe('Spanner', () => { "JsonbArray" JSONB[], "CommitTimestamp" SPANNER.COMMIT_TIMESTAMP ); - ` + `, ); await postgreSqlOperationUpdateDDL.promise(); } else { @@ -458,7 +465,7 @@ describe('Spanner', () => { ProtoEnumArray ARRAY, CommitTimestamp TIMESTAMP OPTIONS (allow_commit_timestamp= true) ) PRIMARY KEY (Key) - ` + `, ); await googleSqlOperationUpdateDDL.promise(); // TODO: add columns using Interval Value and Interval Array Value. 
@@ -489,7 +496,7 @@ describe('Spanner', () => { "JsonbArray" JSONB[], "CommitTimestamp" SPANNER.COMMIT_TIMESTAMP ); - ` + `, ); await postgreSqlOperationUpdateDDL.promise(); } @@ -507,7 +514,7 @@ describe('Spanner', () => { insertData, dialect, callback, - columnsMetadataForRead?: {} + columnsMetadataForRead?: {}, ) { const id = generateName('id'); @@ -656,11 +663,11 @@ describe('Spanner', () => { assert.deepStrictEqual( JSON.stringify(rows![0][0].value[0][0]), - JSON.stringify(expected[0].value[0][0]) + JSON.stringify(expected[0].value[0][0]), ); assert.deepStrictEqual( JSON.stringify(rows![0][0].value[0][1]), - JSON.stringify(expected[0].value[0][1]) + JSON.stringify(expected[0].value[0][1]), ); done(); @@ -702,15 +709,15 @@ describe('Spanner', () => { assert.deepStrictEqual( JSON.stringify(rows![0][0]), - JSON.stringify(expected[0]) + JSON.stringify(expected[0]), ); assert.deepStrictEqual( JSON.stringify(rows![0][1].value[0][0]), - JSON.stringify(expected[1].value[0][0]) + JSON.stringify(expected[1].value[0][0]), ); assert.deepStrictEqual( JSON.stringify(rows![0][1].value[0][1]), - JSON.stringify(expected[1].value[0][1]) + JSON.stringify(expected[1].value[0][1]), ); done(); @@ -783,7 +790,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().BoolArray, [true, false]); done(); - } + }, ); }); @@ -1035,7 +1042,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().Float32Array, [null]); done(); - } + }, ); }); @@ -1059,7 +1066,7 @@ describe('Spanner', () => { assert.ok(row.toJSON().Float32Array[i] - values[i] <= 0.00001); } done(); - } + }, ); }); @@ -1158,7 +1165,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().FloatArray, [null]); done(); - } + }, ); }); @@ -1180,7 +1187,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().FloatArray, values); done(); - } + }, ); }); @@ -1208,7 +1215,7 @@ describe('Spanner', () => { 
numericInsert( done, Spanner.GOOGLE_STANDARD_SQL, - Spanner.numeric('3.141592653') + Spanner.numeric('3.141592653'), ); }); @@ -1216,7 +1223,7 @@ describe('Spanner', () => { numericInsert( done, Spanner.POSTGRESQL, - Spanner.pgNumeric('3.141592653') + Spanner.pgNumeric('3.141592653'), ); }); @@ -1243,7 +1250,7 @@ describe('Spanner', () => { numericInsertOutOfBounds( done, Spanner.GOOGLE_STANDARD_SQL, - Spanner.numeric('3.1415926535') + Spanner.numeric('3.1415926535'), ); }); @@ -1251,7 +1258,7 @@ describe('Spanner', () => { numericInsertOutOfBounds( done, Spanner.POSTGRESQL, - Spanner.pgNumeric('1e131072') + Spanner.pgNumeric('1e131072'), ); }); @@ -1279,7 +1286,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().NumericArray, [null]); done(); - } + }, ); }); @@ -1305,7 +1312,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().NumericArray, values); done(); - } + }, ); }); @@ -1373,7 +1380,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().StringArray, [null]); done(); - } + }, ); }); @@ -1393,7 +1400,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().StringArray, ['abc', 'def']); done(); - } + }, ); }); @@ -1405,7 +1412,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().StringArray, ['abc', 'def']); done(); - } + }, ); }); }); @@ -1459,7 +1466,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().BytesArray, [null]); done(); - } + }, ); }); @@ -1481,7 +1488,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().BytesArray, values); done(); - } + }, ); }); @@ -1514,7 +1521,7 @@ describe('Spanner', () => { key2: 'value2', }); done(); - } + }, ); }); @@ -1553,7 +1560,7 @@ describe('Spanner', () => { {key2: 'value2'}, ]); done(); - } + }, ); }); }); @@ -1602,7 +1609,7 @@ describe('Spanner', () => { 
assert.ifError(err); assert.deepStrictEqual(row.toJSON().TimestampArray, []); done(); - } + }, ); }); @@ -1622,7 +1629,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().TimestampArray, [null]); done(); - } + }, ); }); @@ -1644,7 +1651,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(row.toJSON().TimestampArray, values); done(); - } + }, ); }); @@ -1665,7 +1672,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual( Spanner.date(row.toJSON().DateValue), - Spanner.date() + Spanner.date(), ); done(); }); @@ -1777,17 +1784,17 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual( row.toJSON().ProtoMessageValue, - music.SingerInfo.toObject(protoMessageParams.value) + music.SingerInfo.toObject(protoMessageParams.value), ); done(); }, - {ProtoMessageValue: music.SingerInfo} + {ProtoMessageValue: music.SingerInfo}, ); }); it('GOOGLE_STANDARD_SQL should write bytes in the protoMessage column', done => { const value = music.SingerInfo.encode( - protoMessageParams.value + protoMessageParams.value, ).finish(); insert( {ProtoMessageValue: value}, @@ -1796,10 +1803,10 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual( row.toJSON().ProtoMessageValue, - value.toString() + value.toString(), ); done(); - } + }, ); }); @@ -1811,7 +1818,7 @@ describe('Spanner', () => { assert.ifError(err); assert.equal(row.toJSON().ProtoMessageValue, null); done(); - } + }, ); }); @@ -1827,13 +1834,13 @@ describe('Spanner', () => { ]); done(); }, - {ProtoMessageArray: music.SingerInfo} + {ProtoMessageArray: music.SingerInfo}, ); }); it('GOOGLE_STANDARD_SQL should write bytes array in the protoMessageArray column', done => { const value = music.SingerInfo.encode( - protoMessageParams.value + protoMessageParams.value, ).finish(); insert( {ProtoMessageArray: [value]}, @@ -1844,7 +1851,7 @@ describe('Spanner', () => { value.toString(), ]); done(); - } + }, ); }); @@ 
-1856,7 +1863,7 @@ describe('Spanner', () => { assert.ifError(err); assert.equal(row.toJSON().ProtoMessageArray, null); done(); - } + }, ); }); }); @@ -1883,11 +1890,11 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual( row.toJSON().ProtoEnumValue, - Object.getPrototypeOf(music.Genre)[enumParams.value] + Object.getPrototypeOf(music.Genre)[enumParams.value], ); done(); }, - {ProtoEnumValue: music.Genre} + {ProtoEnumValue: music.Genre}, ); }); @@ -1900,10 +1907,10 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual( row.toJSON().ProtoEnumValue, - value.toString() + value.toString(), ); done(); - } + }, ); }); @@ -1915,7 +1922,7 @@ describe('Spanner', () => { assert.ifError(err); assert.equal(row.toJSON().ProtoEnumValue, null); done(); - } + }, ); }); @@ -1931,7 +1938,7 @@ describe('Spanner', () => { ]); done(); }, - {ProtoEnumArray: music.Genre} + {ProtoEnumArray: music.Genre}, ); }); @@ -1946,7 +1953,7 @@ describe('Spanner', () => { value.toString(), ]); done(); - } + }, ); }); @@ -1958,7 +1965,7 @@ describe('Spanner', () => { assert.ifError(err); assert.equal(row.toJSON().ProtoEnumArray, null); done(); - } + }, ); }); }); @@ -2017,7 +2024,7 @@ describe('Spanner', () => { Spanner.pgJsonb({key2: 'value2'}), ]); done(); - } + }, ); }); }); @@ -2079,7 +2086,7 @@ describe('Spanner', () => { { autoCreate: true, }, - INSTANCE_CONFIG + INSTANCE_CONFIG, ); instance.get(config, err => { @@ -2116,7 +2123,7 @@ describe('Spanner', () => { concat(instances => { assert(instances.length > 0); done(); - }) + }), ); }); @@ -2138,7 +2145,7 @@ describe('Spanner', () => { assert.strictEqual(metadata!.displayName, newData.displayName); done(); }); - }) + }), ); }); @@ -2168,7 +2175,7 @@ describe('Spanner', () => { await instanceAdminClient.getInstanceConfig({ name: instanceAdminClient.instanceConfigPath( projectId!, - INSTANCE_CONFIG.config + INSTANCE_CONFIG.config, ), }); const [instanceConfigCreationOperation] = @@ -2178,15 +2185,15 @@ 
describe('Spanner', () => { instanceConfig: { name: instanceAdminClient.instanceConfigPath( projectId!, - instanceConfigId + instanceConfigId, ), baseConfig: instanceAdminClient.instanceConfigPath( projectId!, - INSTANCE_CONFIG.config + INSTANCE_CONFIG.config, ), displayName: instanceConfigId, replicas: baseInstanceConfig.replicas!.concat( - baseInstanceConfig.optionalReplicas![0] + baseInstanceConfig.optionalReplicas![0], ), }, }); @@ -2203,8 +2210,8 @@ describe('Spanner', () => { */ await Promise.all( INSTANCE_CONFIGS_TO_CLEAN.map(instanceConfig => - instanceConfig.delete({gaxOpts: GAX_OPTIONS}) - ) + instanceConfig.delete({gaxOpts: GAX_OPTIONS}), + ), ); }); @@ -2246,7 +2253,7 @@ describe('Spanner', () => { concat(instanceConfigs => { assert(instanceConfigs.length > 0); done(); - }) + }), ); }); @@ -2300,12 +2307,12 @@ describe('Spanner', () => { const operationForCurrentInstanceConfig = operationsWithoutFilter!.find( operation => operation.name && - operation.name.includes(instanceConfig.formattedName_) + operation.name.includes(instanceConfig.formattedName_), ); assert.ok(operationForCurrentInstanceConfig); assert.strictEqual( operationForCurrentInstanceConfig!.metadata!.type_url, - 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata' + 'type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata', ); }); @@ -2324,12 +2331,12 @@ describe('Spanner', () => { const operationForCurrentInstanceConfigWithFilterMetadata = CreateInstanceConfigMetadata.decode( operationForCurrentInstanceConfigWithFilter!.metadata! - .value! as Uint8Array + .value! as Uint8Array, ); assert.strictEqual( operationForCurrentInstanceConfigWithFilterMetadata.instanceConfig! 
.name, - `${instanceConfig.formattedName_}` + `${instanceConfig.formattedName_}`, ); }); @@ -2352,11 +2359,11 @@ describe('Spanner', () => { assert.ifError(err); assert.strictEqual( metadata!.displayName, - newData.instanceConfig.displayName + newData.instanceConfig.displayName, ); done(); }); - }) + }), ); }); @@ -2407,7 +2414,7 @@ describe('Spanner', () => { if (IS_EMULATOR_ENABLED) { assert.strictEqual( metadata!.databaseDialect, - 'DATABASE_DIALECT_UNSPECIFIED' + 'DATABASE_DIALECT_UNSPECIFIED', ); } else { assert.strictEqual(metadata!.databaseDialect, dialect); @@ -2455,7 +2462,7 @@ describe('Spanner', () => { concat(databases => { assert(databases.length > 0); done(); - }) + }), ); }); @@ -2496,12 +2503,12 @@ describe('Spanner', () => { statements!.some( s => replaceNewLinesAndSpacing(s, dialect) === - replaceNewLinesAndSpacing(createTableStatement, dialect) - ) + replaceNewLinesAndSpacing(createTableStatement, dialect), + ), ); done(); }); - }) + }), ); }; @@ -2517,7 +2524,7 @@ describe('Spanner', () => { done, DATABASE, Spanner.GOOGLE_STANDARD_SQL, - createTableStatement + createTableStatement, ); }); @@ -2552,10 +2559,10 @@ describe('Spanner', () => { const databaseCreateOperation = databaseCreateOperations[0]; assert.strictEqual( databaseCreateOperation.metadata!.type_url, - 'type.googleapis.com/google.spanner.admin.database.v1.CreateDatabaseMetadata' + 'type.googleapis.com/google.spanner.admin.database.v1.CreateDatabaseMetadata', ); const createMeta = CreateDatabaseMetadata.decode( - databaseCreateOperation.metadata!.value! as Uint8Array + databaseCreateOperation.metadata!.value! 
as Uint8Array, ); assert.strictEqual(createMeta.database, databaseFullName); }); @@ -2573,10 +2580,10 @@ describe('Spanner', () => { const databaseCreateOperation = databaseOperations.find( op => op.metadata!.type_url === - 'type.googleapis.com/google.spanner.admin.database.v1.CreateDatabaseMetadata' + 'type.googleapis.com/google.spanner.admin.database.v1.CreateDatabaseMetadata', ); const createMeta = CreateDatabaseMetadata.decode( - databaseCreateOperation!.metadata!.value! as Uint8Array + databaseCreateOperation!.metadata!.value! as Uint8Array, ); assert.strictEqual(createMeta.database, databaseFullName); }; @@ -2640,7 +2647,7 @@ describe('Spanner', () => { assert.ifError(err); assert.ok(statements.includes(query)); }); - }) + }), ); }; @@ -2663,7 +2670,7 @@ describe('Spanner', () => { const grantAccessToRole = async ( database, createRoleQuery, - grantAccessQuery + grantAccessQuery, ) => { database.updateSchema( [createRoleQuery, grantAccessQuery], @@ -2674,7 +2681,7 @@ describe('Spanner', () => { assert.ok(statements.includes(createRoleQuery)); assert.ok(statements.includes(grantAccessQuery)); }); - }) + }), ); }; @@ -2685,7 +2692,7 @@ describe('Spanner', () => { await grantAccessToRole( DATABASE, 'CREATE ROLE child', - 'GRANT SELECT ON TABLE Singers TO ROLE child' + 'GRANT SELECT ON TABLE Singers TO ROLE child', ); await new Promise(resolve => setTimeout(resolve, 60000)); }); @@ -2697,7 +2704,7 @@ describe('Spanner', () => { await grantAccessToRole( PG_DATABASE, 'CREATE ROLE child', - 'GRANT SELECT ON TABLE singers TO child' + 'GRANT SELECT ON TABLE singers TO child', ); await new Promise(resolve => setTimeout(resolve, 60000)); }); @@ -2706,7 +2713,7 @@ describe('Spanner', () => { database, createRoleQuery, grantPermissionQuery, - revokePermissionQuery + revokePermissionQuery, ) => { database.updateSchema( [createRoleQuery, grantPermissionQuery], @@ -2724,10 +2731,10 @@ describe('Spanner', () => { assert.ifError(err); 
assert.ok(!statements.includes(grantPermissionQuery)); }); - }) + }), ); }); - }) + }), ); }; @@ -2739,7 +2746,7 @@ describe('Spanner', () => { DATABASE, 'CREATE ROLE orphan', 'GRANT SELECT ON TABLE Singers TO ROLE orphan', - 'REVOKE SELECT ON TABLE Singers FROM ROLE orphan' + 'REVOKE SELECT ON TABLE Singers FROM ROLE orphan', ); await new Promise(resolve => setTimeout(resolve, 60000)); }); @@ -2752,7 +2759,7 @@ describe('Spanner', () => { PG_DATABASE, 'CREATE ROLE orphan', 'GRANT SELECT ON TABLE singers TO orphan', - 'REVOKE SELECT ON TABLE singers FROM orphan' + 'REVOKE SELECT ON TABLE singers FROM orphan', ); await new Promise(resolve => setTimeout(resolve, 60000)); }); @@ -2760,7 +2767,7 @@ describe('Spanner', () => { const userDefinedDatabaseRoleDropped = async ( database, createRoleQuery, - dropRoleQuery + dropRoleQuery, ) => { database.updateSchema( [createRoleQuery], @@ -2777,10 +2784,10 @@ describe('Spanner', () => { assert.ifError(err); assert.ok(!statements.includes(createRoleQuery)); }); - }) + }), ); }); - }) + }), ); }; @@ -2791,7 +2798,7 @@ describe('Spanner', () => { await userDefinedDatabaseRoleDropped( DATABASE, 'CREATE ROLE new_parent', - 'DROP ROLE new_parent' + 'DROP ROLE new_parent', ); await new Promise(resolve => setTimeout(resolve, 60000)); }); @@ -2803,7 +2810,7 @@ describe('Spanner', () => { await userDefinedDatabaseRoleDropped( PG_DATABASE, 'CREATE ROLE new_parent', - 'DROP ROLE new_parent' + 'DROP ROLE new_parent', ); await new Promise(resolve => setTimeout(resolve, 60000)); }); @@ -2833,9 +2840,9 @@ describe('Spanner', () => { table.deleteRows([id]); done(); }); - } + }, ); - }) + }), ); }; @@ -2846,7 +2853,7 @@ describe('Spanner', () => { grantAccessSuccess( done, DATABASE, - 'GRANT SELECT ON TABLE Singers TO ROLE read_access' + 'GRANT SELECT ON TABLE Singers TO ROLE read_access', ); }); @@ -2857,7 +2864,7 @@ describe('Spanner', () => { grantAccessSuccess( done, PG_DATABASE, - 'GRANT SELECT ON TABLE singers TO read_access' + 'GRANT 
SELECT ON TABLE singers TO read_access', ); }); @@ -2886,9 +2893,9 @@ describe('Spanner', () => { table.deleteRows([id]); done(); }); - } + }, ); - }) + }), ); }; @@ -2899,7 +2906,7 @@ describe('Spanner', () => { grantAccessFailure( done, DATABASE, - 'GRANT INSERT ON TABLE Singers TO ROLE write_access' + 'GRANT INSERT ON TABLE Singers TO ROLE write_access', ); }); @@ -2910,7 +2917,7 @@ describe('Spanner', () => { grantAccessFailure( done, PG_DATABASE, - 'GRANT INSERT ON TABLE singers TO write_access' + 'GRANT INSERT ON TABLE singers TO write_access', ); }); @@ -2926,8 +2933,8 @@ describe('Spanner', () => { databaseRoles.find( role => role.name === - database.formattedName_ + '/databaseRoles/new_parent' - ) + database.formattedName_ + '/databaseRoles/new_parent', + ), ); }; @@ -3045,11 +3052,11 @@ describe('Spanner', () => { const createDatabaseWithFKADC = async ( dialect, database_id, - database_schema + database_schema, ) => { const [database, operation] = await instance.createDatabase( database_id, - {databaseDialect: dialect} + {databaseDialect: dialect}, ); await operation.promise(); @@ -3060,7 +3067,7 @@ describe('Spanner', () => { const [schema] = await database.getSchema(); assert.strictEqual( schema.filter(x => x.includes('FKShoppingCartsCustomerId')).length, - 1 + 1, ); }; @@ -3068,7 +3075,7 @@ describe('Spanner', () => { await createDatabaseWithFKADC( Spanner.GOOGLE_STANDARD_SQL, fkadc_database_id, - fkadc_schema + fkadc_schema, ); }); @@ -3076,7 +3083,7 @@ describe('Spanner', () => { await createDatabaseWithFKADC( Spanner.POSTGRESQL, fkadc_database_pg_id, - fkadc_pg_schema + fkadc_pg_schema, ); }); @@ -3090,26 +3097,26 @@ describe('Spanner', () => { `ALTER TABLE ShoppingCarts ADD CONSTRAINT ${constraint_name} FOREIGN KEY (CustomerName) REFERENCES Customers(CustomerName) ON DELETE CASCADE`, ]; const [operationAddConstraint] = await database.updateSchema( - ddl_statements_add_constraints + ddl_statements_add_constraints, ); await 
operationAddConstraint.promise(); const [schema] = await database.getSchema(); assert.strictEqual( schema.filter(x => x.includes('FKShoppingCartsCustomerName')).length, - 1 + 1, ); const ddl_statements_drop_constraints = [ 'ALTER TABLE ShoppingCarts DROP CONSTRAINT FKShoppingCartsCustomerName', ]; const [operationDropConstraint] = await database.updateSchema( - ddl_statements_drop_constraints + ddl_statements_drop_constraints, ); await operationDropConstraint.promise(); const [schema1] = await database.getSchema(); assert.strictEqual( schema1.filter(x => x.includes('FKShoppingCartsCustomerName')).length, - 0 + 0, ); }; @@ -3117,7 +3124,7 @@ describe('Spanner', () => { const fkadc_database = instance.database(fkadc_database_id); await alterDatabaseWithFKADC( Spanner.GOOGLE_STANDARD_SQL, - fkadc_database + fkadc_database, ); }); @@ -3178,7 +3185,7 @@ describe('Spanner', () => { } catch (err) { assert.match( (err as grpc.ServiceError).message, - /Foreign key constraint `FKShoppingCartsCustomerId` is violated on table `ShoppingCarts`\./ + /Foreign key constraint `FKShoppingCartsCustomerId` is violated on table `ShoppingCarts`\./, ); } }); @@ -3190,14 +3197,14 @@ describe('Spanner', () => { } catch (err) { assert.match( (err as grpc.ServiceError).message, - /Foreign key constraint `FKShoppingCartsCustomerId` is violated on table `shoppingcarts`\./ + /Foreign key constraint `FKShoppingCartsCustomerId` is violated on table `shoppingcarts`\./, ); } }); const insertAndDeleteInSameTransactionErrorWithFKADC = ( done, - database + database, ) => { database.runTransaction((err, transaction) => { assert.ifError(err); @@ -3209,7 +3216,7 @@ describe('Spanner', () => { transaction!.commit(err => { assert.match( (err as grpc.ServiceError).message.toLowerCase(), - /9 failed_precondition: cannot write a value for the referenced column `customers.customerid` and delete it in the same transaction\./ + /9 failed_precondition: cannot write a value for the referenced column 
`customers.customerid` and delete it in the same transaction\./, ); done(); }); @@ -3228,7 +3235,7 @@ describe('Spanner', () => { const insertReferencingKeyAndDeleteReferencedKeyErrorWithFKADC = ( done, - database + database, ) => { const customersTable = database.table('Customers'); const cartsTable = database.table('ShoppingCarts'); @@ -3264,14 +3271,14 @@ describe('Spanner', () => { transaction!.commit(err => { assert.match( (err as grpc.ServiceError).message.toLowerCase(), - /9 failed_precondition: cannot modify a row in the table `shoppingcarts` because a referential action is deleting it in the same transaction\./ + /9 failed_precondition: cannot modify a row in the table `shoppingcarts` because a referential action is deleting it in the same transaction\./, ); done(); }); }); - } + }, ); - } + }, ); }; @@ -3279,7 +3286,7 @@ describe('Spanner', () => { const fkadc_database = instance.database(fkadc_database_id); insertReferencingKeyAndDeleteReferencedKeyErrorWithFKADC( done, - fkadc_database + fkadc_database, ); }); @@ -3287,13 +3294,13 @@ describe('Spanner', () => { const fkadc_database_pg = instance.database(fkadc_database_pg_id); insertReferencingKeyAndDeleteReferencedKeyErrorWithFKADC( done, - fkadc_database_pg + fkadc_database_pg, ); }); const deleteRuleOnInformationSchemaReferentialConstraints = ( done, - database + database, ) => { database.getSnapshot((err, transaction) => { assert.ifError(err); @@ -3305,7 +3312,7 @@ describe('Spanner', () => { assert.strictEqual(rows[0][0].value, 'CASCADE'); transaction!.end(); done(); - } + }, ); }); }; @@ -3314,7 +3321,7 @@ describe('Spanner', () => { const fkadc_database = instance.database(fkadc_database_id); deleteRuleOnInformationSchemaReferentialConstraints( done, - fkadc_database + fkadc_database, ); }); @@ -3322,7 +3329,7 @@ describe('Spanner', () => { const fkadc_database_pg = instance.database(fkadc_database_pg_id); deleteRuleOnInformationSchemaReferentialConstraints( done, - fkadc_database_pg + 
fkadc_database_pg, ); }); }); @@ -3357,7 +3364,7 @@ describe('Spanner', () => { if (IS_EMULATOR_ENABLED) { this.skip(); } - if (SKIP_BACKUPS === 'true') { + if (SKIP_BACKUPS === 'true' || KOKORO_JOB_NAME?.includes('presubmit')) { this.skip(); } googleSqlDatabase1 = DATABASE; @@ -3391,12 +3398,12 @@ describe('Spanner', () => { await createBackup( googleSqlDatabase1, googleSqlBackup1Name, - backupExpiryDate + backupExpiryDate, ); await createBackup( googleSqlDatabase2, googleSqlBackup2Name, - backupExpiryDate + backupExpiryDate, ); googleSqlBackup1 = instance.backup(googleSqlBackup1Name); @@ -3408,12 +3415,12 @@ describe('Spanner', () => { await createBackup( postgreSqlDatabase1, postgreSqlBackup1Name, - backupExpiryDate + backupExpiryDate, ); await createBackup( postgreSqlDatabase2, postgreSqlBackup2Name, - backupExpiryDate + backupExpiryDate, ); postgreSqlBackup1 = instance.backup(postgreSqlBackup1Name); @@ -3433,13 +3440,13 @@ describe('Spanner', () => { assert.strictEqual(backupInfo.state, 'READY'); assert.strictEqual( backupInfo.name, - `${instance.formattedName_}/backups/${backup1Name}` + `${instance.formattedName_}/backups/${backup1Name}`, ); assert.strictEqual(backupInfo.database, database1.formattedName_); assert.ok(backupInfo.createTime); assert.deepStrictEqual( Number(backupInfo.expireTime!.seconds), - backupExpiryPreciseDate.toStruct().seconds + backupExpiryPreciseDate.toStruct().seconds, ); assert.ok(backupInfo.sizeBytes! 
> 0); @@ -3449,7 +3456,7 @@ describe('Spanner', () => { const expireTime = await backup1.getExpireTime(); assert.deepStrictEqual( expireTime!.getFullTime(), - backupExpiryPreciseDate.getFullTime() + backupExpiryPreciseDate.getFullTime(), ); const exists = await backup1.exists(); assert.strictEqual(exists, true); @@ -3459,7 +3466,7 @@ describe('Spanner', () => { await completedBackup( googleSqlBackup1, googleSqlBackup1Name, - googleSqlDatabase1 + googleSqlDatabase1, ); }); @@ -3467,7 +3474,7 @@ describe('Spanner', () => { await completedBackup( postgreSqlBackup1, postgreSqlBackup1Name, - postgreSqlDatabase1 + postgreSqlDatabase1, ); }); @@ -3482,13 +3489,13 @@ describe('Spanner', () => { expireTime: backupExpiryDate, }); assert.fail( - 'Backup should have failed for expiration time in the past' + 'Backup should have failed for expiration time in the past', ); } catch (err) { // Expect to get invalid argument error indicating the expiry date assert.strictEqual( (err as grpc.ServiceError).code, - grpc.status.INVALID_ARGUMENT + grpc.status.INVALID_ARGUMENT, ); } }; @@ -3515,14 +3522,15 @@ describe('Spanner', () => { assert.ok(backups.length > 0); assert.ok( backups.find( - backup => backup.formattedName_ === googleSqlBackup1.formattedName_ - ) + backup => backup.formattedName_ === googleSqlBackup1.formattedName_, + ), ); if (!IS_EMULATOR_ENABLED && !SKIP_POSTGRESQL_BACKUP_TESTS) { assert.ok( backups.find( - backup => backup.formattedName_ === postgreSqlBackup1.formattedName_ - ) + backup => + backup.formattedName_ === postgreSqlBackup1.formattedName_, + ), ); } }); @@ -3552,24 +3560,26 @@ describe('Spanner', () => { assert.notStrictEqual(page2[0].formattedName_, page1[0].formattedName_); assert.ok( page3.find( - backup => backup.formattedName_ === googleSqlBackup1.formattedName_ - ) + backup => backup.formattedName_ === googleSqlBackup1.formattedName_, + ), ); assert.ok( page3.find( - backup => backup.formattedName_ === googleSqlBackup2.formattedName_ - ) + backup => 
backup.formattedName_ === googleSqlBackup2.formattedName_, + ), ); if (!IS_EMULATOR_ENABLED && !SKIP_POSTGRESQL_BACKUP_TESTS) { assert.ok( page3.find( - backup => backup.formattedName_ === postgreSqlBackup1.formattedName_ - ) + backup => + backup.formattedName_ === postgreSqlBackup1.formattedName_, + ), ); assert.ok( page3.find( - backup => backup.formattedName_ === postgreSqlBackup2.formattedName_ - ) + backup => + backup.formattedName_ === postgreSqlBackup2.formattedName_, + ), ); } }); @@ -3593,13 +3603,13 @@ describe('Spanner', () => { const [databaseMetadata] = await restoreDatabase.getMetadata(); assert.ok( databaseMetadata.state === 'READY' || - databaseMetadata.state === 'READY_OPTIMIZING' + databaseMetadata.state === 'READY_OPTIMIZING', ); // Validate restore state of database directly. const restoreState = await restoreDatabase.getState(); assert.ok( - restoreState === 'READY' || restoreState === 'READY_OPTIMIZING' + restoreState === 'READY' || restoreState === 'READY_OPTIMIZING', ); // Validate new database has restored data. @@ -3613,12 +3623,12 @@ describe('Spanner', () => { const restoreInfo = await restoreDatabase.getRestoreInfo(); assert.strictEqual( restoreInfo!.backupInfo!.backup, - backup1.formattedName_ + backup1.formattedName_, ); const [originalDatabaseMetadata] = await database1.getMetadata(); assert.strictEqual( restoreInfo!.backupInfo!.sourceDatabase, - originalDatabaseMetadata.name + originalDatabaseMetadata.name, ); assert.strictEqual(restoreInfo!.sourceType, 'BACKUP'); @@ -3634,7 +3644,7 @@ describe('Spanner', () => { await restoreBackup( googleSqlRestoreDatabaseId, googleSqlBackup1, - googleSqlDatabase1 + googleSqlDatabase1, ); }); @@ -3643,7 +3653,7 @@ describe('Spanner', () => { await restoreBackup( postgreSqlRestoreDatabaseId, postgreSqlBackup1, - postgreSqlDatabase1 + postgreSqlDatabase1, ); }); @@ -3656,7 +3666,7 @@ describe('Spanner', () => { // Expect to get error indicating database already exists. 
assert.strictEqual( (err as grpc.ServiceError).code, - grpc.status.ALREADY_EXISTS + grpc.status.ALREADY_EXISTS, ); } }; @@ -3677,12 +3687,12 @@ describe('Spanner', () => { // Read metadata, verify expiry date was updated. const [updatedMetadata] = await backup1.getMetadata(); const expiryDateFromMetadataAfterUpdate = new PreciseDate( - updatedMetadata.expireTime as DateStruct + updatedMetadata.expireTime as DateStruct, ); assert.deepStrictEqual( expiryDateFromMetadataAfterUpdate, - Spanner.timestamp(updatedBackupExpiryDate) + Spanner.timestamp(updatedBackupExpiryDate), ); }; @@ -3700,13 +3710,13 @@ describe('Spanner', () => { try { await backup1.updateExpireTime(expiryDateInPast); assert.fail( - 'Backup should have failed for expiration time in the past' + 'Backup should have failed for expiration time in the past', ); } catch (err) { // Expect to get invalid argument error indicating the expiry date. assert.strictEqual( (err as grpc.ServiceError).code, - grpc.status.INVALID_ARGUMENT + grpc.status.INVALID_ARGUMENT, ); } }; @@ -3731,7 +3741,7 @@ describe('Spanner', () => { } catch (err) { assert.strictEqual( (err as grpc.ServiceError).code, - grpc.status.NOT_FOUND + grpc.status.NOT_FOUND, ); } }; @@ -3750,12 +3760,12 @@ describe('Spanner', () => { const [operationsWithoutFilter] = await instance.getBackupOperations(); const operationForCurrentBackup = operationsWithoutFilter.find( operation => - operation.name && operation.name.includes(backup1.formattedName_) + operation.name && operation.name.includes(backup1.formattedName_), ); assert.ok(operationForCurrentBackup); assert.strictEqual( operationForCurrentBackup!.metadata!.type_url, - 'type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata' + 'type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata', ); // With a filter. 
@@ -3767,15 +3777,15 @@ describe('Spanner', () => { assert.ok(operationForCurrentBackupWithFilter); assert.strictEqual( operationForCurrentBackupWithFilter!.metadata!.type_url, - 'type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata' + 'type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata', ); const operationForCurrentBackupWithFilterMetadata = CreateBackupMetadata.decode( - operationForCurrentBackupWithFilter!.metadata!.value! as Uint8Array + operationForCurrentBackupWithFilter!.metadata!.value! as Uint8Array, ); assert.strictEqual( operationForCurrentBackupWithFilterMetadata.database, - database1.formattedName_ + database1.formattedName_, ); }; @@ -3910,7 +3920,7 @@ describe('Spanner', () => { const metadata = await session.getMetadata(); assert.strictEqual('parent_role', metadata[0].databaseRole); await session.delete(); - }) + }), ); }); @@ -3930,7 +3940,7 @@ describe('Spanner', () => { const metadata = await session.getMetadata(); assert.strictEqual('child_role', metadata[0].databaseRole); await session.delete(); - }) + }), ); }); @@ -3950,7 +3960,7 @@ describe('Spanner', () => { const metadata = await session.getMetadata(); assert.strictEqual('orphan_role', metadata[0].databaseRole); await session.delete(); - }) + }), ); }); }); @@ -3978,7 +3988,7 @@ describe('Spanner', () => { PhoneNumbers ARRAY, HasGear BOOL, ) PRIMARY KEY(SingerId)`, - GAX_OPTIONS + GAX_OPTIONS, ); await onPromiseOperationComplete(googleSqlCreateTable); @@ -3994,7 +4004,7 @@ describe('Spanner', () => { "Created" TIMESTAMPTZ, "HasGear" BOOL )`, - GAX_OPTIONS + GAX_OPTIONS, ); await onPromiseOperationComplete(postgreSqlCreateTable); }); @@ -4009,7 +4019,7 @@ describe('Spanner', () => { err => { assert.strictEqual(err!.code, 5); done(); - } + }, ); }; @@ -4030,7 +4040,7 @@ describe('Spanner', () => { err => { assert.strictEqual(err!.code, 5); done(); - } + }, ); }; @@ -4078,7 +4088,7 @@ describe('Spanner', () => { done(); }); - } + }, ); }; @@ -4122,7 
+4132,7 @@ describe('Spanner', () => { done(); }); - } + }, ); }; @@ -4163,7 +4173,7 @@ describe('Spanner', () => { assert.strictEqual(rows[0].Int.value, '8'); done(); }); - } + }, ); }; @@ -4206,7 +4216,7 @@ describe('Spanner', () => { done(); }); }); - } + }, ); }; @@ -4258,7 +4268,7 @@ describe('Spanner', () => { done(); }); }); - } + }, ); }; @@ -4272,7 +4282,7 @@ describe('Spanner', () => { const insertAndDeleteMultipleCompositeKeyRows = ( database, - createTableStatement + createTableStatement, ) => { const id1 = 1; const name1 = generateName('name1'); @@ -4346,7 +4356,7 @@ describe('Spanner', () => { )`; insertAndDeleteMultipleCompositeKeyRows( PG_DATABASE, - createTableStatement + createTableStatement, ); }); @@ -4387,7 +4397,7 @@ describe('Spanner', () => { done(); }); - } + }, ); }; @@ -4396,7 +4406,7 @@ describe('Spanner', () => { done, DATABASE, googleSqlTable, - `SELECT * FROM ${TABLE_NAME} ORDER BY SingerId` + `SELECT * FROM ${TABLE_NAME} ORDER BY SingerId`, ); }); @@ -4405,7 +4415,7 @@ describe('Spanner', () => { done, PG_DATABASE, postgreSqlTable, - `SELECT * FROM ${TABLE_NAME} ORDER BY "SingerId"` + `SELECT * FROM ${TABLE_NAME} ORDER BY "SingerId"`, ); }); @@ -4439,7 +4449,7 @@ describe('Spanner', () => { assert.strictEqual(row.Name, null); done(); - } + }, ); }); }); @@ -4484,7 +4494,7 @@ describe('Spanner', () => { assert.strictEqual(row.Name, updatedRow.Name); done(); - } + }, ); }); }); @@ -4555,7 +4565,7 @@ describe('Spanner', () => { for (const [key, value] of Object.entries(actualRows)) { if (value && key === 'Float32') { assert.ok( - EXPECTED_ROW[key] - (value as unknown as number) <= 0.00001 + EXPECTED_ROW[key] - (value as unknown as number) <= 0.00001, ); } else { assert.deepStrictEqual(EXPECTED_ROW[key], value); @@ -4593,7 +4603,7 @@ describe('Spanner', () => { for (const [key, value] of Object.entries(rows)) { if (key === 'Float32') { assert.ok( - EXPECTED_ROW[key] - (value as unknown as number) <= 0.00001 + EXPECTED_ROW[key] - (value 
as unknown as number) <= 0.00001, ); } else { assert.deepStrictEqual(EXPECTED_ROW[key], value); @@ -4638,7 +4648,7 @@ describe('Spanner', () => { for (const [key, value] of Object.entries(actualRows)) { if (key === 'Float32') { assert.ok( - EXPECTED_ROW[key] - (value as unknown as number) <= 0.00001 + EXPECTED_ROW[key] - (value as unknown as number) <= 0.00001, ); } else { assert.deepStrictEqual(EXPECTED_ROW[key], value); @@ -4728,7 +4738,7 @@ describe('Spanner', () => { // one mutation group is getting success assert.deepStrictEqual( actualStatusCode.sort(), - expectedStatusCode.sort() + expectedStatusCode.sort(), ); done(); }); @@ -4770,7 +4780,7 @@ describe('Spanner', () => { if (value && key === 'Float32') { assert.ok( GOOGLE_SQL_EXPECTED_ROW[key] - (value as unknown as number) <= - 0.00001 + 0.00001, ); } else { assert.deepStrictEqual(GOOGLE_SQL_EXPECTED_ROW[key], value); @@ -4801,7 +4811,7 @@ describe('Spanner', () => { if (value && key === 'Float32') { assert.ok( POSTGRESQL_EXPECTED_ROW[key] - (value as unknown as number) <= - 0.00001 + 0.00001, ); } else { assert.deepStrictEqual(POSTGRESQL_EXPECTED_ROW[key], value); @@ -4849,13 +4859,13 @@ describe('Spanner', () => { assert.strictEqual(values[0][0].value, 'a'); assert.deepStrictEqual( JSON.stringify(values[0][1].value), - JSON.stringify({value: '1'}) + JSON.stringify({value: '1'}), ); assert.strictEqual(values[1][0].value, 'b'); assert.deepStrictEqual( JSON.stringify(values[1][1].value), - JSON.stringify({value: '2'}) + JSON.stringify({value: '2'}), ); done(); @@ -5069,7 +5079,7 @@ describe('Spanner', () => { assert.strictEqual( JSON.stringify(rows[0][0].value), - JSON.stringify(expected) + JSON.stringify(expected), ); done(); }); @@ -5293,7 +5303,7 @@ describe('Spanner', () => { assert.deepStrictEqual(rows[0][0].value[i], expected[i]); } else { assert.ok( - rows[0][0].value[i] - expected[i]!['value'] <= 0.00001 + rows[0][0].value[i] - expected[i]!['value'] <= 0.00001, ); } } @@ -5448,7 +5458,7 @@ 
describe('Spanner', () => { assert.strictEqual( JSON.stringify(rows[0][0].value), - JSON.stringify(expected) + JSON.stringify(expected), ); done(); }); @@ -5533,7 +5543,7 @@ describe('Spanner', () => { assert.strictEqual( JSON.stringify(rows[0][0].value), - JSON.stringify(expected) + JSON.stringify(expected), ); done(); }); @@ -5662,7 +5672,7 @@ describe('Spanner', () => { assert.strictEqual( JSON.stringify(rows[0][0].value), - JSON.stringify(expected) + JSON.stringify(expected), ); done(); }); @@ -6557,7 +6567,7 @@ describe('Spanner', () => { done, DATABASE, query, - new Interval(19, 768, BigInt('123456789123')) + new Interval(19, 768, BigInt('123456789123')), ); }); @@ -6572,7 +6582,7 @@ describe('Spanner', () => { done, DATABASE, query, - new Interval(19, 768, BigInt('123456789123')) + new Interval(19, 768, BigInt('123456789123')), ); }); @@ -6590,7 +6600,7 @@ describe('Spanner', () => { done, PG_DATABASE, query, - new Interval(19, 768, BigInt('123456789123')) + new Interval(19, 768, BigInt('123456789123')), ); }); @@ -6605,7 +6615,7 @@ describe('Spanner', () => { done, PG_DATABASE, query, - new Interval(-19, -768, BigInt('123456789123')) + new Interval(-19, -768, BigInt('123456789123')), ); }); @@ -6680,7 +6690,7 @@ describe('Spanner', () => { done, DATABASE, query, - new Interval(100, 201, BigInt('123456789123')) + new Interval(100, 201, BigInt('123456789123')), ); }); @@ -6852,7 +6862,7 @@ describe('Spanner', () => { BytesValue BYTES(MAX), BytesArray ARRAY ) PRIMARY KEY (Key)`, - GAX_OPTIONS + GAX_OPTIONS, ); await onPromiseOperationComplete(googleSqlCreateTable); await googleSqlTable.insert(googleSqlExpectedRow); @@ -6863,7 +6873,7 @@ describe('Spanner', () => { "StringValue" VARCHAR, "BytesValue" BYTEA )`, - GAX_OPTIONS + GAX_OPTIONS, ); await onPromiseOperationComplete(postgreSqlCreateTable); await postgreSqlTable.insert(postgreSqlExpectedRow); @@ -6889,11 +6899,11 @@ describe('Spanner', () => { assert.strictEqual(row.Key, googleSqlExpectedRow.Key); 
assert.strictEqual( row.StringValue, - googleSqlExpectedRow.StringValue + googleSqlExpectedRow.StringValue, ); assert.deepStrictEqual( row.StringArray, - googleSqlExpectedRow.StringArray + googleSqlExpectedRow.StringArray, ); row.BytesValue = base64ToBuffer(row.BytesValue); @@ -6901,15 +6911,15 @@ describe('Spanner', () => { assert.deepStrictEqual( row.BytesValue, - googleSqlExpectedRow.BytesValue + googleSqlExpectedRow.BytesValue, ); assert.deepStrictEqual( row.BytesArray, - googleSqlExpectedRow.BytesArray + googleSqlExpectedRow.BytesArray, ); done(); - } + }, ); }); @@ -6927,17 +6937,17 @@ describe('Spanner', () => { assert.strictEqual(row.Key, postgreSqlExpectedRow.Key); assert.strictEqual( row.StringValue, - postgreSqlExpectedRow.StringValue + postgreSqlExpectedRow.StringValue, ); row.BytesValue = base64ToBuffer(row.BytesValue); assert.deepStrictEqual( row.BytesValue, - postgreSqlExpectedRow.BytesValue + postgreSqlExpectedRow.BytesValue, ); done(); - } + }, ); }); @@ -6957,11 +6967,11 @@ describe('Spanner', () => { assert.strictEqual(row.Key, googleSqlExpectedRow.Key); assert.strictEqual( row.StringValue, - googleSqlExpectedRow.StringValue + googleSqlExpectedRow.StringValue, ); assert.deepStrictEqual( row.StringArray, - googleSqlExpectedRow.StringArray + googleSqlExpectedRow.StringArray, ); row.BytesValue = base64ToBuffer(row.BytesValue); @@ -6969,11 +6979,11 @@ describe('Spanner', () => { assert.deepStrictEqual( row.BytesValue, - googleSqlExpectedRow.BytesValue + googleSqlExpectedRow.BytesValue, ); assert.deepStrictEqual( row.BytesArray, - googleSqlExpectedRow.BytesArray + googleSqlExpectedRow.BytesArray, ); done(); @@ -6996,13 +7006,13 @@ describe('Spanner', () => { assert.strictEqual(row.Key, postgreSqlExpectedRow.Key); assert.strictEqual( row.StringValue, - postgreSqlExpectedRow.StringValue + postgreSqlExpectedRow.StringValue, ); row.BytesValue = base64ToBuffer(row.BytesValue); assert.deepStrictEqual( row.BytesValue, - postgreSqlExpectedRow.BytesValue + 
postgreSqlExpectedRow.BytesValue, ); done(); @@ -7038,7 +7048,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(rows![0].toJSON(), ROW); done(); - } + }, ); }); }); @@ -7065,7 +7075,7 @@ describe('Spanner', () => { assert.ifError(err); assert.deepStrictEqual(rows![0].toJSON(), ROW); done(); - } + }, ); }); }; @@ -7095,7 +7105,7 @@ describe('Spanner', () => { Key STRING(MAX) NOT NULL, StringValue STRING(MAX) ) PRIMARY KEY (Key)`, - GAX_OPTIONS + GAX_OPTIONS, ); await onPromiseOperationComplete(googleSqlCreateTable); const googleSqlCreateIndex = await DATABASE.updateSchema(` @@ -7118,7 +7128,7 @@ describe('Spanner', () => { "Key" VARCHAR NOT NULL PRIMARY KEY, "StringValue" VARCHAR )`, - GAX_OPTIONS + GAX_OPTIONS, ); await onPromiseOperationComplete(postgreSqlCreateTable); const postgreSqlCreateIndex = await PG_DATABASE.updateSchema(` @@ -7364,7 +7374,7 @@ describe('Spanner', () => { { index: 'ReadByValue', }, - test.query + test.query, ); if (query.keys) { @@ -7516,12 +7526,12 @@ describe('Spanner', () => { rows!.some( r => JSON.stringify(r.toJSON()) === - JSON.stringify({SingerId: id, Name: name}) - ) + JSON.stringify({SingerId: id, Name: name}), + ), ); done(); }); - } + }, ); }); @@ -7568,9 +7578,9 @@ describe('Spanner', () => { ]); done(); - } + }, ); - } + }, ); }); @@ -7610,7 +7620,7 @@ describe('Spanner', () => { done(); }); - } + }, ); }); @@ -7644,9 +7654,9 @@ describe('Spanner', () => { ]); done(); - } + }, ); - } + }, ); }); }); @@ -7692,12 +7702,12 @@ describe('Spanner', () => { } }; await onPromiseOperationComplete( - await googleSqlTable.create(googleSqlSchema, GAX_OPTIONS) + await googleSqlTable.create(googleSqlSchema, GAX_OPTIONS), ); await insertRecords(googleSqlTable, googleSqlRecords); await onPromiseOperationComplete( - await postgreSqlTable.create(postgreSqlSchema, GAX_OPTIONS) + await postgreSqlTable.create(postgreSqlSchema, GAX_OPTIONS), ); await insertRecords(postgreSqlTable, postgreSqlRecords); }); @@ -7762,7 
+7772,7 @@ describe('Spanner', () => { done, PG_DATABASE, postgreSqlTable, - postgreSqlRecords + postgreSqlRecords, ); }); @@ -7852,7 +7862,7 @@ describe('Spanner', () => { done, DATABASE, googleSqlRecords, - `SELECT * FROM ${TABLE_NAME} ORDER BY Key` + `SELECT * FROM ${TABLE_NAME} ORDER BY Key`, ); }); @@ -7861,7 +7871,7 @@ describe('Spanner', () => { done, PG_DATABASE, postgreSqlRecords, - `SELECT * FROM ${TABLE_NAME} ORDER BY "Key"` + `SELECT * FROM ${TABLE_NAME} ORDER BY "Key"`, ); }); @@ -7893,7 +7903,7 @@ describe('Spanner', () => { done, database, table, - records + records, ) => { const options = { strong: true, @@ -7925,7 +7935,7 @@ describe('Spanner', () => { transaction!.end(); done(); }); - } + }, ); }); }); @@ -7936,7 +7946,7 @@ describe('Spanner', () => { done, DATABASE, googleSqlTable, - googleSqlRecords + googleSqlRecords, ); }); @@ -7945,7 +7955,7 @@ describe('Spanner', () => { done, PG_DATABASE, postgreSqlTable, - postgreSqlRecords + postgreSqlRecords, ); }); @@ -7953,7 +7963,7 @@ describe('Spanner', () => { done, database, table, - records + records, ) => { const options = { readTimestamp: records[records.length - 1].commitTimestamp, @@ -7988,7 +7998,7 @@ describe('Spanner', () => { transaction!.end(); done(); }); - } + }, ); }); }); @@ -7999,7 +8009,7 @@ describe('Spanner', () => { done, DATABASE, googleSqlTable, - googleSqlRecords + googleSqlRecords, ); }); @@ -8008,7 +8018,7 @@ describe('Spanner', () => { done, PG_DATABASE, postgreSqlTable, - postgreSqlRecords + postgreSqlRecords, ); }); @@ -8016,7 +8026,7 @@ describe('Spanner', () => { done, database, table, - records + records, ) => { const options = { exactStaleness: Date.now() - records[0].localTimestamp, @@ -8046,7 +8056,7 @@ describe('Spanner', () => { transaction!.end(); done(); }); - } + }, ); }); }); @@ -8057,7 +8067,7 @@ describe('Spanner', () => { done, DATABASE, googleSqlTable, - googleSqlRecords + googleSqlRecords, ); }); @@ -8066,7 +8076,7 @@ describe('Spanner', () => { done, 
PG_DATABASE, postgreSqlTable, - postgreSqlRecords + postgreSqlRecords, ); }); @@ -8098,7 +8108,7 @@ describe('Spanner', () => { assert.strictEqual(rows.length, googleSqlRecords.length); transaction!.end(); done(); - } + }, ); }); }); @@ -8119,7 +8129,7 @@ describe('Spanner', () => { }, }); await transaction!.commit(); - } + }, ); const gsqlTransaction = DATABASE.runTransactionAsync( @@ -8135,7 +8145,7 @@ describe('Spanner', () => { }, }); await transaction!.commit(); - } + }, ); return Promise.all([psqlTransaction, gsqlTransaction]); @@ -8223,7 +8233,7 @@ describe('Spanner', () => { database, insertQuery, updateQuery, - selectQuery + selectQuery, ) => { database.runTransaction((err, transaction) => { assert.ifError(err); @@ -8308,7 +8318,7 @@ describe('Spanner', () => { PG_DATABASE, insertQuery, updateQuery, - selectQuery + selectQuery, ); }); @@ -8316,7 +8326,7 @@ describe('Spanner', () => { done, database, updateQuery, - selectQuery + selectQuery, ) => { database.runTransaction((err, transaction) => { assert.ifError(err); @@ -8372,7 +8382,7 @@ describe('Spanner', () => { done, database, updateQuery, - selectQuery + selectQuery, ) => { database.runTransaction((err, transaction) => { assert.ifError(err); @@ -8561,7 +8571,7 @@ describe('Spanner', () => { database, insertQuery, updateQuery, - deletequery + deletequery, ) => { database.runTransaction((err, transaction) => { assert.ifError(err); @@ -8597,7 +8607,7 @@ describe('Spanner', () => { DATABASE, googleSqlInsertReturning, googleSqlUpdateReturning, - googleSqlDeleteReturning + googleSqlDeleteReturning, ); }); @@ -8610,7 +8620,7 @@ describe('Spanner', () => { PG_DATABASE, postgreSqlInsertReturning, postgreSqlUpdateReturning, - postgreSqlDeleteReturning + postgreSqlDeleteReturning, ); }); @@ -8631,7 +8641,7 @@ describe('Spanner', () => { database, insertQuery, updateQuery, - deletequery + deletequery, ) => { database.runTransaction((err, transaction) => { assert.ifError(err); @@ -8664,7 +8674,7 @@ 
describe('Spanner', () => { DATABASE, googleSqlInsertReturning, googleSqlUpdateReturning, - googleSqlDeleteReturning + googleSqlDeleteReturning, ); }); @@ -8678,7 +8688,7 @@ describe('Spanner', () => { PG_DATABASE, postgreSqlInsertReturning, postgreSqlUpdateReturning, - postgreSqlDeleteReturning + postgreSqlDeleteReturning, ); }); @@ -8686,7 +8696,7 @@ describe('Spanner', () => { database.runPartitionedUpdate(query, err => { assert.match( err.details, - /THEN RETURN is not supported in Partitioned DML\./ + /THEN RETURN is not supported in Partitioned DML\./, ); done(); }); @@ -8711,7 +8721,7 @@ describe('Spanner', () => { database, insertquery, updateQuery, - deleteQuery + deleteQuery, ) => { const rowCounts = await database.runTransactionAsync(async txn => { const [rowCounts] = await txn.batchUpdate([ @@ -8733,7 +8743,7 @@ describe('Spanner', () => { DATABASE, googleSqlInsertReturning, googleSqlUpdateReturning, - googleSqlDelete + googleSqlDelete, ); }); @@ -8746,7 +8756,7 @@ describe('Spanner', () => { PG_DATABASE, postgreSqlInsertReturning, postgreSqlUpdateReturning, - postgreSqlDelete + postgreSqlDelete, ); }); }); @@ -8793,7 +8803,7 @@ describe('Spanner', () => { const longRunningPdml = async function ( database, updateQuery, - selectQuery + selectQuery, ) { const count = 10000; @@ -8939,7 +8949,7 @@ describe('Spanner', () => { assert.strictEqual( err.message, - 'batchUpdate requires at least 1 DML statement.' 
+ 'batchUpdate requires at least 1 DML statement.', ); assert.strictEqual(err.code, 3); }; @@ -8966,7 +8976,7 @@ describe('Spanner', () => { await multipleDependingStatements( DATABASE, googleSqlInsert, - googleSqlUpdate + googleSqlUpdate, ); }); @@ -8974,7 +8984,7 @@ describe('Spanner', () => { await multipleDependingStatements( PG_DATABASE, postgreSqlInsert, - posgreSqlUpdate + posgreSqlUpdate, ); }); @@ -9016,7 +9026,7 @@ describe('Spanner', () => { await runBeforeRunUpdate( PG_DATABASE, postgreSqlInsert, - posgreSqlUpdate + posgreSqlUpdate, ); }); @@ -9024,7 +9034,7 @@ describe('Spanner', () => { database, insert, borked, - update + update, ) => { const err = await database.runTransactionAsync(async txn => { let err; @@ -9052,7 +9062,7 @@ describe('Spanner', () => { DATABASE, googleSqlInsert, googleSqlBorked, - googleSqlUpdate + googleSqlUpdate, ); }); @@ -9061,14 +9071,14 @@ describe('Spanner', () => { PG_DATABASE, postgreSqlInsert, postgreSqlBorked, - posgreSqlUpdate + posgreSqlUpdate, ); }); const ignoreAdditionalStatementErrors = async ( database, insert, - borked + borked, ) => { const err = await database.runTransactionAsync(async txn => { let err; @@ -9091,7 +9101,7 @@ describe('Spanner', () => { await ignoreAdditionalStatementErrors( DATABASE, googleSqlInsert, - googleSqlBorked + googleSqlBorked, ); }); @@ -9099,7 +9109,7 @@ describe('Spanner', () => { await ignoreAdditionalStatementErrors( PG_DATABASE, postgreSqlInsert, - postgreSqlBorked + postgreSqlBorked, ); }); }); @@ -9266,7 +9276,7 @@ describe('Spanner', () => { const row = rows[0].toJSON(); callback(null, row.NumberValue); - } + }, ); } }; @@ -9364,7 +9374,7 @@ describe('Spanner', () => { done, database, table, - query + query, ) => { const key = 'k888'; let attempts = 0; @@ -9459,7 +9469,7 @@ describe('Spanner', () => { done, PG_DATABASE, postgreSqlTable, - query + query, ); }); @@ -9552,7 +9562,7 @@ describe('Spanner', () => { done, PG_DATABASE, postgreSqlTable, - query + query, ); }); @@ 
-9568,7 +9578,7 @@ describe('Spanner', () => { if (attempts++ === 1) { assert.strictEqual(err!.code, 4); assert( - err!.message.startsWith('Deadline for Transaction exceeded.') + err!.message.startsWith('Deadline for Transaction exceeded.'), ); done(); @@ -9659,7 +9669,7 @@ describe('Spanner', () => { assert.strictEqual(err?.details, expectedErrorMessage); transaction!.end(); done(); - } + }, ); }); }); @@ -9685,7 +9695,7 @@ describe('Spanner', () => { err => { assert.ifError(err); transaction!.commit(done); - } + }, ); }); } else { @@ -9693,7 +9703,7 @@ describe('Spanner', () => { } }); - it('should create and execute a query partition', function (done) { + it('should create and execute a query partition using callback', function (done) { if (IS_EMULATOR_ENABLED) { this.skip(); } @@ -9723,7 +9733,7 @@ describe('Spanner', () => { }); }); - it('should create and execute a read partition', function (done) { + it('should create and execute a read partition using callback', function (done) { if (IS_EMULATOR_ENABLED) { this.skip(); } @@ -9754,6 +9764,71 @@ describe('Spanner', () => { }); }); }); + + it('should create and execute a query partition using await', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + + const [transaction] = await DATABASE.createBatchTransaction(); + const selectQuery = { + sql: 'SELECT * FROM TxnTable where Key = @id', + params: { + id: 'k998', + }, + }; + + let row_count = 0; + try { + const [queryPartitions]: CreateQueryPartitionsResponse = + await transaction.createQueryPartitions(selectQuery); + assert.deepStrictEqual(queryPartitions.length, 1); + + const promises = queryPartitions.map(async queryPartition => { + const [results]: RunResponse = + await transaction.execute(queryPartition); + row_count += results.map(row => row.toJSON()).length; + assert.strictEqual(row_count, 1); + }); + + await Promise.all(promises); + } catch (err) { + assert.ifError(err); + } + }); + + it('should create and execute a read partition 
using await', async function () { + if (IS_EMULATOR_ENABLED) { + this.skip(); + } + const [transaction] = await DATABASE.createBatchTransaction(); + const key = 'k998'; + const QUERY = { + table: googleSqlTable.name, + // Set databoostenabled to true for enabling serveless analytics. + dataBoostEnabled: true, + keys: [key], + columns: ['Key'], + }; + + let read_row_count = 0; + try { + const [readPartitions]: CreateReadPartitionsResponse = + await transaction.createReadPartitions(QUERY); + assert.deepStrictEqual(readPartitions.length, 1); + + const promises = readPartitions.map(async readPartition => { + const [results]: ReadResponse = + await transaction.execute(readPartition); + read_row_count += results.map(row => row.toJSON()).length; + assert.strictEqual(read_row_count, 1); + }); + + await Promise.all(promises); + } catch (err) { + assert.ifError(err); + } + }); }); }); }); @@ -9810,7 +9885,7 @@ async function deleteOldTestInstances() { const toDelete = instances.filter( instance => instance.id.includes(PREFIX) && - isOld(Number(instance.metadata!.labels!.created)) + isOld(Number(instance.metadata!.labels!.created)), ); return deleteInstanceArray(toDelete); @@ -9826,8 +9901,8 @@ function deleteInstanceArray(instanceArray) { const limit = pLimit(5); return Promise.all( instanceArray.map(instance => - limit(() => setTimeout(deleteInstance, delay, instance)) - ) + limit(() => setTimeout(deleteInstance, delay, instance)), + ), ); } async function deleteInstance(instance: Instance) { diff --git a/test/backup.ts b/test/backup.ts index b1cfcd7d4..a331f82be 100644 --- a/test/backup.ts +++ b/test/backup.ts @@ -119,7 +119,7 @@ describe('Backup', () => { it('should localize the formatted instance name', () => { assert.strictEqual( backup.instanceFormattedName_, - INSTANCE.formattedName_ + INSTANCE.formattedName_, ); }); @@ -184,7 +184,7 @@ describe('Backup', () => { databasePath: DATABASE_FORMATTED_NAME, expireTime: BACKUP_EXPIRE_TIME, }, - assert.ifError + 
assert.ifError, ); }); @@ -204,7 +204,7 @@ describe('Backup', () => { expireTime: BACKUP_EXPIRE_TIME, gaxOptions, }, - assert.ifError + assert.ifError, ); }); @@ -225,7 +225,7 @@ describe('Backup', () => { expireTime: BACKUP_EXPIRE_TIME, encryptionConfig, }, - assert.ifError + assert.ifError, ); }); @@ -243,7 +243,7 @@ describe('Backup', () => { backup.request = config => { assert.deepStrictEqual( config.reqOpts.backup.expireTime, - EXP_BACKUP_EXPIRE_TIME.toStruct() + EXP_BACKUP_EXPIRE_TIME.toStruct(), ); done(); }; @@ -253,7 +253,7 @@ describe('Backup', () => { databasePath: DATABASE_FORMATTED_NAME, expireTime: BACKUP_EXPIRE_TIME, }, - assert.ifError + assert.ifError, ); }); @@ -268,7 +268,7 @@ describe('Backup', () => { databasePath: DATABASE_FORMATTED_NAME, expireTime: BACKUP_EXPIRE_TIME, }, - assert.ifError + assert.ifError, ); }); @@ -276,7 +276,7 @@ describe('Backup', () => { backup.request = config => { assert.deepStrictEqual( config.reqOpts.backup.versionTime, - EXP_BACKUP_VERSION_TIME.toStruct() + EXP_BACKUP_VERSION_TIME.toStruct(), ); done(); }; @@ -287,7 +287,7 @@ describe('Backup', () => { expireTime: BACKUP_EXPIRE_TIME, versionTime: BACKUP_VERSION_TIME, }, - assert.ifError + assert.ifError, ); }); @@ -313,7 +313,7 @@ describe('Backup', () => { assert.strictEqual(op, null); assert.deepStrictEqual(resp, API_RESPONSE); done(); - } + }, ); }); }); @@ -340,7 +340,7 @@ describe('Backup', () => { assert.strictEqual(op, OPERATION); assert.strictEqual(resp, API_RESPONSE); done(); - } + }, ); }); }); @@ -699,7 +699,7 @@ describe('Backup', () => { done(); }; - backup.delete(); + void backup.delete(); }); it('should accept gaxOpts', done => { @@ -733,14 +733,14 @@ describe('Backup', () => { it('should return the name if already formatted', () => { assert.strictEqual( Backup.formatName_(INSTANCE.formattedName_, BACKUP_FORMATTED_NAME), - BACKUP_FORMATTED_NAME + BACKUP_FORMATTED_NAME, ); }); it('should format the name', () => { const formattedName_ = 
Backup.formatName_( INSTANCE.formattedName_, - BACKUP_NAME + BACKUP_NAME, ); assert.strictEqual(formattedName_, BACKUP_FORMATTED_NAME); }); diff --git a/test/batch-transaction.ts b/test/batch-transaction.ts index 87ad8343c..6f8d13894 100644 --- a/test/batch-transaction.ts +++ b/test/batch-transaction.ts @@ -32,6 +32,8 @@ import { CLOUD_RESOURCE_HEADER, LEADER_AWARE_ROUTING_HEADER, } from '../src/common'; +import {ExecuteSqlRequest} from '../src/transaction'; +import {CallOptions} from 'google-gax'; let promisified = false; const fakePfy = extend({}, pfy, { @@ -159,16 +161,16 @@ describe('BatchTransaction', () => { }, }; - const QUERY = { + const QUERY: ExecuteSqlRequest = { sql: 'SELECT * FROM Singers', - gaxOptions: GAX_OPTS, + gaxOptions: GAX_OPTS as CallOptions, params: {}, types: {}, dataBoostEnabled: true, directedReadOptions: fakeDirectedReadOptionsForRequest, }; - it('should make the correct request', () => { + it('should make the correct request using callback', () => { const fakeParams = { params: {a: 'b'}, paramTypes: {a: 'string'}, @@ -192,11 +194,11 @@ describe('BatchTransaction', () => { assert.strictEqual(gaxOpts, GAX_OPTS); assert.deepStrictEqual( headers, - Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}) + Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}), ); }); - it('should accept query as string', () => { + it('should accept query as string in a callback based request to createQueryPartitions', () => { const query = 'SELECT * FROM Singers'; const expectedQuery = Object.assign({}, {sql: query}); @@ -214,6 +216,34 @@ describe('BatchTransaction', () => { assert.deepStrictEqual(reqOpts, expectedQuery); assert.strictEqual(gaxOpts, undefined); }); + + it('should make the correct request using await', async () => { + const fakeParams = { + params: {a: 'b'}, + paramTypes: {a: 'string'}, + dataBoostEnabled: true, + directedReadOptions: fakeDirectedReadOptionsForRequest, + }; + + const expectedQuery = Object.assign({sql: QUERY.sql}, 
fakeParams); + const stub = sandbox.stub(batchTransaction, 'createPartitions_'); + + (sandbox.stub(FakeTransaction, 'encodeParams') as sinon.SinonStub) + .withArgs(QUERY) + .returns(fakeParams); + + await batchTransaction.createQueryPartitions(QUERY); + + const {client, method, reqOpts, gaxOpts, headers} = stub.lastCall.args[0]; + assert.strictEqual(client, 'SpannerClient'); + assert.strictEqual(method, 'partitionQuery'); + assert.deepStrictEqual(reqOpts, expectedQuery); + assert.strictEqual(gaxOpts, GAX_OPTS); + assert.deepStrictEqual( + headers, + Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}), + ); + }); }); describe('createPartitions_', () => { @@ -341,7 +371,7 @@ describe('BatchTransaction', () => { directedReadOptions: fakeDirectedReadOptionsForRequest, }; - it('should make the correct request', () => { + it('should make the correct request using callback', () => { const fakeKeySet = {}; const expectedQuery = { table: QUERY.table, @@ -365,7 +395,35 @@ describe('BatchTransaction', () => { assert.strictEqual(gaxOpts, GAX_OPTS); assert.deepStrictEqual( headers, - Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}) + Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}), + ); + }); + + it('should make the correct request using await', async () => { + const fakeKeySet = {}; + const expectedQuery = { + table: QUERY.table, + keySet: fakeKeySet, + dataBoostEnabled: true, + directedReadOptions: fakeDirectedReadOptionsForRequest, + }; + + const stub = sandbox.stub(batchTransaction, 'createPartitions_'); + + (sandbox.stub(FakeTransaction, 'encodeKeySet') as sinon.SinonStub) + .withArgs(QUERY) + .returns(fakeKeySet); + + await batchTransaction.createReadPartitions(QUERY); + + const {client, method, reqOpts, gaxOpts, headers} = stub.lastCall.args[0]; + assert.strictEqual(client, 'SpannerClient'); + assert.strictEqual(method, 'partitionRead'); + assert.deepStrictEqual(reqOpts, expectedQuery); + assert.strictEqual(gaxOpts, GAX_OPTS); + assert.deepStrictEqual( + 
headers, + Object.assign({[LEADER_AWARE_ROUTING_HEADER]: 'true'}), ); }); }); @@ -383,7 +441,7 @@ describe('BatchTransaction', () => { }, }; - it('should make read requests for read partitions', () => { + it('should make read requests for read partitions using callback', () => { const partition = {table: 'abc'}; const stub = sandbox.stub(batchTransaction, 'read'); @@ -432,6 +490,17 @@ describe('BatchTransaction', () => { const query = stub.lastCall.args[0]; assert.strictEqual(query, partition); }); + + it('should make read requests for read partitions using await', async () => { + const partition = {table: 'abc'}; + const stub = sandbox.stub(batchTransaction, 'read'); + + await batchTransaction.execute(partition); + + const [table, options] = stub.lastCall.args; + assert.strictEqual(table, partition.table); + assert.strictEqual(options, partition); + }); }); describe('executeStream', () => { @@ -472,7 +541,7 @@ describe('BatchTransaction', () => { beforeEach(() => { batchTransaction.id = ID; - batchTransaction.session = SESSION as Session; + batchTransaction.session = SESSION as unknown as Session; batchTransaction.readTimestampProto = TIMESTAMP; }); diff --git a/test/codec.ts b/test/codec.ts index 758d8e015..9d364d493 100644 --- a/test/codec.ts +++ b/test/codec.ts @@ -210,7 +210,7 @@ describe('codec', () => { () => { int.valueOf(); }, - new RegExp('Integer ' + value + ' is out of bounds.') + new RegExp('Integer ' + value + ' is out of bounds.'), ); }); }); @@ -239,7 +239,7 @@ describe('codec', () => { () => { oid.valueOf(); }, - new RegExp('PG.OID ' + value + ' is out of bounds.') + new RegExp('PG.OID ' + value + ' is out of bounds.'), ); }); }); @@ -320,14 +320,14 @@ describe('codec', () => { it('should throw an error if months is not an integer', () => { assert.throws( () => new codec.Interval(1.5, 2, BigInt(1000)), - new RegExp('Invalid months: 1.5, months should be an integral value') + new RegExp('Invalid months: 1.5, months should be an integral value'), ); 
}); it('should throw an error if days is not an integer', () => { assert.throws( () => new codec.Interval(1, 2.5, BigInt(1000)), - new RegExp('Invalid days: 2.5, days should be an integral value') + new RegExp('Invalid days: 2.5, days should be an integral value'), ); }); @@ -335,8 +335,8 @@ describe('codec', () => { assert.throws( () => new codec.Interval(1, 2, null), new RegExp( - 'Invalid nanoseconds: null, nanoseconds should be a valid bigint value' - ) + 'Invalid nanoseconds: null, nanoseconds should be a valid bigint value', + ), ); }); }); @@ -403,7 +403,7 @@ describe('codec', () => { it('should throw an error if input is undefined', () => { assert.throws( () => codec.Interval.fromMilliseconds(undefined), - GoogleError + GoogleError, ); }); @@ -423,7 +423,7 @@ describe('codec', () => { it('should throw an error if input is undefined', () => { assert.throws( () => codec.Interval.fromMicroseconds(undefined), - GoogleError + GoogleError, ); }); @@ -443,7 +443,7 @@ describe('codec', () => { it('should throw an error if input is undefined', () => { assert.throws( () => codec.Interval.fromNanoseconds(undefined), - GoogleError + GoogleError, ); }); @@ -600,7 +600,7 @@ describe('codec', () => { expected: new codec.Interval( 0, 0, - BigInt('-316224000000000000000') + BigInt('-316224000000000000000'), ), }, { @@ -608,7 +608,7 @@ describe('codec', () => { expected: new codec.Interval( 25, 15, - BigInt('316223999999999999999') + BigInt('316223999999999999999'), ), }, { @@ -616,7 +616,7 @@ describe('codec', () => { expected: new codec.Interval( 25, 15, - BigInt('-316223999999999999999') + BigInt('-316223999999999999999'), ), }, ]; @@ -654,7 +654,7 @@ describe('codec', () => { codec.Interval.fromISO8601(str); }, new RegExp('Invalid ISO8601 duration string'), - `Expected exception on parsing ${str}` + `Expected exception on parsing ${str}`, ); }); }); @@ -813,27 +813,27 @@ describe('codec', () => { // Test with different values for each field (including negative) 
assert.equal( interval1.equals(new codec.Interval(1, 2, BigInt(-4))), - false + false, ); assert.equal( interval1.equals(new codec.Interval(1, -3, BigInt(3))), - false + false, ); assert.equal( interval1.equals(new codec.Interval(-2, 2, BigInt(3))), - false + false, ); assert.equal( interval3.equals(new codec.Interval(-4, -5, BigInt(6))), - false + false, ); assert.equal( interval3.equals(new codec.Interval(-4, 5, BigInt(-6))), - false + false, ); assert.equal( interval3.equals(new codec.Interval(4, -5, BigInt(-6))), - false + false, ); // Test with null and undefined @@ -917,14 +917,14 @@ describe('codec', () => { new GoogleError(`protoMessageParams cannot be used to construct the ProtoMessage. Pass the serialized buffer of the proto message as the value or provide the message object along with the - corresponding messageFunction generated by protobufjs-cli.`) + corresponding messageFunction generated by protobufjs-cli.`), ); }); it('toJSON with messageFunction', () => { assert.deepEqual( new codec.ProtoMessage(protoMessageParams).toJSON(), - music.SingerInfo.toObject(protoMessageParams.value) + music.SingerInfo.toObject(protoMessageParams.value), ); }); @@ -960,7 +960,7 @@ describe('codec', () => { new GoogleError(`protoEnumParams cannot be used for constructing the ProtoEnum. 
Pass the number as the value or provide the enum string constant as the value along with the corresponding enumObject generated - by protobufjs-cli.`) + by protobufjs-cli.`), ); }); @@ -974,7 +974,7 @@ describe('codec', () => { value: music.Genre.JAZZ, fullName: 'examples.spanner.music.Genre', }).toJSON(), - 1 + 1, ); }); }); @@ -1240,7 +1240,7 @@ describe('codec', () => { code: google.spanner.v1.TypeCode.PROTO, protoTypeFqn: 'examples.spanner.music.SingerInfo', }, - music.SingerInfo + music.SingerInfo, ); assert.deepStrictEqual(decoded, expected); @@ -1461,7 +1461,7 @@ describe('codec', () => { { name: 'fieldName', value: new codec.Int(value.fieldName), - } + }, ); assert(decoded instanceof codec.Struct); @@ -1499,7 +1499,7 @@ describe('codec', () => { { name: 'fieldName', value: new codec.Int(value[1]), - } + }, ); assert(decoded instanceof codec.Struct); @@ -1547,7 +1547,7 @@ describe('codec', () => { assert.strictEqual( encoded, - music.SingerInfo.encode(singerInfo).finish().toString('base64') + music.SingerInfo.encode(singerInfo).finish().toString('base64'), ); }); @@ -1701,7 +1701,7 @@ describe('codec', () => { assert.deepStrictEqual( encoded, - '{"boolKey":true,"numberKey":3.14,"stringKey":"test","objectKey":{"innerKey":"inner-value"}}' + '{"boolKey":true,"numberKey":3.14,"stringKey":"test","objectKey":{"innerKey":"inner-value"}}', ); }); @@ -1711,14 +1711,14 @@ describe('codec', () => { // a JSON string. 
const nesting = 100; const value = JSON.parse( - '{"k": '.repeat(nesting).concat('"v"').concat('}'.repeat(nesting)) + '{"k": '.repeat(nesting).concat('"v"').concat('}'.repeat(nesting)), ); const encoded = codec.encode(value); assert.deepStrictEqual( encoded, - '{"k":'.repeat(nesting).concat('"v"').concat('}'.repeat(nesting)) + '{"k":'.repeat(nesting).concat('"v"').concat('}'.repeat(nesting)), ); }); @@ -1738,8 +1738,8 @@ describe('codec', () => { assert.deepStrictEqual( decoded, JSON.parse( - '{"k":'.repeat(nesting).concat('"v"').concat('}'.repeat(nesting)) - ) + '{"k":'.repeat(nesting).concat('"v"').concat('}'.repeat(nesting)), + ), ); }); }); @@ -1813,7 +1813,7 @@ describe('codec', () => { codec.getType(new codec.Interval(1, 2, BigInt(3))), { type: 'interval', - } + }, ); }); diff --git a/test/common/service-object.ts b/test/common/service-object.ts index d9c9e52e5..7e5544eb8 100644 --- a/test/common/service-object.ts +++ b/test/common/service-object.ts @@ -231,7 +231,7 @@ describe('GrpcServiceObject', () => { const ret = grpcServiceObject.requestStream.apply( grpcServiceObject, - args + args, ); assert.strictEqual(ret, expectedReturnValue); }); @@ -252,7 +252,7 @@ describe('GrpcServiceObject', () => { const ret = grpcServiceObject.requestWritableStream.apply( grpcServiceObject, - args + args, ); assert.strictEqual(ret, expectedReturnValue); }); diff --git a/test/common/service.ts b/test/common/service.ts index 293f98d0f..1707c9f9c 100644 --- a/test/common/service.ts +++ b/test/common/service.ts @@ -33,7 +33,7 @@ const glob = global as {} as {GCLOUD_SANDBOX_ENV?: boolean | {}}; const gaxProtosDir = path.join( path.dirname(require.resolve('google-gax')), '..', - 'protos' + 'protos', ); let getUserAgentFromPackageJsonOverride: Function | null; @@ -72,7 +72,7 @@ const fakeGrpcProtoLoader = { loadSync(filename: string, options?: grpcProtoLoader.Options) { return (grpcProtoLoadOverride || grpcProtoLoader.loadSync)( filename, - options + options, ); }, }; @@ -201,9 
+201,9 @@ describe('GrpcService', () => { grpcVersion: 'grpc-foo/1.2.3', customEndpoint: 'endpoint', }, - CONFIG + CONFIG, ), - OPTIONS + OPTIONS, ); assert.strictEqual(grpcService.grpc, fakeGrpc); assert.strictEqual(grpcService.grpcVersion, 'grpc-foo/1.2.3'); @@ -220,7 +220,7 @@ describe('GrpcService', () => { }; const grpcService = new GrpcService( Object.assign({grpc: fakeGrpc}, CONFIG), - OPTIONS + OPTIONS, ); assert.strictEqual(grpcService.grpc, fakeGrpc); assert.strictEqual(grpcService.grpcVersion, 'grpc/unknown'); @@ -368,7 +368,7 @@ describe('GrpcService', () => { const grpcService = new GrpcService(config, OPTIONS); assert.deepStrictEqual( grpcService.grpcMetadata.getMap(), - fakeGrpcMetadata + fakeGrpcMetadata, ); }); @@ -377,12 +377,12 @@ describe('GrpcService', () => { { 'x-goog-api-client': EXPECTED_API_CLIENT_HEADER, }, - CONFIG.grpcMetadata + CONFIG.grpcMetadata, ); const grpcService = new GrpcService(CONFIG, OPTIONS); assert.deepStrictEqual( grpcService.grpcMetadata.getMap(), - fakeGrpcMetadata + fakeGrpcMetadata, ); }); @@ -417,7 +417,7 @@ describe('GrpcService', () => { it('should localize the service', () => { assert.deepStrictEqual( Object.keys(grpcService.protos), - Object.keys(CONFIG.protoServices) + Object.keys(CONFIG.protoServices), ); }); @@ -442,7 +442,7 @@ describe('GrpcService', () => { for (const serviceName of Object.keys(CONFIG.protoServices)) { assert.strictEqual( grpcService.protos[serviceName], - MOCK_GRPC_API[`google.${SERVICE_PATH}.${serviceName}`] + MOCK_GRPC_API[`google.${SERVICE_PATH}.${serviceName}`], ); } }); @@ -536,7 +536,7 @@ describe('GrpcService', () => { }); assert.strictEqual( GrpcService.objToStruct_(obj, options), - convertedObject + convertedObject, ); }); }); @@ -1133,7 +1133,7 @@ describe('GrpcService', () => { { objectMode: true, }, - REQ_OPTS + REQ_OPTS, ); grpcService.requestStream(PROTO_OPTS, reqOpts); @@ -1144,7 +1144,7 @@ describe('GrpcService', () => { assert.strictEqual(retryRequestOptions.objectMode, 
true); assert.strictEqual( retryRequestOptions.shouldRetryFn, - GrpcService.shouldRetryRequest_ + GrpcService.shouldRetryRequest_, ); }); @@ -1684,7 +1684,7 @@ describe('GrpcService', () => { assert.strictEqual( grpcService.decorateRequest_(reqOpts), - replacedReqOpts + replacedReqOpts, ); }); }); @@ -1869,8 +1869,8 @@ describe('GrpcService', () => { { 'grpc.primary_user_agent': grpcService.userAgent, }, - GrpcService.GRPC_SERVICE_OPTIONS - ) + GrpcService.GRPC_SERVICE_OPTIONS, + ), ); return fakeService; @@ -2044,7 +2044,7 @@ describe('GrpcService', () => { assert.strictEqual( objectToStructConverter.encodeValue_(buffer).blobValue.toString(), - 'Value' + 'Value', ); }); @@ -2111,7 +2111,7 @@ describe('GrpcService', () => { it('should replace circular reference with [Circular]', () => { assert.deepStrictEqual( objectToStructConverter.encodeValue_(VALUE), - {stringValue: '[Circular]'} + {stringValue: '[Circular]'}, ); }); }); @@ -2132,7 +2132,7 @@ describe('GrpcService', () => { assert.deepStrictEqual( objectToStructConverter.encodeValue_(date, OPTIONS), - {stringValue: String(date)} + {stringValue: String(date)}, ); }); }); diff --git a/test/data/singer.d.ts b/test/data/singer.d.ts index 87d1cfc11..07913d33b 100644 --- a/test/data/singer.d.ts +++ b/test/data/singer.d.ts @@ -73,7 +73,7 @@ export namespace examples { * @returns SingerInfo instance */ public static create( - properties?: examples.spanner.music.ISingerInfo + properties?: examples.spanner.music.ISingerInfo, ): examples.spanner.music.SingerInfo; /** @@ -84,7 +84,7 @@ export namespace examples { */ public static encode( message: examples.spanner.music.ISingerInfo, - writer?: $protobuf.Writer + writer?: $protobuf.Writer, ): $protobuf.Writer; /** @@ -95,7 +95,7 @@ export namespace examples { */ public static encodeDelimited( message: examples.spanner.music.ISingerInfo, - writer?: $protobuf.Writer + writer?: $protobuf.Writer, ): $protobuf.Writer; /** @@ -108,7 +108,7 @@ export namespace examples { */ public 
static decode( reader: $protobuf.Reader | Uint8Array, - length?: number + length?: number, ): examples.spanner.music.SingerInfo; /** @@ -119,7 +119,7 @@ export namespace examples { * @throws {$protobuf.util.ProtocolError} If required fields are missing */ public static decodeDelimited( - reader: $protobuf.Reader | Uint8Array + reader: $protobuf.Reader | Uint8Array, ): examples.spanner.music.SingerInfo; /** @@ -146,7 +146,7 @@ export namespace examples { */ public static toObject( message: examples.spanner.music.SingerInfo, - options?: $protobuf.IConversionOptions + options?: $protobuf.IConversionOptions, ): {[k: string]: any}; /** diff --git a/test/data/singer.js b/test/data/singer.js index 203791b41..df7792d17 100644 --- a/test/data/singer.js +++ b/test/data/singer.js @@ -346,7 +346,7 @@ $root.examples = (function () { if (object.singerId !== null) if ($util.Long) (message.singerId = $util.Long.fromValue( - object.singerId + object.singerId, )).unsigned = false; else if (typeof object.singerId === 'string') message.singerId = parseInt(object.singerId, 10); @@ -355,7 +355,7 @@ $root.examples = (function () { else if (typeof object.singerId === 'object') message.singerId = new $util.LongBits( object.singerId.low >>> 0, - object.singerId.high >>> 0 + object.singerId.high >>> 0, ).toNumber(); if (object.birthDate !== null) message.birthDate = String(object.birthDate); @@ -413,7 +413,7 @@ $root.examples = (function () { : options.longs === Number ? 
new $util.LongBits( message.singerId.low >>> 0, - message.singerId.high >>> 0 + message.singerId.high >>> 0, ).toNumber() : message.singerId; if (options.oneofs) object._singerId = 'singerId'; diff --git a/test/database.ts b/test/database.ts index a1d97e55e..2c7f5f9c8 100644 --- a/test/database.ts +++ b/test/database.ts @@ -107,12 +107,12 @@ export class FakeSession { } partitionedDml(): FakeTransaction { return new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.PartitionedDml + {} as google.spanner.v1.TransactionOptions.PartitionedDml, ); } snapshot(): FakeTransaction { return new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); } } @@ -189,7 +189,7 @@ class FakeTransaction extends EventEmitter { setReadWriteTransactionOptions(options: RunTransactionOptions) {} commit( options?: CommitOptions, - callback?: CommitCallback + callback?: CommitCallback, ): void | Promise { if (callback) { callback(null, {commitTimestamp: {seconds: 1, nanos: 0}}); @@ -343,7 +343,7 @@ describe('Database', () => { const database = new Database( INSTANCE, NAME, - FakePool as {} as db.SessionPoolConstructor + FakePool as {} as db.SessionPoolConstructor, ); assert(database.pool_ instanceof FakeSessionPool); }); @@ -409,14 +409,14 @@ describe('Database', () => { it('should return the name if already formatted', () => { assert.strictEqual( Database.formatName_(INSTANCE.formattedName_, DATABASE_FORMATTED_NAME), - DATABASE_FORMATTED_NAME + DATABASE_FORMATTED_NAME, ); }); it('should format the name', () => { const formattedName_ = Database.formatName_( INSTANCE.formattedName_, - NAME + NAME, ); assert.strictEqual(formattedName_, DATABASE_FORMATTED_NAME); }); @@ -443,8 +443,8 @@ describe('Database', () => { [LEADER_AWARE_ROUTING_HEADER]: true, [X_GOOG_SPANNER_REQUEST_ID_HEADER]: craftRequestId(1, 1, 1, 1), }, - database.commonHeaders_ - ) + database.commonHeaders_, + ), ); }); @@ -474,7 +474,7 @@ 
describe('Database', () => { database.batchCreateSessions( {count: 10, databaseRole: 'child_role'}, - assert.ifError + assert.ifError, ); const {reqOpts} = stub.lastCall.args[0]; @@ -491,7 +491,7 @@ describe('Database', () => { assert.deepStrictEqual( reqOpts.sessionTemplate.creatorRole, - 'parent_role' + 'parent_role', ); }); @@ -708,7 +708,7 @@ describe('Database', () => { mutationGroups: mutationGroups.map(mg => mg.proto()), requestOptions: options?.requestOptions, excludeTxnFromChangeStream: options?.excludeTxnFromChangeStreams, - } + }, ); database.batchWriteAtLeastOnce(mutationGroups, options); @@ -778,7 +778,7 @@ describe('Database', () => { database .batchWriteAtLeastOnce( [FakeMutationGroup1, FakeMutationGroup2], - options + options, ) .on('data', data => { assert.strictEqual(data, 'testData'); @@ -834,7 +834,7 @@ describe('Database', () => { const SESSION = new FakeSession(); const RESPONSE = {commitTimestamp: {seconds: 1, nanos: 0}}; const TRANSACTION = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); let sessionFactory: FakeSessionFactory; @@ -867,7 +867,7 @@ describe('Database', () => { const fakeErr = new Error('err'); (sessionFactory.getSession as sinon.SinonStub).callsFake(callback => - callback(fakeErr, null, null) + callback(fakeErr, null, null), ); database.writeAtLeastOnce(mutations, err => { @@ -883,7 +883,7 @@ describe('Database', () => { assert.ifError(err); assert.deepStrictEqual( response.commitTimestamp, - RESPONSE.commitTimestamp + RESPONSE.commitTimestamp, ); }); done(); @@ -899,9 +899,9 @@ describe('Database', () => { const errorMessage = (err as grpc.ServiceError).message; assert.ok( errorMessage.includes( - "Cannot read properties of null (reading 'proto')" + "Cannot read properties of null (reading 'proto')", ) || - errorMessage.includes("Cannot read property 'proto' of null") + errorMessage.includes("Cannot read property 'proto' of null"), ); 
done(); @@ -921,10 +921,10 @@ describe('Database', () => { const [response] = await database.writeAtLeastOnce(mutations, {}); assert.deepStrictEqual( response.commitTimestamp, - RESPONSE.commitTimestamp + RESPONSE.commitTimestamp, ); }); - } + }, ); }); }); @@ -1623,7 +1623,7 @@ describe('Database', () => { config.reqOpts, extend({}, CONFIG.reqOpts, { session: SESSION.formattedName_, - }) + }), ); done(); }; @@ -1952,10 +1952,10 @@ describe('Database', () => { fakeSession = new FakeSession(); fakeSession2 = new FakeSession(); fakeSnapshot = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); fakeSnapshot2 = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); fakeStream = through.obj(); fakeStream2 = through.obj(); @@ -2079,7 +2079,7 @@ describe('Database', () => { it('should release the session on transaction end', () => { const releaseStub = sandbox.stub( fakeSessionFactory, - 'release' + 'release', ) as sinon.SinonStub; database.runStream(QUERY); @@ -2116,7 +2116,7 @@ describe('Database', () => { fakeStream2.push(null); }); } - } + }, ); }); }); @@ -2222,8 +2222,8 @@ describe('Database', () => { [LEADER_AWARE_ROUTING_HEADER]: 'true', [X_GOOG_SPANNER_REQUEST_ID_HEADER]: craftRequestId(1, 1, 1, 1), }, - database.commonHeaders_ - ) + database.commonHeaders_, + ), ); done(); @@ -2271,7 +2271,7 @@ describe('Database', () => { database.request = config => { assert.deepStrictEqual( config.reqOpts.session.creatorRole, - databaseRole.databaseRole + databaseRole.databaseRole, ); assert.deepStrictEqual(options, originalOptions); done(); @@ -2288,7 +2288,7 @@ describe('Database', () => { database.request = config => { assert.deepStrictEqual( config.reqOpts.session.creatorRole, - databaseRole.databaseRole + databaseRole.databaseRole, ); assert.deepStrictEqual(options, originalOptions); done(); @@ -2305,7 +2305,7 @@ 
describe('Database', () => { database.request = config => { assert.deepStrictEqual( config.reqOpts.session.multiplexed, - multiplexed.multiplexed + multiplexed.multiplexed, ); assert.deepStrictEqual(options, originalOptions); done(); @@ -2395,7 +2395,7 @@ describe('Database', () => { fakeSessionFactory = database.sessionFactory_; fakeSession = new FakeSession(); fakeSnapshot = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); beginSnapshotStub = ( @@ -2413,7 +2413,7 @@ describe('Database', () => { ( sandbox.stub( fakeSessionFactory, - 'isMultiplexedEnabled' + 'isMultiplexedEnabled', ) as sinon.SinonStub ).returns(isMuxEnabled ? true : false); }); @@ -2431,7 +2431,7 @@ describe('Database', () => { assert.strictEqual(err, fakeError); done(); }); - } + }, ); it('should pass the timestamp bounds to the snapshot', () => { @@ -2450,7 +2450,7 @@ describe('Database', () => { assert.strictEqual(err.code, 3); assert.strictEqual( err.message, - 'maxStaleness / minReadTimestamp is not supported for multi-use read-only transactions.' + 'maxStaleness / minReadTimestamp is not supported for multi-use read-only transactions.', ); }); }); @@ -2462,7 +2462,7 @@ describe('Database', () => { assert.strictEqual(err.code, 3); assert.strictEqual( err.message, - 'maxStaleness / minReadTimestamp is not supported for multi-use read-only transactions.' 
+ 'maxStaleness / minReadTimestamp is not supported for multi-use read-only transactions.', ); }); }); @@ -2524,7 +2524,7 @@ describe('Database', () => { const fakeSession2 = new FakeSession(); const fakeSnapshot2 = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadOnly + {} as google.spanner.v1.TransactionOptions.ReadOnly, ); ( sandbox.stub(fakeSnapshot2, 'begin') as sinon.SinonStub @@ -2570,7 +2570,7 @@ describe('Database', () => { }); }); } - } + }, ); }); }); @@ -2588,7 +2588,7 @@ describe('Database', () => { fakeSessionFactory = database.sessionFactory_; fakeSession = new FakeSession(); fakeTransaction = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); getSessionStub = ( @@ -2700,7 +2700,7 @@ describe('Database', () => { { database: database.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); delete expectedReqOpts.gaxOptions; @@ -2732,7 +2732,7 @@ describe('Database', () => { pageSize: optionsPageSize, pageToken: optionsPageToken, gaxOptions, - } + }, ); const expectedReqOpts = extend( {}, @@ -2740,7 +2740,7 @@ describe('Database', () => { { database: database.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); delete expectedReqOpts.gaxOptions; @@ -2870,7 +2870,7 @@ describe('Database', () => { { database: database.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); database.requestStream = config => { @@ -2904,7 +2904,7 @@ describe('Database', () => { { database: database.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); database.requestStream = config => { @@ -2997,7 
+2997,7 @@ describe('Database', () => { getSessionStub = ( sandbox.stub( fakeSessionFactory, - 'getSessionForPartitionedOps' + 'getSessionForPartitionedOps', ) as sinon.SinonStub ).callsFake(callback => { callback(null, fakeSession); @@ -3113,7 +3113,7 @@ describe('Database', () => { priority: RequestOptions.Priority.PRIORITY_LOW, }, }, - fakeCallback + fakeCallback, ); const [query] = runUpdateStub.lastCall.args; @@ -3133,7 +3133,7 @@ describe('Database', () => { { excludeTxnFromChangeStream: true, }, - fakeCallback + fakeCallback, ); const [query] = runUpdateStub.lastCall.args; @@ -3160,7 +3160,7 @@ describe('Database', () => { priority: RequestOptions.Priority.PRIORITY_LOW, }, }, - fakeCallback + fakeCallback, ); const [query] = runUpdateStub.lastCall.args; @@ -3172,7 +3172,7 @@ describe('Database', () => { }); assert.ok(fakeCallback.calledOnce); }); - } + }, ); }); }); @@ -3180,7 +3180,7 @@ describe('Database', () => { describe('runTransaction', () => { const SESSION = new FakeSession(); const TRANSACTION = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); let pool: FakeSessionPool; @@ -3191,7 +3191,7 @@ describe('Database', () => { (sandbox.stub(pool, 'getSession') as sinon.SinonStub).callsFake( callback => { callback(null, SESSION, TRANSACTION); - } + }, ); }); @@ -3199,7 +3199,7 @@ describe('Database', () => { const fakeErr = new Error('err'); (pool.getSession as sinon.SinonStub).callsFake(callback => - callback(fakeErr) + callback(fakeErr), ); database.runTransaction(err => { @@ -3279,7 +3279,7 @@ describe('Database', () => { describe('runTransactionAsync', () => { const SESSION = new FakeSession(); const TRANSACTION = new FakeTransaction( - {} as google.spanner.v1.TransactionOptions.ReadWrite + {} as google.spanner.v1.TransactionOptions.ReadWrite, ); let pool: FakeSessionPool; @@ -3290,7 +3290,7 @@ describe('Database', () => { (sandbox.stub(pool, 'getSession') as 
sinon.SinonStub).callsFake( callback => { callback(null, SESSION, TRANSACTION); - } + }, ); }); @@ -3464,7 +3464,7 @@ describe('Database', () => { database.instance.getDatabaseOperations = async options => { assert.strictEqual( options.filter, - `(name:${DATABASE_FORMATTED_NAME}) AND (someOtherAttribute: aValue)` + `(name:${DATABASE_FORMATTED_NAME}) AND (someOtherAttribute: aValue)`, ); return [operations, {}]; }; @@ -3563,7 +3563,7 @@ describe('Database', () => { database.request = config => { assert.deepStrictEqual( config.reqOpts.encryptionConfig, - encryptionConfig + encryptionConfig, ); done(); }; @@ -3593,7 +3593,7 @@ describe('Database', () => { database.request = config => { assert.deepStrictEqual( config.reqOpts.encryptionConfig, - encryptionConfig + encryptionConfig, ); assert.deepStrictEqual(config.gaxOpts, options.gaxOptions); done(); diff --git a/test/gapic_database_admin_v1.ts b/test/gapic_database_admin_v1.ts index b6f36a281..7f42bef4b 100644 --- a/test/gapic_database_admin_v1.ts +++ b/test/gapic_database_admin_v1.ts @@ -30,7 +30,7 @@ import {protobuf, LROperation, operationsProtos} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,7 +47,7 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } @@ -59,7 +59,7 @@ function stubSimpleCall(response?: ResponseType, error?: Error) { function stubSimpleCallWithCallback( response?: ResponseType, - error?: Error + error?: Error, ) { return error ? 
sinon.stub().callsArgWith(2, error) @@ -69,7 +69,7 @@ function stubSimpleCallWithCallback( function stubLongRunningCall( response?: ResponseType, callError?: Error, - lroError?: Error + lroError?: Error, ) { const innerStub = lroError ? sinon.stub().rejects(lroError) @@ -85,7 +85,7 @@ function stubLongRunningCall( function stubLongRunningCallWithCallback( response?: ResponseType, callError?: Error, - lroError?: Error + lroError?: Error, ) { const innerStub = lroError ? sinon.stub().rejects(lroError) @@ -100,7 +100,7 @@ function stubLongRunningCallWithCallback( function stubPageStreamingCall( responses?: ResponseType[], - error?: Error + error?: Error, ) { const pagingStub = sinon.stub(); if (responses) { @@ -138,7 +138,7 @@ function stubPageStreamingCall( function stubAsyncIterationCall( responses?: ResponseType[], - error?: Error + error?: Error, ) { let counter = 0; const asyncIterable = { @@ -284,11 +284,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); - assert(client.databaseAdminStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.databaseAdminStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -297,9 +304,14 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); assert.strictEqual(client.databaseAdminStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ -343,18 +355,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new 
protos.google.spanner.admin.database.v1.GetDatabaseRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ); client.innerApiCalls.getDatabase = stubSimpleCall(expectedResponse); const [response] = await client.getDatabase(request); @@ -374,18 +386,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ); client.innerApiCalls.getDatabase = stubSimpleCallWithCallback(expectedResponse); @@ -394,14 +406,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IDatabase | null + result?: protos.google.spanner.admin.database.v1.IDatabase | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -421,20 +433,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.getDatabase = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getDatabase(request), expectedError); const actualRequest = ( @@ -452,17 +464,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getDatabase(request), expectedError); }); }); @@ -473,18 +487,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DropDatabaseRequest() + new protos.google.spanner.admin.database.v1.DropDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DropDatabaseRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.dropDatabase = stubSimpleCall(expectedResponse); const [response] = await client.dropDatabase(request); @@ -504,18 +518,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DropDatabaseRequest() + new protos.google.spanner.admin.database.v1.DropDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DropDatabaseRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.dropDatabase = stubSimpleCallWithCallback(expectedResponse); @@ -524,14 +538,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -551,20 +565,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DropDatabaseRequest() + new protos.google.spanner.admin.database.v1.DropDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DropDatabaseRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.dropDatabase = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.dropDatabase(request), expectedError); const actualRequest = ( @@ -582,17 +596,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DropDatabaseRequest() + new protos.google.spanner.admin.database.v1.DropDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DropDatabaseRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.dropDatabase(request), expectedError); }); }); @@ -603,18 +619,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseDdlResponse() + new protos.google.spanner.admin.database.v1.GetDatabaseDdlResponse(), ); client.innerApiCalls.getDatabaseDdl = stubSimpleCall(expectedResponse); const [response] = await client.getDatabaseDdl(request); @@ -634,18 +650,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseDdlResponse() + new protos.google.spanner.admin.database.v1.GetDatabaseDdlResponse(), ); client.innerApiCalls.getDatabaseDdl = stubSimpleCallWithCallback(expectedResponse); @@ -654,14 +670,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IGetDatabaseDdlResponse | null + result?: protos.google.spanner.admin.database.v1.IGetDatabaseDdlResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -681,20 +697,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest(), ); const 
defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.getDatabaseDdl = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getDatabaseDdl(request), expectedError); const actualRequest = ( @@ -712,17 +728,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest() + new protos.google.spanner.admin.database.v1.GetDatabaseDdlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getDatabaseDdl(request), expectedError); }); }); @@ -733,18 +751,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.setIamPolicy = stubSimpleCall(expectedResponse); const [response] = await client.setIamPolicy(request); @@ -764,18 +782,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.setIamPolicy = stubSimpleCallWithCallback(expectedResponse); @@ -784,14 +802,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.iam.v1.IPolicy | null + result?: protos.google.iam.v1.IPolicy | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -811,20 +829,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.setIamPolicy = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.setIamPolicy(request), expectedError); const actualRequest = ( @@ -842,17 +860,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.setIamPolicy(request), expectedError); }); }); @@ -863,18 +883,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.getIamPolicy = stubSimpleCall(expectedResponse); const [response] = await client.getIamPolicy(request); @@ -894,18 +914,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.getIamPolicy = stubSimpleCallWithCallback(expectedResponse); @@ -914,14 +934,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.iam.v1.IPolicy | null + result?: protos.google.iam.v1.IPolicy | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -941,20 +961,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.getIamPolicy = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getIamPolicy(request), expectedError); const actualRequest = ( @@ -972,17 +992,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getIamPolicy(request), expectedError); }); }); @@ -993,18 +1015,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsResponse() + new protos.google.iam.v1.TestIamPermissionsResponse(), ); client.innerApiCalls.testIamPermissions = stubSimpleCall(expectedResponse); @@ -1025,18 +1047,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsResponse() + new protos.google.iam.v1.TestIamPermissionsResponse(), ); client.innerApiCalls.testIamPermissions = stubSimpleCallWithCallback(expectedResponse); @@ -1045,14 +1067,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.iam.v1.ITestIamPermissionsResponse | null + result?: protos.google.iam.v1.ITestIamPermissionsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1072,20 +1094,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.testIamPermissions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.testIamPermissions(request), expectedError); const actualRequest = ( @@ -1103,17 +1125,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.testIamPermissions(request), expectedError); }); }); @@ -1124,18 +1148,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupRequest() + new protos.google.spanner.admin.database.v1.GetBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ); client.innerApiCalls.getBackup = stubSimpleCall(expectedResponse); const [response] = await client.getBackup(request); @@ -1155,18 +1179,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupRequest() + new protos.google.spanner.admin.database.v1.GetBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ); client.innerApiCalls.getBackup = stubSimpleCallWithCallback(expectedResponse); @@ -1175,14 +1199,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IBackup | null + result?: protos.google.spanner.admin.database.v1.IBackup | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1202,13 +1226,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupRequest() + new protos.google.spanner.admin.database.v1.GetBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const 
expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; @@ -1230,17 +1254,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupRequest() + new protos.google.spanner.admin.database.v1.GetBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getBackup(request), expectedError); }); }); @@ -1251,19 +1277,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupRequest() + new protos.google.spanner.admin.database.v1.UpdateBackupRequest(), ); request.backup ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateBackupRequest', - ['backup', 'name'] + ['backup', 'name'], ); request.backup.name = defaultValue1; const expectedHeaderRequestParams = `backup.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ); client.innerApiCalls.updateBackup = stubSimpleCall(expectedResponse); const [response] = await client.updateBackup(request); @@ -1283,19 +1309,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupRequest() + new protos.google.spanner.admin.database.v1.UpdateBackupRequest(), ); request.backup ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateBackupRequest', - ['backup', 'name'] + ['backup', 'name'], ); request.backup.name = defaultValue1; const expectedHeaderRequestParams = `backup.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ); client.innerApiCalls.updateBackup = stubSimpleCallWithCallback(expectedResponse); @@ -1304,14 +1330,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IBackup | null + result?: protos.google.spanner.admin.database.v1.IBackup | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1331,21 +1357,21 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupRequest() + new protos.google.spanner.admin.database.v1.UpdateBackupRequest(), ); request.backup ??= {}; const defaultValue1 = getTypeDefaultValue( 
'.google.spanner.admin.database.v1.UpdateBackupRequest', - ['backup', 'name'] + ['backup', 'name'], ); request.backup.name = defaultValue1; const expectedHeaderRequestParams = `backup.name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.updateBackup = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.updateBackup(request), expectedError); const actualRequest = ( @@ -1363,18 +1389,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupRequest() + new protos.google.spanner.admin.database.v1.UpdateBackupRequest(), ); request.backup ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateBackupRequest', - ['backup', 'name'] + ['backup', 'name'], ); request.backup.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.updateBackup(request), expectedError); }); }); @@ -1385,18 +1413,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DeleteBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteBackup = stubSimpleCall(expectedResponse); const [response] = await client.deleteBackup(request); @@ -1416,18 +1444,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DeleteBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteBackup = stubSimpleCallWithCallback(expectedResponse); @@ -1436,14 +1464,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1463,20 +1491,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DeleteBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.deleteBackup = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.deleteBackup(request), expectedError); const actualRequest = ( @@ -1494,17 +1522,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DeleteBackupRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.deleteBackup(request), expectedError); }); }); @@ -1515,18 +1545,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.AddSplitPointsRequest() + new protos.google.spanner.admin.database.v1.AddSplitPointsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.AddSplitPointsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.AddSplitPointsResponse() + new protos.google.spanner.admin.database.v1.AddSplitPointsResponse(), ); client.innerApiCalls.addSplitPoints = stubSimpleCall(expectedResponse); const [response] = await client.addSplitPoints(request); @@ -1546,18 +1576,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.AddSplitPointsRequest() + new protos.google.spanner.admin.database.v1.AddSplitPointsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.AddSplitPointsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.AddSplitPointsResponse() + new protos.google.spanner.admin.database.v1.AddSplitPointsResponse(), ); client.innerApiCalls.addSplitPoints = stubSimpleCallWithCallback(expectedResponse); @@ -1566,14 +1596,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IAddSplitPointsResponse | null + result?: protos.google.spanner.admin.database.v1.IAddSplitPointsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1593,20 +1623,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.AddSplitPointsRequest() + new protos.google.spanner.admin.database.v1.AddSplitPointsRequest(), ); const 
defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.AddSplitPointsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.addSplitPoints = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.addSplitPoints(request), expectedError); const actualRequest = ( @@ -1624,17 +1654,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.AddSplitPointsRequest() + new protos.google.spanner.admin.database.v1.AddSplitPointsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.AddSplitPointsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.addSplitPoints(request), expectedError); }); }); @@ -1645,18 +1677,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateBackupScheduleRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ); client.innerApiCalls.createBackupSchedule = stubSimpleCall(expectedResponse); @@ -1677,18 +1709,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateBackupScheduleRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ); client.innerApiCalls.createBackupSchedule = stubSimpleCallWithCallback(expectedResponse); @@ -1697,14 +1729,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IBackupSchedule | null + result?: protos.google.spanner.admin.database.v1.IBackupSchedule | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1724,20 +1756,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( 
'.google.spanner.admin.database.v1.CreateBackupScheduleRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.createBackupSchedule = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createBackupSchedule(request), expectedError); const actualRequest = ( @@ -1755,17 +1787,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.CreateBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateBackupScheduleRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.createBackupSchedule(request), expectedError); }); }); @@ -1776,18 +1810,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ); client.innerApiCalls.getBackupSchedule = stubSimpleCall(expectedResponse); const [response] = await client.getBackupSchedule(request); @@ -1807,18 +1841,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ); client.innerApiCalls.getBackupSchedule = stubSimpleCallWithCallback(expectedResponse); @@ -1827,14 +1861,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IBackupSchedule | null + result?: protos.google.spanner.admin.database.v1.IBackupSchedule | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1854,20 +1888,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( 
'.google.spanner.admin.database.v1.GetBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.getBackupSchedule = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getBackupSchedule(request), expectedError); const actualRequest = ( @@ -1885,17 +1919,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.GetBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.GetBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getBackupSchedule(request), expectedError); }); }); @@ -1906,19 +1942,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest(), ); request.backupSchedule ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateBackupScheduleRequest', - ['backupSchedule', 'name'] + ['backupSchedule', 'name'], ); request.backupSchedule.name = defaultValue1; const expectedHeaderRequestParams = `backup_schedule.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ); client.innerApiCalls.updateBackupSchedule = stubSimpleCall(expectedResponse); @@ -1939,19 +1975,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest(), ); request.backupSchedule ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateBackupScheduleRequest', - ['backupSchedule', 'name'] + ['backupSchedule', 'name'], ); request.backupSchedule.name = defaultValue1; const expectedHeaderRequestParams = `backup_schedule.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ); client.innerApiCalls.updateBackupSchedule = stubSimpleCallWithCallback(expectedResponse); @@ -1960,14 +1996,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IBackupSchedule | null + result?: protos.google.spanner.admin.database.v1.IBackupSchedule | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1987,21 +2023,21 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest() + new 
protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest(), ); request.backupSchedule ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateBackupScheduleRequest', - ['backupSchedule', 'name'] + ['backupSchedule', 'name'], ); request.backupSchedule.name = defaultValue1; const expectedHeaderRequestParams = `backup_schedule.name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.updateBackupSchedule = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.updateBackupSchedule(request), expectedError); const actualRequest = ( @@ -2019,18 +2055,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.UpdateBackupScheduleRequest(), ); request.backupSchedule ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateBackupScheduleRequest', - ['backupSchedule', 'name'] + ['backupSchedule', 'name'], ); request.backupSchedule.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.updateBackupSchedule(request), expectedError); }); }); @@ -2041,18 +2079,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( 
'.google.spanner.admin.database.v1.DeleteBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteBackupSchedule = stubSimpleCall(expectedResponse); @@ -2073,18 +2111,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DeleteBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteBackupSchedule = stubSimpleCallWithCallback(expectedResponse); @@ -2093,14 +2131,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -2120,20 +2158,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DeleteBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.deleteBackupSchedule = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.deleteBackupSchedule(request), expectedError); const actualRequest = ( @@ -2151,17 +2189,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest() + new protos.google.spanner.admin.database.v1.DeleteBackupScheduleRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.DeleteBackupScheduleRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.deleteBackupSchedule(request), expectedError); }); }); @@ -2172,18 +2212,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateDatabaseRequest() + new protos.google.spanner.admin.database.v1.CreateDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createDatabase = stubLongRunningCall(expectedResponse); @@ -2205,18 +2245,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateDatabaseRequest() + new protos.google.spanner.admin.database.v1.CreateDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createDatabase = stubLongRunningCallWithCallback(expectedResponse); @@ -2228,14 +2268,14 @@ describe('v1.DatabaseAdminClient', () => { result?: LROperation< protos.google.spanner.admin.database.v1.IDatabase, protos.google.spanner.admin.database.v1.ICreateDatabaseMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2259,20 +2299,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateDatabaseRequest() + new protos.google.spanner.admin.database.v1.CreateDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const 
expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.createDatabase = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createDatabase(request), expectedError); const actualRequest = ( @@ -2290,13 +2330,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateDatabaseRequest() + new protos.google.spanner.admin.database.v1.CreateDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -2304,7 +2344,7 @@ describe('v1.DatabaseAdminClient', () => { client.innerApiCalls.createDatabase = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.createDatabase(request); await assert.rejects(operation.promise(), expectedError); @@ -2323,9 +2363,9 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2333,7 +2373,7 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkCreateDatabaseProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, 
expectedResponse.name); assert(decodedOperation.metadata); @@ -2345,16 +2385,16 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkCreateDatabaseProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -2366,19 +2406,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest() + new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest(), ); request.database ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseRequest', - ['database', 'name'] + ['database', 'name'], ); request.database.name = defaultValue1; const expectedHeaderRequestParams = `database.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateDatabase = stubLongRunningCall(expectedResponse); @@ -2400,19 +2440,19 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest() + new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest(), ); request.database ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseRequest', - ['database', 'name'] + ['database', 'name'], ); request.database.name = defaultValue1; const expectedHeaderRequestParams = `database.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateDatabase = stubLongRunningCallWithCallback(expectedResponse); @@ -2424,14 +2464,14 @@ describe('v1.DatabaseAdminClient', () => { result?: LROperation< protos.google.spanner.admin.database.v1.IDatabase, protos.google.spanner.admin.database.v1.IUpdateDatabaseMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2455,21 +2495,21 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest() + new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest(), ); request.database ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseRequest', - 
['database', 'name'] + ['database', 'name'], ); request.database.name = defaultValue1; const expectedHeaderRequestParams = `database.name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.updateDatabase = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.updateDatabase(request), expectedError); const actualRequest = ( @@ -2487,14 +2527,14 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest() + new protos.google.spanner.admin.database.v1.UpdateDatabaseRequest(), ); request.database ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseRequest', - ['database', 'name'] + ['database', 'name'], ); request.database.name = defaultValue1; const expectedHeaderRequestParams = `database.name=${defaultValue1 ?? 
''}`; @@ -2502,7 +2542,7 @@ describe('v1.DatabaseAdminClient', () => { client.innerApiCalls.updateDatabase = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.updateDatabase(request); await assert.rejects(operation.promise(), expectedError); @@ -2521,9 +2561,9 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2531,7 +2571,7 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkUpdateDatabaseProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ -2543,16 +2583,16 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkUpdateDatabaseProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -2564,18 +2604,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest() + new 
protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateDatabaseDdl = stubLongRunningCall(expectedResponse); @@ -2597,18 +2637,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest() + new protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateDatabaseDdl = stubLongRunningCallWithCallback(expectedResponse); @@ -2620,14 +2660,14 @@ describe('v1.DatabaseAdminClient', () => { result?: LROperation< protos.google.protobuf.IEmpty, protos.google.spanner.admin.database.v1.IUpdateDatabaseDdlMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2651,20 +2691,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest() + new protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.updateDatabaseDdl = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.updateDatabaseDdl(request), expectedError); const actualRequest = ( @@ -2682,13 +2722,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest() + new protos.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; @@ -2696,7 +2736,7 @@ describe('v1.DatabaseAdminClient', () => { client.innerApiCalls.updateDatabaseDdl = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.updateDatabaseDdl(request); await assert.rejects(operation.promise(), expectedError); @@ -2715,9 +2755,9 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2725,7 +2765,7 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkUpdateDatabaseDdlProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); 
assert(decodedOperation.metadata); @@ -2737,16 +2777,16 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkUpdateDatabaseDdlProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -2758,18 +2798,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupRequest() + new protos.google.spanner.admin.database.v1.CreateBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateBackupRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createBackup = stubLongRunningCall(expectedResponse); const [operation] = await client.createBackup(request); @@ -2790,18 +2830,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupRequest() + new protos.google.spanner.admin.database.v1.CreateBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateBackupRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createBackup = stubLongRunningCallWithCallback(expectedResponse); @@ -2813,14 +2853,14 @@ describe('v1.DatabaseAdminClient', () => { result?: LROperation< protos.google.spanner.admin.database.v1.IBackup, protos.google.spanner.admin.database.v1.ICreateBackupMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2844,20 +2884,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupRequest() + new protos.google.spanner.admin.database.v1.CreateBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateBackupRequest', - ['parent'] + ['parent'], ); request.parent = 
defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.createBackup = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createBackup(request), expectedError); const actualRequest = ( @@ -2875,13 +2915,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CreateBackupRequest() + new protos.google.spanner.admin.database.v1.CreateBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CreateBackupRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -2889,7 +2929,7 @@ describe('v1.DatabaseAdminClient', () => { client.innerApiCalls.createBackup = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.createBackup(request); await assert.rejects(operation.promise(), expectedError); @@ -2908,9 +2948,9 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2918,7 +2958,7 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkCreateBackupProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, 
expectedResponse.name); assert(decodedOperation.metadata); @@ -2930,12 +2970,12 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.checkCreateBackupProgress(''), expectedError); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); @@ -2948,18 +2988,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CopyBackupRequest() + new protos.google.spanner.admin.database.v1.CopyBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CopyBackupRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.copyBackup = stubLongRunningCall(expectedResponse); const [operation] = await client.copyBackup(request); @@ -2980,18 +3020,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CopyBackupRequest() + new protos.google.spanner.admin.database.v1.CopyBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CopyBackupRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.copyBackup = stubLongRunningCallWithCallback(expectedResponse); @@ -3003,14 +3043,14 @@ describe('v1.DatabaseAdminClient', () => { result?: LROperation< protos.google.spanner.admin.database.v1.IBackup, protos.google.spanner.admin.database.v1.ICopyBackupMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -3034,20 +3074,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CopyBackupRequest() + new protos.google.spanner.admin.database.v1.CopyBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CopyBackupRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const 
expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.copyBackup = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.copyBackup(request), expectedError); const actualRequest = ( @@ -3065,13 +3105,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.CopyBackupRequest() + new protos.google.spanner.admin.database.v1.CopyBackupRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.CopyBackupRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -3079,7 +3119,7 @@ describe('v1.DatabaseAdminClient', () => { client.innerApiCalls.copyBackup = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.copyBackup(request); await assert.rejects(operation.promise(), expectedError); @@ -3098,9 +3138,9 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -3108,7 +3148,7 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkCopyBackupProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); 
assert(decodedOperation.metadata); @@ -3120,12 +3160,12 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.checkCopyBackupProgress(''), expectedError); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); @@ -3138,18 +3178,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest() + new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.RestoreDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.restoreDatabase = stubLongRunningCall(expectedResponse); @@ -3171,18 +3211,18 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest() + new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.RestoreDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.restoreDatabase = stubLongRunningCallWithCallback(expectedResponse); @@ -3194,14 +3234,14 @@ describe('v1.DatabaseAdminClient', () => { result?: LROperation< protos.google.spanner.admin.database.v1.IDatabase, protos.google.spanner.admin.database.v1.IRestoreDatabaseMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -3225,20 +3265,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest() + new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.RestoreDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.restoreDatabase = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.restoreDatabase(request), expectedError); const actualRequest = ( @@ -3256,13 +3296,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest() + new protos.google.spanner.admin.database.v1.RestoreDatabaseRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.RestoreDatabaseRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -3270,7 +3310,7 @@ describe('v1.DatabaseAdminClient', () => { client.innerApiCalls.restoreDatabase = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.restoreDatabase(request); await assert.rejects(operation.promise(), expectedError); @@ -3289,9 +3329,9 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -3299,7 +3339,7 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkRestoreDatabaseProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); 
assert(decodedOperation.metadata); @@ -3311,16 +3351,16 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkRestoreDatabaseProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -3332,25 +3372,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabasesRequest() + new protos.google.spanner.admin.database.v1.ListDatabasesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabasesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), ]; client.innerApiCalls.listDatabases = stubSimpleCall(expectedResponse); @@ -3371,25 +3411,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabasesRequest() + new protos.google.spanner.admin.database.v1.ListDatabasesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabasesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), ]; client.innerApiCalls.listDatabases = @@ -3399,14 +3439,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IDatabase[] | null + result?: protos.google.spanner.admin.database.v1.IDatabase[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -3426,20 +3466,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabasesRequest() + new protos.google.spanner.admin.database.v1.ListDatabasesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabasesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listDatabases = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listDatabases(request), expectedError); const actualRequest = ( @@ -3457,25 +3497,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabasesRequest() + new protos.google.spanner.admin.database.v1.ListDatabasesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabasesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), ]; client.descriptors.page.listDatabases.createStream = @@ -3488,7 +3528,7 @@ describe('v1.DatabaseAdminClient', () => { 'data', (response: protos.google.spanner.admin.database.v1.Database) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3502,14 +3542,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listDatabases.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listDatabases, request) + .calledWith(client.innerApiCalls.listDatabases, request), ); assert( (client.descriptors.page.listDatabases.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' 
- ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3518,13 +3558,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabasesRequest() + new protos.google.spanner.admin.database.v1.ListDatabasesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabasesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -3539,7 +3579,7 @@ describe('v1.DatabaseAdminClient', () => { 'data', (response: protos.google.spanner.admin.database.v1.Database) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3552,14 +3592,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listDatabases.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listDatabases, request) + .calledWith(client.innerApiCalls.listDatabases, request), ); assert( (client.descriptors.page.listDatabases.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3568,25 +3608,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabasesRequest() + new protos.google.spanner.admin.database.v1.ListDatabasesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabasesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const 
expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Database() + new protos.google.spanner.admin.database.v1.Database(), ), ]; client.descriptors.page.listDatabases.asyncIterate = @@ -3601,14 +3641,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listDatabases.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listDatabases.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3617,13 +3657,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabasesRequest() + new protos.google.spanner.admin.database.v1.ListDatabasesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabasesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -3642,14 +3682,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listDatabases.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listDatabases.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); @@ -3660,25 +3700,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupsRequest() + new protos.google.spanner.admin.database.v1.ListBackupsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), ]; client.innerApiCalls.listBackups = stubSimpleCall(expectedResponse); @@ -3699,25 +3739,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupsRequest() + new protos.google.spanner.admin.database.v1.ListBackupsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), ]; client.innerApiCalls.listBackups = @@ -3727,14 +3767,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.database.v1.IBackup[] | null + result?: protos.google.spanner.admin.database.v1.IBackup[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -3754,20 +3794,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupsRequest() + new protos.google.spanner.admin.database.v1.ListBackupsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listBackups = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listBackups(request), expectedError); const actualRequest = ( @@ -3785,25 +3825,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupsRequest() + new protos.google.spanner.admin.database.v1.ListBackupsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), ]; client.descriptors.page.listBackups.createStream = @@ -3815,7 +3855,7 @@ describe('v1.DatabaseAdminClient', () => { 'data', (response: protos.google.spanner.admin.database.v1.Backup) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3829,14 +3869,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listBackups.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listBackups, request) + .calledWith(client.innerApiCalls.listBackups, request), ); assert( (client.descriptors.page.listBackups.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - 
].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3845,20 +3885,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupsRequest() + new protos.google.spanner.admin.database.v1.ListBackupsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.descriptors.page.listBackups.createStream = stubPageStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.listBackupsStream(request); const promise = new Promise((resolve, reject) => { @@ -3867,7 +3907,7 @@ describe('v1.DatabaseAdminClient', () => { 'data', (response: protos.google.spanner.admin.database.v1.Backup) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3880,14 +3920,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listBackups.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listBackups, request) + .calledWith(client.innerApiCalls.listBackups, request), ); assert( (client.descriptors.page.listBackups.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3896,25 +3936,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupsRequest() + new 
protos.google.spanner.admin.database.v1.ListBackupsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.Backup() + new protos.google.spanner.admin.database.v1.Backup(), ), ]; client.descriptors.page.listBackups.asyncIterate = @@ -3927,16 +3967,16 @@ describe('v1.DatabaseAdminClient', () => { assert.deepStrictEqual(responses, expectedResponse); assert.deepStrictEqual( (client.descriptors.page.listBackups.asyncIterate as SinonStub).getCall( - 0 + 0, ).args[1], - request + request, ); assert( (client.descriptors.page.listBackups.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3945,20 +3985,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupsRequest() + new protos.google.spanner.admin.database.v1.ListBackupsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.descriptors.page.listBackups.asyncIterate = stubAsyncIterationCall( undefined, - expectedError + expectedError, ); const iterable = client.listBackupsAsync(request); await assert.rejects(async () => { @@ -3969,16 +4009,16 @@ describe('v1.DatabaseAdminClient', () => { }); assert.deepStrictEqual( (client.descriptors.page.listBackups.asyncIterate as SinonStub).getCall( - 0 + 0, ).args[1], - request + request, ); assert( (client.descriptors.page.listBackups.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); @@ -3989,13 +4029,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -4023,13 +4063,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4045,14 +4085,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.longrunning.IOperation[] | null + result?: protos.google.longrunning.IOperation[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -4072,24 +4112,24 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.listDatabaseOperations = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.listDatabaseOperations(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.listDatabaseOperations as SinonStub @@ -4106,13 +4146,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4144,7 +4184,7 @@ describe('v1.DatabaseAdminClient', () => { .createStream as SinonStub ) .getCall(0) - .calledWith(client.innerApiCalls.listDatabaseOperations, request) + .calledWith(client.innerApiCalls.listDatabaseOperations, request), ); assert( ( @@ -4153,8 +4193,8 @@ describe('v1.DatabaseAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4163,13 +4203,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4196,7 +4236,7 @@ describe('v1.DatabaseAdminClient', () => { .createStream as SinonStub ) .getCall(0) - .calledWith(client.innerApiCalls.listDatabaseOperations, request) + .calledWith(client.innerApiCalls.listDatabaseOperations, request), ); assert( ( @@ -4205,8 +4245,8 @@ describe('v1.DatabaseAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4215,13 +4255,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4243,7 +4283,7 @@ describe('v1.DatabaseAdminClient', () => { client.descriptors.page.listDatabaseOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -4252,8 +4292,8 @@ describe('v1.DatabaseAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4262,13 +4302,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4287,7 +4327,7 @@ describe('v1.DatabaseAdminClient', () => { client.descriptors.page.listDatabaseOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -4296,8 +4336,8 @@ describe('v1.DatabaseAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); }); @@ -4308,13 +4348,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest() + new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -4342,13 +4382,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest() + new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4364,14 +4404,14 @@ describe('v1.DatabaseAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.longrunning.IOperation[] | null + result?: protos.google.longrunning.IOperation[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -4391,20 +4431,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest() + new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.listBackupOperations = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listBackupOperations(request), expectedError); const actualRequest = ( @@ -4422,13 +4462,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest() + new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4457,14 +4497,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listBackupOperations.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listBackupOperations, request) + .calledWith(client.innerApiCalls.listBackupOperations, request), ); assert( (client.descriptors.page.listBackupOperations.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -4473,13 +4513,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest() + new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4503,14 +4543,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listBackupOperations.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listBackupOperations, request) + .calledWith(client.innerApiCalls.listBackupOperations, request), ); assert( (client.descriptors.page.listBackupOperations.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -4519,13 +4559,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest() + new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4546,14 +4586,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listBackupOperations.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listBackupOperations.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -4562,13 +4602,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest() + new protos.google.spanner.admin.database.v1.ListBackupOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4586,14 +4626,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listBackupOperations.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listBackupOperations.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); @@ -4604,25 +4644,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseRolesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), ]; client.innerApiCalls.listDatabaseRoles = stubSimpleCall(expectedResponse); @@ -4643,25 +4683,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseRolesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), ]; client.innerApiCalls.listDatabaseRoles = @@ -4673,14 +4713,14 @@ describe('v1.DatabaseAdminClient', () => { err?: Error | null, result?: | protos.google.spanner.admin.database.v1.IDatabaseRole[] - | null + | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -4700,20 +4740,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseRolesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listDatabaseRoles = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listDatabaseRoles(request), expectedError); const actualRequest = ( @@ -4731,25 +4771,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseRolesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), ]; client.descriptors.page.listDatabaseRoles.createStream = @@ -4762,7 +4802,7 @@ describe('v1.DatabaseAdminClient', () => { 'data', (response: protos.google.spanner.admin.database.v1.DatabaseRole) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -4776,14 +4816,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listDatabaseRoles.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listDatabaseRoles, request) + .calledWith(client.innerApiCalls.listDatabaseRoles, request), ); assert( (client.descriptors.page.listDatabaseRoles.createStream as 
SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -4792,13 +4832,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseRolesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -4813,7 +4853,7 @@ describe('v1.DatabaseAdminClient', () => { 'data', (response: protos.google.spanner.admin.database.v1.DatabaseRole) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -4826,14 +4866,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listDatabaseRoles.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listDatabaseRoles, request) + .calledWith(client.innerApiCalls.listDatabaseRoles, request), ); assert( (client.descriptors.page.listDatabaseRoles.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -4842,25 +4882,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest(), ); const defaultValue1 = getTypeDefaultValue( 
'.google.spanner.admin.database.v1.ListDatabaseRolesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.DatabaseRole() + new protos.google.spanner.admin.database.v1.DatabaseRole(), ), ]; client.descriptors.page.listDatabaseRoles.asyncIterate = @@ -4876,14 +4916,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listDatabaseRoles.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listDatabaseRoles.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -4892,13 +4932,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest() + new protos.google.spanner.admin.database.v1.ListDatabaseRolesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListDatabaseRolesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4917,14 +4957,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listDatabaseRoles.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listDatabaseRoles.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); @@ -4935,25 +4975,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest() + new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupSchedulesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), ]; client.innerApiCalls.listBackupSchedules = @@ -4975,25 +5015,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest() + new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupSchedulesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), ]; client.innerApiCalls.listBackupSchedules = @@ -5005,14 +5045,14 @@ describe('v1.DatabaseAdminClient', () => { err?: Error | null, result?: | protos.google.spanner.admin.database.v1.IBackupSchedule[] - | null + | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -5032,20 +5072,20 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest() + new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupSchedulesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listBackupSchedules = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listBackupSchedules(request), expectedError); const actualRequest = ( @@ -5063,25 +5103,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest() + new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupSchedulesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), ]; client.descriptors.page.listBackupSchedules.createStream = @@ -5093,10 +5133,10 @@ describe('v1.DatabaseAdminClient', () => { stream.on( 'data', ( - response: protos.google.spanner.admin.database.v1.BackupSchedule + response: protos.google.spanner.admin.database.v1.BackupSchedule, ) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -5110,14 +5150,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listBackupSchedules.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listBackupSchedules, request) + 
.calledWith(client.innerApiCalls.listBackupSchedules, request), ); assert( (client.descriptors.page.listBackupSchedules.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -5126,13 +5166,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest() + new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupSchedulesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -5146,10 +5186,10 @@ describe('v1.DatabaseAdminClient', () => { stream.on( 'data', ( - response: protos.google.spanner.admin.database.v1.BackupSchedule + response: protos.google.spanner.admin.database.v1.BackupSchedule, ) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -5162,14 +5202,14 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.descriptors.page.listBackupSchedules.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listBackupSchedules, request) + .calledWith(client.innerApiCalls.listBackupSchedules, request), ); assert( (client.descriptors.page.listBackupSchedules.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -5178,25 +5218,25 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = 
generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest() + new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupSchedulesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), generateSampleMessage( - new protos.google.spanner.admin.database.v1.BackupSchedule() + new protos.google.spanner.admin.database.v1.BackupSchedule(), ), ]; client.descriptors.page.listBackupSchedules.asyncIterate = @@ -5212,14 +5252,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listBackupSchedules.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listBackupSchedules.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -5228,13 +5268,13 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest() + new protos.google.spanner.admin.database.v1.ListBackupSchedulesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.database.v1.ListBackupSchedulesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = 
`parent=${defaultValue1 ?? ''}`; @@ -5253,14 +5293,14 @@ describe('v1.DatabaseAdminClient', () => { ( client.descriptors.page.listBackupSchedules.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listBackupSchedules.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); @@ -5270,12 +5310,12 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new operationsProtos.google.longrunning.GetOperationRequest() + new operationsProtos.google.longrunning.GetOperationRequest(), ); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const response = await client.getOperation(request); @@ -5283,7 +5323,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.operationsClient.getOperation as SinonStub) .getCall(0) - .calledWith(request) + .calledWith(request), ); }); it('invokes getOperation without error using callback', async () => { @@ -5292,29 +5332,33 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); const request = generateSampleMessage( - new operationsProtos.google.longrunning.GetOperationRequest() + new operationsProtos.google.longrunning.GetOperationRequest(), ); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); client.operationsClient.getOperation = sinon .stub() .callsArgWith(2, null, expectedResponse); const promise = new Promise((resolve, reject) => { - client.operationsClient.getOperation( - request, - 
undefined, - ( - err?: Error | null, - result?: operationsProtos.google.longrunning.Operation | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); + client.operationsClient + .getOperation( + request, + undefined, + ( + err?: Error | null, + result?: operationsProtos.google.longrunning.Operation | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ) + .catch(err => { + throw err; + }); }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); @@ -5326,12 +5370,12 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); const request = generateSampleMessage( - new operationsProtos.google.longrunning.GetOperationRequest() + new operationsProtos.google.longrunning.GetOperationRequest(), ); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(async () => { await client.getOperation(request); @@ -5339,7 +5383,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.operationsClient.getOperation as SinonStub) .getCall(0) - .calledWith(request) + .calledWith(request), ); }); }); @@ -5349,12 +5393,12 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new operationsProtos.google.longrunning.CancelOperationRequest() + new operationsProtos.google.longrunning.CancelOperationRequest(), ); const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.operationsClient.cancelOperation = stubSimpleCall(expectedResponse); @@ -5363,7 +5407,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.operationsClient.cancelOperation as SinonStub) .getCall(0) - .calledWith(request) + .calledWith(request), ); }); 
it('invokes cancelOperation without error using callback', async () => { @@ -5372,29 +5416,33 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); const request = generateSampleMessage( - new operationsProtos.google.longrunning.CancelOperationRequest() + new operationsProtos.google.longrunning.CancelOperationRequest(), ); const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.operationsClient.cancelOperation = sinon .stub() .callsArgWith(2, null, expectedResponse); const promise = new Promise((resolve, reject) => { - client.operationsClient.cancelOperation( - request, - undefined, - ( - err?: Error | null, - result?: protos.google.protobuf.Empty | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); + client.operationsClient + .cancelOperation( + request, + undefined, + ( + err?: Error | null, + result?: protos.google.protobuf.Empty | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ) + .catch(err => { + throw err; + }); }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); @@ -5406,12 +5454,12 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); const request = generateSampleMessage( - new operationsProtos.google.longrunning.CancelOperationRequest() + new operationsProtos.google.longrunning.CancelOperationRequest(), ); const expectedError = new Error('expected'); client.operationsClient.cancelOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(async () => { await client.cancelOperation(request); @@ -5419,7 +5467,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.operationsClient.cancelOperation as SinonStub) .getCall(0) - .calledWith(request) + .calledWith(request), ); }); }); @@ -5429,12 +5477,12 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 
'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new operationsProtos.google.longrunning.DeleteOperationRequest() + new operationsProtos.google.longrunning.DeleteOperationRequest(), ); const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.operationsClient.deleteOperation = stubSimpleCall(expectedResponse); @@ -5443,7 +5491,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.operationsClient.deleteOperation as SinonStub) .getCall(0) - .calledWith(request) + .calledWith(request), ); }); it('invokes deleteOperation without error using callback', async () => { @@ -5452,29 +5500,33 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); const request = generateSampleMessage( - new operationsProtos.google.longrunning.DeleteOperationRequest() + new operationsProtos.google.longrunning.DeleteOperationRequest(), ); const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.operationsClient.deleteOperation = sinon .stub() .callsArgWith(2, null, expectedResponse); const promise = new Promise((resolve, reject) => { - client.operationsClient.deleteOperation( - request, - undefined, - ( - err?: Error | null, - result?: protos.google.protobuf.Empty | null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - } - ); + client.operationsClient + .deleteOperation( + request, + undefined, + ( + err?: Error | null, + result?: protos.google.protobuf.Empty | null, + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }, + ) + .catch(err => { + throw err; + }); }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); @@ -5486,12 +5538,12 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); const request = generateSampleMessage( - new 
operationsProtos.google.longrunning.DeleteOperationRequest() + new operationsProtos.google.longrunning.DeleteOperationRequest(), ); const expectedError = new Error('expected'); client.operationsClient.deleteOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(async () => { await client.deleteOperation(request); @@ -5499,7 +5551,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.operationsClient.deleteOperation as SinonStub) .getCall(0) - .calledWith(request) + .calledWith(request), ); }); }); @@ -5510,23 +5562,22 @@ describe('v1.DatabaseAdminClient', () => { projectId: 'bogus', }); const request = generateSampleMessage( - new operationsProtos.google.longrunning.ListOperationsRequest() + new operationsProtos.google.longrunning.ListOperationsRequest(), ); const expectedResponse = [ generateSampleMessage( - new operationsProtos.google.longrunning.ListOperationsResponse() + new operationsProtos.google.longrunning.ListOperationsResponse(), ), generateSampleMessage( - new operationsProtos.google.longrunning.ListOperationsResponse() + new operationsProtos.google.longrunning.ListOperationsResponse(), ), generateSampleMessage( - new operationsProtos.google.longrunning.ListOperationsResponse() + new operationsProtos.google.longrunning.ListOperationsResponse(), ), ]; client.operationsClient.descriptor.listOperations.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: operationsProtos.google.longrunning.ListOperationsResponse[] = - []; + const responses: operationsProtos.google.longrunning.IOperation[] = []; const iterable = client.operationsClient.listOperationsAsync(request); for await (const resource of iterable) { responses.push(resource!); @@ -5537,7 +5588,7 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.descriptor.listOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); }); it('uses async iteration with listOperations with error', async () 
=> { @@ -5545,17 +5596,16 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new operationsProtos.google.longrunning.ListOperationsRequest() + new operationsProtos.google.longrunning.ListOperationsRequest(), ); const expectedError = new Error('expected'); client.operationsClient.descriptor.listOperations.asyncIterate = stubAsyncIterationCall(undefined, expectedError); const iterable = client.operationsClient.listOperationsAsync(request); await assert.rejects(async () => { - const responses: operationsProtos.google.longrunning.ListOperationsResponse[] = - []; + const responses: operationsProtos.google.longrunning.IOperation[] = []; for await (const resource of iterable) { responses.push(resource!); } @@ -5565,13 +5615,13 @@ describe('v1.DatabaseAdminClient', () => { client.operationsClient.descriptor.listOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); }); }); describe('Path templates', () => { - describe('backup', () => { + describe('backup', async () => { const fakePath = '/rendered/path/backup'; const expectedParameters = { project: 'projectValue', @@ -5582,7 +5632,7 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.backupPathTemplate.render = sinon .stub() .returns(fakePath); @@ -5594,13 +5644,13 @@ describe('v1.DatabaseAdminClient', () => { const result = client.backupPath( 'projectValue', 'instanceValue', - 'backupValue' + 'backupValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.backupPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -5610,7 +5660,7 @@ describe('v1.DatabaseAdminClient', () => { assert( 
(client.pathTemplates.backupPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5620,7 +5670,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.backupPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5630,12 +5680,12 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.backupPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('backupSchedule', () => { + describe('backupSchedule', async () => { const fakePath = '/rendered/path/backupSchedule'; const expectedParameters = { project: 'projectValue', @@ -5647,7 +5697,7 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.backupSchedulePathTemplate.render = sinon .stub() .returns(fakePath); @@ -5660,13 +5710,13 @@ describe('v1.DatabaseAdminClient', () => { 'projectValue', 'instanceValue', 'databaseValue', - 'scheduleValue' + 'scheduleValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.backupSchedulePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -5676,7 +5726,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5686,7 +5736,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5696,7 +5746,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + 
.calledWith(fakePath), ); }); @@ -5706,12 +5756,12 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('cryptoKey', () => { + describe('cryptoKey', async () => { const fakePath = '/rendered/path/cryptoKey'; const expectedParameters = { project: 'projectValue', @@ -5723,7 +5773,7 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.cryptoKeyPathTemplate.render = sinon .stub() .returns(fakePath); @@ -5736,13 +5786,13 @@ describe('v1.DatabaseAdminClient', () => { 'projectValue', 'locationValue', 'keyRingValue', - 'cryptoKeyValue' + 'cryptoKeyValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.cryptoKeyPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -5752,7 +5802,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.cryptoKeyPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5762,7 +5812,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.cryptoKeyPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5772,7 +5822,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.cryptoKeyPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5782,12 +5832,12 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.cryptoKeyPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('database', () => { + describe('database', async () => { const fakePath = 
'/rendered/path/database'; const expectedParameters = { project: 'projectValue', @@ -5798,7 +5848,7 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.databasePathTemplate.render = sinon .stub() .returns(fakePath); @@ -5810,13 +5860,13 @@ describe('v1.DatabaseAdminClient', () => { const result = client.databasePath( 'projectValue', 'instanceValue', - 'databaseValue' + 'databaseValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.databasePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -5826,7 +5876,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5836,7 +5886,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5846,12 +5896,12 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('databaseRole', () => { + describe('databaseRole', async () => { const fakePath = '/rendered/path/databaseRole'; const expectedParameters = { project: 'projectValue', @@ -5863,7 +5913,7 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.databaseRolePathTemplate.render = sinon .stub() .returns(fakePath); @@ -5876,13 +5926,13 @@ describe('v1.DatabaseAdminClient', () => { 'projectValue', 'instanceValue', 'databaseValue', - 'roleValue' + 'roleValue', ); 
assert.strictEqual(result, fakePath); assert( (client.pathTemplates.databaseRolePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -5892,7 +5942,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5902,7 +5952,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5912,7 +5962,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5922,12 +5972,12 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('instance', () => { + describe('instance', async () => { const fakePath = '/rendered/path/instance'; const expectedParameters = { project: 'projectValue', @@ -5937,7 +5987,7 @@ describe('v1.DatabaseAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.instancePathTemplate.render = sinon .stub() .returns(fakePath); @@ -5951,7 +6001,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.instancePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -5961,7 +6011,7 @@ describe('v1.DatabaseAdminClient', () => { assert( (client.pathTemplates.instancePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -5971,7 +6021,7 @@ describe('v1.DatabaseAdminClient', () => { 
assert( (client.pathTemplates.instancePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/test/gapic_instance_admin_v1.ts b/test/gapic_instance_admin_v1.ts index 4c4aa245b..627f8ece6 100644 --- a/test/gapic_instance_admin_v1.ts +++ b/test/gapic_instance_admin_v1.ts @@ -30,7 +30,7 @@ import {protobuf, LROperation, operationsProtos} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,7 +47,7 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } @@ -59,7 +59,7 @@ function stubSimpleCall(response?: ResponseType, error?: Error) { function stubSimpleCallWithCallback( response?: ResponseType, - error?: Error + error?: Error, ) { return error ? sinon.stub().callsArgWith(2, error) @@ -69,7 +69,7 @@ function stubSimpleCallWithCallback( function stubLongRunningCall( response?: ResponseType, callError?: Error, - lroError?: Error + lroError?: Error, ) { const innerStub = lroError ? sinon.stub().rejects(lroError) @@ -85,7 +85,7 @@ function stubLongRunningCall( function stubLongRunningCallWithCallback( response?: ResponseType, callError?: Error, - lroError?: Error + lroError?: Error, ) { const innerStub = lroError ? 
sinon.stub().rejects(lroError) @@ -100,7 +100,7 @@ function stubLongRunningCallWithCallback( function stubPageStreamingCall( responses?: ResponseType[], - error?: Error + error?: Error, ) { const pagingStub = sinon.stub(); if (responses) { @@ -138,7 +138,7 @@ function stubPageStreamingCall( function stubAsyncIterationCall( responses?: ResponseType[], - error?: Error + error?: Error, ) { let counter = 0; const asyncIterable = { @@ -284,11 +284,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); - assert(client.instanceAdminStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.instanceAdminStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -297,9 +304,14 @@ describe('v1.InstanceAdminClient', () => { projectId: 'bogus', }); assert.strictEqual(client.instanceAdminStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ -343,18 +355,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ); client.innerApiCalls.getInstanceConfig = stubSimpleCall(expectedResponse); const [response] = await client.getInstanceConfig(request); @@ -374,18 +386,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ); client.innerApiCalls.getInstanceConfig = stubSimpleCallWithCallback(expectedResponse); @@ -394,14 +406,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.instance.v1.IInstanceConfig | null + result?: protos.google.spanner.admin.instance.v1.IInstanceConfig | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -421,20 +433,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( 
'.google.spanner.admin.instance.v1.GetInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.getInstanceConfig = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getInstanceConfig(request), expectedError); const actualRequest = ( @@ -452,17 +464,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getInstanceConfig(request), expectedError); }); }); @@ -473,18 +487,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteInstanceConfig = stubSimpleCall(expectedResponse); @@ -505,18 +519,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteInstanceConfig = stubSimpleCallWithCallback(expectedResponse); @@ -525,14 +539,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -552,20 +566,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.deleteInstanceConfig = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.deleteInstanceConfig(request), expectedError); const actualRequest = ( @@ -583,17 +597,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceConfigRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.deleteInstanceConfig(request), expectedError); }); }); @@ -604,18 +620,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ); client.innerApiCalls.getInstance = stubSimpleCall(expectedResponse); const [response] = await client.getInstance(request); @@ -635,18 +651,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ); client.innerApiCalls.getInstance = stubSimpleCallWithCallback(expectedResponse); @@ -655,14 +671,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.instance.v1.IInstance | null + result?: protos.google.spanner.admin.instance.v1.IInstance | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -682,20 +698,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstanceRequest', - ['name'] + ['name'], ); request.name = 
defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.getInstance = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getInstance(request), expectedError); const actualRequest = ( @@ -713,17 +729,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstanceRequest() + new protos.google.spanner.admin.instance.v1.GetInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getInstance(request), expectedError); }); }); @@ -734,18 +752,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteInstance = stubSimpleCall(expectedResponse); const [response] = await client.deleteInstance(request); @@ -765,18 +783,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteInstance = stubSimpleCallWithCallback(expectedResponse); @@ -785,14 +803,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -812,20 +830,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.deleteInstance = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.deleteInstance(request), expectedError); const actualRequest = ( @@ -843,17 +861,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.deleteInstance(request), expectedError); }); }); @@ -864,18 +884,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.setIamPolicy = stubSimpleCall(expectedResponse); const [response] = await client.setIamPolicy(request); @@ -895,18 +915,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.setIamPolicy = stubSimpleCallWithCallback(expectedResponse); @@ -915,14 +935,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.iam.v1.IPolicy | null + result?: protos.google.iam.v1.IPolicy | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -942,20 +962,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.setIamPolicy = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.setIamPolicy(request), expectedError); const actualRequest = ( @@ -973,17 +993,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.SetIamPolicyRequest() + new protos.google.iam.v1.SetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.SetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.setIamPolicy(request), expectedError); }); }); @@ -994,18 +1016,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.getIamPolicy = stubSimpleCall(expectedResponse); const [response] = await client.getIamPolicy(request); @@ -1025,18 +1047,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.Policy() + new protos.google.iam.v1.Policy(), ); client.innerApiCalls.getIamPolicy = stubSimpleCallWithCallback(expectedResponse); @@ -1045,14 +1067,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.iam.v1.IPolicy | null + result?: protos.google.iam.v1.IPolicy | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1072,20 +1094,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.getIamPolicy = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getIamPolicy(request), expectedError); const actualRequest = ( @@ -1103,17 +1125,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.GetIamPolicyRequest() + new protos.google.iam.v1.GetIamPolicyRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.GetIamPolicyRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getIamPolicy(request), expectedError); }); }); @@ -1124,18 +1148,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsResponse() + new protos.google.iam.v1.TestIamPermissionsResponse(), ); client.innerApiCalls.testIamPermissions = stubSimpleCall(expectedResponse); @@ -1156,18 +1180,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsResponse() + new protos.google.iam.v1.TestIamPermissionsResponse(), ); client.innerApiCalls.testIamPermissions = stubSimpleCallWithCallback(expectedResponse); @@ -1176,14 +1200,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.iam.v1.ITestIamPermissionsResponse | null + result?: protos.google.iam.v1.ITestIamPermissionsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1203,20 +1227,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedHeaderRequestParams = `resource=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.testIamPermissions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.testIamPermissions(request), expectedError); const actualRequest = ( @@ -1234,17 +1258,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.iam.v1.TestIamPermissionsRequest() + new protos.google.iam.v1.TestIamPermissionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.iam.v1.TestIamPermissionsRequest', - ['resource'] + ['resource'], ); request.resource = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.testIamPermissions(request), expectedError); }); }); @@ -1255,18 +1281,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ); client.innerApiCalls.getInstancePartition = stubSimpleCall(expectedResponse); @@ -1287,18 +1313,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ); client.innerApiCalls.getInstancePartition = stubSimpleCallWithCallback(expectedResponse); @@ -1307,14 +1333,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.instance.v1.IInstancePartition | null + result?: protos.google.spanner.admin.instance.v1.IInstancePartition | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1334,20 +1360,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( 
'.google.spanner.admin.instance.v1.GetInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.getInstancePartition = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getInstancePartition(request), expectedError); const actualRequest = ( @@ -1365,17 +1391,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.GetInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.GetInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getInstancePartition(request), expectedError); }); }); @@ -1386,18 +1414,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteInstancePartition = stubSimpleCall(expectedResponse); @@ -1418,18 +1446,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteInstancePartition = stubSimpleCallWithCallback(expectedResponse); @@ -1438,14 +1466,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1465,24 +1493,24 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = 
`name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.deleteInstancePartition = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.deleteInstancePartition(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.deleteInstancePartition as SinonStub @@ -1499,20 +1527,22 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.DeleteInstancePartitionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects( client.deleteInstancePartition(request), - expectedError + expectedError, ); }); }); @@ -1523,18 +1553,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceConfigRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createInstanceConfig = stubLongRunningCall(expectedResponse); @@ -1556,18 +1586,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceConfigRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createInstanceConfig = stubLongRunningCallWithCallback(expectedResponse); @@ -1579,14 +1609,14 @@ describe('v1.InstanceAdminClient', () => { result?: LROperation< protos.google.spanner.admin.instance.v1.IInstanceConfig, protos.google.spanner.admin.instance.v1.ICreateInstanceConfigMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -1610,20 +1640,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceConfigRequest', - ['parent'] + ['parent'], 
); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.createInstanceConfig = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createInstanceConfig(request), expectedError); const actualRequest = ( @@ -1641,13 +1671,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.CreateInstanceConfigRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceConfigRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -1655,7 +1685,7 @@ describe('v1.InstanceAdminClient', () => { client.innerApiCalls.createInstanceConfig = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.createInstanceConfig(request); await assert.rejects(operation.promise(), expectedError); @@ -1674,9 +1704,9 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -1684,7 +1714,7 @@ describe('v1.InstanceAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkCreateInstanceConfigProgress( - 
expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ -1696,16 +1726,16 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkCreateInstanceConfigProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -1717,19 +1747,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest(), ); request.instanceConfig ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest', - ['instanceConfig', 'name'] + ['instanceConfig', 'name'], ); request.instanceConfig.name = defaultValue1; const expectedHeaderRequestParams = `instance_config.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateInstanceConfig = stubLongRunningCall(expectedResponse); @@ -1751,19 +1781,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest(), ); request.instanceConfig ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest', - ['instanceConfig', 'name'] + ['instanceConfig', 'name'], ); request.instanceConfig.name = defaultValue1; const expectedHeaderRequestParams = `instance_config.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateInstanceConfig = stubLongRunningCallWithCallback(expectedResponse); @@ -1775,14 +1805,14 @@ describe('v1.InstanceAdminClient', () => { result?: LROperation< protos.google.spanner.admin.instance.v1.IInstanceConfig, protos.google.spanner.admin.instance.v1.IUpdateInstanceConfigMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -1806,21 +1836,21 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest(), ); request.instanceConfig ??= {}; const 
defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest', - ['instanceConfig', 'name'] + ['instanceConfig', 'name'], ); request.instanceConfig.name = defaultValue1; const expectedHeaderRequestParams = `instance_config.name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.updateInstanceConfig = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.updateInstanceConfig(request), expectedError); const actualRequest = ( @@ -1838,14 +1868,14 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest(), ); request.instanceConfig ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceConfigRequest', - ['instanceConfig', 'name'] + ['instanceConfig', 'name'], ); request.instanceConfig.name = defaultValue1; const expectedHeaderRequestParams = `instance_config.name=${defaultValue1 ?? 
''}`; @@ -1853,7 +1883,7 @@ describe('v1.InstanceAdminClient', () => { client.innerApiCalls.updateInstanceConfig = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.updateInstanceConfig(request); await assert.rejects(operation.promise(), expectedError); @@ -1872,9 +1902,9 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -1882,7 +1912,7 @@ describe('v1.InstanceAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkUpdateInstanceConfigProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ -1894,16 +1924,16 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkUpdateInstanceConfigProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -1915,18 +1945,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceRequest() + new 
protos.google.spanner.admin.instance.v1.CreateInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createInstance = stubLongRunningCall(expectedResponse); @@ -1948,18 +1978,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceRequest() + new protos.google.spanner.admin.instance.v1.CreateInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createInstance = stubLongRunningCallWithCallback(expectedResponse); @@ -1971,14 +2001,14 @@ describe('v1.InstanceAdminClient', () => { result?: LROperation< protos.google.spanner.admin.instance.v1.IInstance, protos.google.spanner.admin.instance.v1.ICreateInstanceMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2002,20 +2032,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceRequest() + new protos.google.spanner.admin.instance.v1.CreateInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.createInstance = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createInstance(request), expectedError); const actualRequest = ( @@ -2033,13 +2063,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstanceRequest() + new protos.google.spanner.admin.instance.v1.CreateInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstanceRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -2047,7 +2077,7 @@ describe('v1.InstanceAdminClient', () => { client.innerApiCalls.createInstance = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.createInstance(request); await assert.rejects(operation.promise(), expectedError); @@ -2066,9 +2096,9 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2076,7 +2106,7 @@ describe('v1.InstanceAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkCreateInstanceProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ 
-2088,16 +2118,16 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkCreateInstanceProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -2109,19 +2139,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest(), ); request.instance ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceRequest', - ['instance', 'name'] + ['instance', 'name'], ); request.instance.name = defaultValue1; const expectedHeaderRequestParams = `instance.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateInstance = stubLongRunningCall(expectedResponse); @@ -2143,19 +2173,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest(), ); request.instance ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceRequest', - ['instance', 'name'] + ['instance', 'name'], ); request.instance.name = defaultValue1; const expectedHeaderRequestParams = `instance.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateInstance = stubLongRunningCallWithCallback(expectedResponse); @@ -2167,14 +2197,14 @@ describe('v1.InstanceAdminClient', () => { result?: LROperation< protos.google.spanner.admin.instance.v1.IInstance, protos.google.spanner.admin.instance.v1.IUpdateInstanceMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2198,21 +2228,21 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest(), ); request.instance ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceRequest', - 
['instance', 'name'] + ['instance', 'name'], ); request.instance.name = defaultValue1; const expectedHeaderRequestParams = `instance.name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.updateInstance = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.updateInstance(request), expectedError); const actualRequest = ( @@ -2230,14 +2260,14 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstanceRequest(), ); request.instance ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstanceRequest', - ['instance', 'name'] + ['instance', 'name'], ); request.instance.name = defaultValue1; const expectedHeaderRequestParams = `instance.name=${defaultValue1 ?? 
''}`; @@ -2245,7 +2275,7 @@ describe('v1.InstanceAdminClient', () => { client.innerApiCalls.updateInstance = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.updateInstance(request); await assert.rejects(operation.promise(), expectedError); @@ -2264,9 +2294,9 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2274,7 +2304,7 @@ describe('v1.InstanceAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkUpdateInstanceProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ -2286,16 +2316,16 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkUpdateInstanceProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -2307,18 +2337,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest() + new 
protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstancePartitionRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createInstancePartition = stubLongRunningCall(expectedResponse); @@ -2340,18 +2370,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstancePartitionRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.createInstancePartition = stubLongRunningCallWithCallback(expectedResponse); @@ -2363,14 +2393,14 @@ describe('v1.InstanceAdminClient', () => { result?: LROperation< protos.google.spanner.admin.instance.v1.IInstancePartition, protos.google.spanner.admin.instance.v1.ICreateInstancePartitionMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2394,24 +2424,24 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstancePartitionRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.createInstancePartition = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.createInstancePartition(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.createInstancePartition as SinonStub @@ -2428,13 +2458,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.CreateInstancePartitionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.CreateInstancePartitionRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -2442,7 +2472,7 @@ describe('v1.InstanceAdminClient', () => { client.innerApiCalls.createInstancePartition = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.createInstancePartition(request); await assert.rejects(operation.promise(), expectedError); @@ -2461,9 +2491,9 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2472,7 +2502,7 @@ describe('v1.InstanceAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkCreateInstancePartitionProgress( - 
expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ -2484,16 +2514,16 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkCreateInstancePartitionProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -2505,19 +2535,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest(), ); request.instancePartition ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest', - ['instancePartition', 'name'] + ['instancePartition', 'name'], ); request.instancePartition.name = defaultValue1; const expectedHeaderRequestParams = `instance_partition.name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateInstancePartition = stubLongRunningCall(expectedResponse); @@ -2539,19 +2569,19 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest(), ); request.instancePartition ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest', - ['instancePartition', 'name'] + ['instancePartition', 'name'], ); request.instancePartition.name = defaultValue1; const expectedHeaderRequestParams = `instance_partition.name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.updateInstancePartition = stubLongRunningCallWithCallback(expectedResponse); @@ -2563,14 +2593,14 @@ describe('v1.InstanceAdminClient', () => { result?: LROperation< protos.google.spanner.admin.instance.v1.IInstancePartition, protos.google.spanner.admin.instance.v1.IUpdateInstancePartitionMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2594,25 +2624,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest(), ); 
request.instancePartition ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest', - ['instancePartition', 'name'] + ['instancePartition', 'name'], ); request.instancePartition.name = defaultValue1; const expectedHeaderRequestParams = `instance_partition.name=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.updateInstancePartition = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.updateInstancePartition(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.updateInstancePartition as SinonStub @@ -2629,14 +2659,14 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest() + new protos.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest(), ); request.instancePartition ??= {}; const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.UpdateInstancePartitionRequest', - ['instancePartition', 'name'] + ['instancePartition', 'name'], ); request.instancePartition.name = defaultValue1; const expectedHeaderRequestParams = `instance_partition.name=${defaultValue1 ?? 
''}`; @@ -2644,7 +2674,7 @@ describe('v1.InstanceAdminClient', () => { client.innerApiCalls.updateInstancePartition = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.updateInstancePartition(request); await assert.rejects(operation.promise(), expectedError); @@ -2663,9 +2693,9 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2674,7 +2704,7 @@ describe('v1.InstanceAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkUpdateInstancePartitionProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ -2686,16 +2716,16 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.checkUpdateInstancePartitionProgress(''), - expectedError + expectedError, ); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); }); @@ -2707,18 +2737,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.MoveInstanceRequest() 
+ new protos.google.spanner.admin.instance.v1.MoveInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.MoveInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.moveInstance = stubLongRunningCall(expectedResponse); const [operation] = await client.moveInstance(request); @@ -2739,18 +2769,18 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.MoveInstanceRequest() + new protos.google.spanner.admin.instance.v1.MoveInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.MoveInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.longrunning.Operation() + new protos.google.longrunning.Operation(), ); client.innerApiCalls.moveInstance = stubLongRunningCallWithCallback(expectedResponse); @@ -2762,14 +2792,14 @@ describe('v1.InstanceAdminClient', () => { result?: LROperation< protos.google.spanner.admin.instance.v1.IMoveInstanceResponse, protos.google.spanner.admin.instance.v1.IMoveInstanceMetadata - > | null + > | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const operation = (await promise) as LROperation< @@ -2793,20 +2823,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.MoveInstanceRequest() + new protos.google.spanner.admin.instance.v1.MoveInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.MoveInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.moveInstance = stubLongRunningCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.moveInstance(request), expectedError); const actualRequest = ( @@ -2824,13 +2854,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.MoveInstanceRequest() + new protos.google.spanner.admin.instance.v1.MoveInstanceRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.MoveInstanceRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; @@ -2838,7 +2868,7 @@ describe('v1.InstanceAdminClient', () => { client.innerApiCalls.moveInstance = stubLongRunningCall( undefined, undefined, - expectedError + expectedError, ); const [operation] = await client.moveInstance(request); await assert.rejects(operation.promise(), expectedError); @@ -2857,9 +2887,9 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedResponse = generateSampleMessage( - new operationsProtos.google.longrunning.Operation() + new operationsProtos.google.longrunning.Operation(), ); expectedResponse.name = 'test'; expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; @@ -2867,7 +2897,7 @@ describe('v1.InstanceAdminClient', () => { client.operationsClient.getOperation = stubSimpleCall(expectedResponse); const decodedOperation = await client.checkMoveInstanceProgress( - expectedResponse.name + expectedResponse.name, ); assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); assert(decodedOperation.metadata); @@ -2879,12 +2909,12 @@ 
describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const expectedError = new Error('expected'); client.operationsClient.getOperation = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.checkMoveInstanceProgress(''), expectedError); assert((client.operationsClient.getOperation as SinonStub).getCall(0)); @@ -2897,25 +2927,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), ]; client.innerApiCalls.listInstanceConfigs = @@ -2937,25 +2967,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), ]; client.innerApiCalls.listInstanceConfigs = @@ -2967,14 +2997,14 @@ describe('v1.InstanceAdminClient', () => { err?: Error | null, result?: | protos.google.spanner.admin.instance.v1.IInstanceConfig[] - | null + | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -2994,20 +3024,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listInstanceConfigs = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listInstanceConfigs(request), expectedError); const actualRequest = ( @@ -3025,25 +3055,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), ]; client.descriptors.page.listInstanceConfigs.createStream = @@ -3055,10 +3085,10 @@ describe('v1.InstanceAdminClient', () => { stream.on( 'data', ( - response: protos.google.spanner.admin.instance.v1.InstanceConfig + response: protos.google.spanner.admin.instance.v1.InstanceConfig, ) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3072,14 +3102,14 @@ describe('v1.InstanceAdminClient', () => { assert( (client.descriptors.page.listInstanceConfigs.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listInstanceConfigs, request) + 
.calledWith(client.innerApiCalls.listInstanceConfigs, request), ); assert( (client.descriptors.page.listInstanceConfigs.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3088,13 +3118,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -3108,10 +3138,10 @@ describe('v1.InstanceAdminClient', () => { stream.on( 'data', ( - response: protos.google.spanner.admin.instance.v1.InstanceConfig + response: protos.google.spanner.admin.instance.v1.InstanceConfig, ) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3124,14 +3154,14 @@ describe('v1.InstanceAdminClient', () => { assert( (client.descriptors.page.listInstanceConfigs.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listInstanceConfigs, request) + .calledWith(client.innerApiCalls.listInstanceConfigs, request), ); assert( (client.descriptors.page.listInstanceConfigs.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3140,25 +3170,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = 
generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstanceConfig() + new protos.google.spanner.admin.instance.v1.InstanceConfig(), ), ]; client.descriptors.page.listInstanceConfigs.asyncIterate = @@ -3174,14 +3204,14 @@ describe('v1.InstanceAdminClient', () => { ( client.descriptors.page.listInstanceConfigs.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listInstanceConfigs.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3190,13 +3220,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = 
`parent=${defaultValue1 ?? ''}`; @@ -3215,14 +3245,14 @@ describe('v1.InstanceAdminClient', () => { ( client.descriptors.page.listInstanceConfigs.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listInstanceConfigs.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); @@ -3233,13 +3263,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -3267,13 +3297,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -3289,14 +3319,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.longrunning.IOperation[] | null + result?: protos.google.longrunning.IOperation[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -3316,24 +3346,24 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.listInstanceConfigOperations = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.listInstanceConfigOperations(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.listInstanceConfigOperations as SinonStub @@ -3350,13 +3380,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -3390,8 +3420,8 @@ describe('v1.InstanceAdminClient', () => { .getCall(0) .calledWith( client.innerApiCalls.listInstanceConfigOperations, - request - ) + request, + ), ); assert( ( @@ -3400,8 +3430,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -3410,13 +3440,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -3445,8 +3475,8 @@ describe('v1.InstanceAdminClient', () => { .getCall(0) .calledWith( client.innerApiCalls.listInstanceConfigOperations, - request - ) + request, + ), ); assert( ( @@ -3455,8 +3485,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -3465,13 +3495,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -3493,7 +3523,7 @@ describe('v1.InstanceAdminClient', () => { client.descriptors.page.listInstanceConfigOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -3502,8 +3532,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -3512,13 +3542,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstanceConfigOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -3537,7 +3567,7 @@ describe('v1.InstanceAdminClient', () => { client.descriptors.page.listInstanceConfigOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -3546,8 +3576,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); }); @@ -3558,25 +3588,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancesRequest() + new protos.google.spanner.admin.instance.v1.ListInstancesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), ]; client.innerApiCalls.listInstances = stubSimpleCall(expectedResponse); @@ -3597,25 +3627,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancesRequest() + new protos.google.spanner.admin.instance.v1.ListInstancesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), ]; client.innerApiCalls.listInstances = @@ -3625,14 +3655,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.admin.instance.v1.IInstance[] | null + result?: protos.google.spanner.admin.instance.v1.IInstance[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -3652,20 +3682,20 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancesRequest() + new protos.google.spanner.admin.instance.v1.ListInstancesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listInstances = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listInstances(request), expectedError); const actualRequest = ( @@ -3683,25 +3713,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancesRequest() + new protos.google.spanner.admin.instance.v1.ListInstancesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), ]; client.descriptors.page.listInstances.createStream = @@ -3714,7 +3744,7 @@ describe('v1.InstanceAdminClient', () => { 'data', (response: protos.google.spanner.admin.instance.v1.Instance) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3728,14 +3758,14 @@ describe('v1.InstanceAdminClient', () => { assert( (client.descriptors.page.listInstances.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listInstances, request) + .calledWith(client.innerApiCalls.listInstances, request), ); assert( (client.descriptors.page.listInstances.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' 
- ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3744,13 +3774,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancesRequest() + new protos.google.spanner.admin.instance.v1.ListInstancesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -3765,7 +3795,7 @@ describe('v1.InstanceAdminClient', () => { 'data', (response: protos.google.spanner.admin.instance.v1.Instance) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -3778,14 +3808,14 @@ describe('v1.InstanceAdminClient', () => { assert( (client.descriptors.page.listInstances.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listInstances, request) + .calledWith(client.innerApiCalls.listInstances, request), ); assert( (client.descriptors.page.listInstances.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3794,25 +3824,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancesRequest() + new protos.google.spanner.admin.instance.v1.ListInstancesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const 
expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.Instance() + new protos.google.spanner.admin.instance.v1.Instance(), ), ]; client.descriptors.page.listInstances.asyncIterate = @@ -3827,14 +3857,14 @@ describe('v1.InstanceAdminClient', () => { ( client.descriptors.page.listInstances.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listInstances.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -3843,13 +3873,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancesRequest() + new protos.google.spanner.admin.instance.v1.ListInstancesRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancesRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -3868,14 +3898,14 @@ describe('v1.InstanceAdminClient', () => { ( client.descriptors.page.listInstances.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listInstances.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); @@ -3886,25 +3916,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), ]; client.innerApiCalls.listInstancePartitions = @@ -3926,25 +3956,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), ]; client.innerApiCalls.listInstancePartitions = @@ -3956,14 +3986,14 @@ describe('v1.InstanceAdminClient', () => { err?: Error | null, result?: | protos.google.spanner.admin.instance.v1.IInstancePartition[] - | null + | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -3983,24 +4013,24 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listInstancePartitions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.listInstancePartitions(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.listInstancePartitions as SinonStub @@ -4017,25 +4047,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), ]; client.descriptors.page.listInstancePartitions.createStream = @@ -4047,10 +4077,10 @@ describe('v1.InstanceAdminClient', () => { stream.on( 'data', ( - response: protos.google.spanner.admin.instance.v1.InstancePartition + response: protos.google.spanner.admin.instance.v1.InstancePartition, ) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -4067,7 +4097,7 @@ describe('v1.InstanceAdminClient', () => { .createStream as SinonStub ) .getCall(0) - 
.calledWith(client.innerApiCalls.listInstancePartitions, request) + .calledWith(client.innerApiCalls.listInstancePartitions, request), ); assert( ( @@ -4076,8 +4106,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4086,13 +4116,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4106,10 +4136,10 @@ describe('v1.InstanceAdminClient', () => { stream.on( 'data', ( - response: protos.google.spanner.admin.instance.v1.InstancePartition + response: protos.google.spanner.admin.instance.v1.InstancePartition, ) => { responses.push(response); - } + }, ); stream.on('end', () => { resolve(responses); @@ -4125,7 +4155,7 @@ describe('v1.InstanceAdminClient', () => { .createStream as SinonStub ) .getCall(0) - .calledWith(client.innerApiCalls.listInstancePartitions, request) + .calledWith(client.innerApiCalls.listInstancePartitions, request), ); assert( ( @@ -4134,8 +4164,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4144,25 +4174,25 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; const expectedResponse = [ generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), generateSampleMessage( - new protos.google.spanner.admin.instance.v1.InstancePartition() + new protos.google.spanner.admin.instance.v1.InstancePartition(), ), ]; client.descriptors.page.listInstancePartitions.asyncIterate = @@ -4179,7 +4209,7 @@ describe('v1.InstanceAdminClient', () => { client.descriptors.page.listInstancePartitions .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -4188,8 +4218,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4198,13 +4228,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4224,7 +4254,7 @@ describe('v1.InstanceAdminClient', () => { client.descriptors.page.listInstancePartitions .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -4233,8 +4263,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); }); @@ -4245,13 +4275,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; @@ -4279,13 +4309,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4301,14 +4331,14 @@ describe('v1.InstanceAdminClient', () => { request, ( err?: Error | null, - result?: protos.google.longrunning.IOperation[] | null + result?: protos.google.longrunning.IOperation[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -4328,24 +4358,24 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.listInstancePartitionOperations = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects( client.listInstancePartitionOperations(request), - expectedError + expectedError, ); const actualRequest = ( client.innerApiCalls.listInstancePartitionOperations as SinonStub @@ -4362,13 +4392,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = 
`parent=${defaultValue1 ?? ''}`; @@ -4402,8 +4432,8 @@ describe('v1.InstanceAdminClient', () => { .getCall(0) .calledWith( client.innerApiCalls.listInstancePartitionOperations, - request - ) + request, + ), ); assert( ( @@ -4412,8 +4442,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4422,13 +4452,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4457,8 +4487,8 @@ describe('v1.InstanceAdminClient', () => { .getCall(0) .calledWith( client.innerApiCalls.listInstancePartitionOperations, - request - ) + request, + ), ); assert( ( @@ -4467,8 +4497,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4477,13 +4507,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4505,7 +4535,7 @@ describe('v1.InstanceAdminClient', () => { client.descriptors.page.listInstancePartitionOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -4514,8 +4544,8 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); @@ -4524,13 +4554,13 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest() + new protos.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest', - ['parent'] + ['parent'], ); request.parent = defaultValue1; const expectedHeaderRequestParams = `parent=${defaultValue1 ?? 
''}`; @@ -4549,7 +4579,7 @@ describe('v1.InstanceAdminClient', () => { client.descriptors.page.listInstancePartitionOperations .asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( ( @@ -4558,14 +4588,14 @@ describe('v1.InstanceAdminClient', () => { ) .getCall(0) .args[2].otherArgs.headers['x-goog-request-params'].includes( - expectedHeaderRequestParams - ) + expectedHeaderRequestParams, + ), ); }); }); describe('Path templates', () => { - describe('instance', () => { + describe('instance', async () => { const fakePath = '/rendered/path/instance'; const expectedParameters = { project: 'projectValue', @@ -4575,7 +4605,7 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.instancePathTemplate.render = sinon .stub() .returns(fakePath); @@ -4589,7 +4619,7 @@ describe('v1.InstanceAdminClient', () => { assert( (client.pathTemplates.instancePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -4599,7 +4629,7 @@ describe('v1.InstanceAdminClient', () => { assert( (client.pathTemplates.instancePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -4609,12 +4639,12 @@ describe('v1.InstanceAdminClient', () => { assert( (client.pathTemplates.instancePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('instanceConfig', () => { + describe('instanceConfig', async () => { const fakePath = '/rendered/path/instanceConfig'; const expectedParameters = { project: 'projectValue', @@ -4624,7 +4654,7 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); 
client.pathTemplates.instanceConfigPathTemplate.render = sinon .stub() .returns(fakePath); @@ -4635,13 +4665,13 @@ describe('v1.InstanceAdminClient', () => { it('instanceConfigPath', () => { const result = client.instanceConfigPath( 'projectValue', - 'instanceConfigValue' + 'instanceConfigValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.instanceConfigPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -4651,7 +4681,7 @@ describe('v1.InstanceAdminClient', () => { assert( (client.pathTemplates.instanceConfigPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -4662,12 +4692,12 @@ describe('v1.InstanceAdminClient', () => { assert( (client.pathTemplates.instanceConfigPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('instancePartition', () => { + describe('instancePartition', async () => { const fakePath = '/rendered/path/instancePartition'; const expectedParameters = { project: 'projectValue', @@ -4678,7 +4708,7 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.instancePartitionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -4690,7 +4720,7 @@ describe('v1.InstanceAdminClient', () => { const result = client.instancePartitionPath( 'projectValue', 'instanceValue', - 'instancePartitionValue' + 'instancePartitionValue', ); assert.strictEqual(result, fakePath); assert( @@ -4699,7 +4729,7 @@ describe('v1.InstanceAdminClient', () => { .render as SinonStub ) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -4712,7 +4742,7 @@ describe('v1.InstanceAdminClient', () => { .match as SinonStub ) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); 
}); @@ -4725,7 +4755,7 @@ describe('v1.InstanceAdminClient', () => { .match as SinonStub ) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -4739,12 +4769,12 @@ describe('v1.InstanceAdminClient', () => { .match as SinonStub ) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('project', () => { + describe('project', async () => { const fakePath = '/rendered/path/project'; const expectedParameters = { project: 'projectValue', @@ -4753,7 +4783,7 @@ describe('v1.InstanceAdminClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.projectPathTemplate.render = sinon .stub() .returns(fakePath); @@ -4767,7 +4797,7 @@ describe('v1.InstanceAdminClient', () => { assert( (client.pathTemplates.projectPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -4777,7 +4807,7 @@ describe('v1.InstanceAdminClient', () => { assert( (client.pathTemplates.projectPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/test/gapic_spanner_executor_proxy_v1.ts b/test/gapic_spanner_executor_proxy_v1.ts index 200eb7f20..e3bdf237b 100644 --- a/test/gapic_spanner_executor_proxy_v1.ts +++ b/test/gapic_spanner_executor_proxy_v1.ts @@ -30,7 +30,7 @@ import {protobuf} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,13 +47,13 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof 
protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } function stubBidiStreamingCall( response?: ResponseType, - error?: Error + error?: Error, ) { const transformStub = error ? sinon.stub().callsArgWith(2, error) @@ -91,7 +91,7 @@ describe('v1.SpannerExecutorProxyClient', () => { spannerexecutorproxyModule.v1.SpannerExecutorProxyClient.servicePath; assert.strictEqual( servicePath, - 'spanner-cloud-executor.googleapis.com' + 'spanner-cloud-executor.googleapis.com', ); assert(stub.called); stub.restore(); @@ -103,7 +103,7 @@ describe('v1.SpannerExecutorProxyClient', () => { spannerexecutorproxyModule.v1.SpannerExecutorProxyClient.apiEndpoint; assert.strictEqual( apiEndpoint, - 'spanner-cloud-executor.googleapis.com' + 'spanner-cloud-executor.googleapis.com', ); assert(stub.called); stub.restore(); @@ -153,7 +153,7 @@ describe('v1.SpannerExecutorProxyClient', () => { const servicePath = client.apiEndpoint; assert.strictEqual( servicePath, - 'spanner-cloud-executor.configured.example.com' + 'spanner-cloud-executor.configured.example.com', ); if (saved) { process.env['GOOGLE_CLOUD_UNIVERSE_DOMAIN'] = saved; @@ -210,11 +210,18 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); - assert(client.spannerExecutorProxyStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.spannerExecutorProxyStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -224,9 +231,14 @@ describe('v1.SpannerExecutorProxyClient', () => { projectId: 'bogus', }); assert.strictEqual(client.spannerExecutorProxyStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ 
-273,13 +285,13 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.executor.v1.SpannerAsyncActionRequest() + new protos.google.spanner.executor.v1.SpannerAsyncActionRequest(), ); const expectedResponse = generateSampleMessage( - new protos.google.spanner.executor.v1.SpannerAsyncActionResponse() + new protos.google.spanner.executor.v1.SpannerAsyncActionResponse(), ); client.innerApiCalls.executeActionAsync = stubBidiStreamingCall(expectedResponse); @@ -288,10 +300,10 @@ describe('v1.SpannerExecutorProxyClient', () => { stream.on( 'data', ( - response: protos.google.spanner.executor.v1.SpannerAsyncActionResponse + response: protos.google.spanner.executor.v1.SpannerAsyncActionResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -304,12 +316,12 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.innerApiCalls.executeActionAsync as SinonStub) .getCall(0) - .calledWith(null) + .calledWith(null), ); assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) .args[0], - request + request, ); }); @@ -319,24 +331,24 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.executor.v1.SpannerAsyncActionRequest() + new protos.google.spanner.executor.v1.SpannerAsyncActionRequest(), ); const expectedError = new Error('expected'); client.innerApiCalls.executeActionAsync = stubBidiStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.executeActionAsync(); const promise = new Promise((resolve, reject) => { stream.on( 'data', ( - response: 
protos.google.spanner.executor.v1.SpannerAsyncActionResponse + response: protos.google.spanner.executor.v1.SpannerAsyncActionResponse, ) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -348,18 +360,18 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.innerApiCalls.executeActionAsync as SinonStub) .getCall(0) - .calledWith(null) + .calledWith(null), ); assert.deepStrictEqual( ((stream as unknown as PassThrough)._transform as SinonStub).getCall(0) .args[0], - request + request, ); }); }); describe('Path templates', () => { - describe('backup', () => { + describe('backup', async () => { const fakePath = '/rendered/path/backup'; const expectedParameters = { project: 'projectValue', @@ -371,7 +383,7 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.backupPathTemplate.render = sinon .stub() .returns(fakePath); @@ -383,13 +395,13 @@ describe('v1.SpannerExecutorProxyClient', () => { const result = client.backupPath( 'projectValue', 'instanceValue', - 'backupValue' + 'backupValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.backupPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -399,7 +411,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.backupPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -409,7 +421,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.backupPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -419,12 +431,12 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.backupPathTemplate.match as SinonStub) .getCall(-1) - 
.calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('backupSchedule', () => { + describe('backupSchedule', async () => { const fakePath = '/rendered/path/backupSchedule'; const expectedParameters = { project: 'projectValue', @@ -437,7 +449,7 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.backupSchedulePathTemplate.render = sinon .stub() .returns(fakePath); @@ -450,13 +462,13 @@ describe('v1.SpannerExecutorProxyClient', () => { 'projectValue', 'instanceValue', 'databaseValue', - 'scheduleValue' + 'scheduleValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.backupSchedulePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -466,7 +478,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -476,7 +488,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -486,7 +498,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -496,12 +508,12 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.backupSchedulePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('database', () => { + describe('database', async () => { const fakePath = '/rendered/path/database'; const expectedParameters = { project: 'projectValue', @@ -513,7 +525,7 @@ 
describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.databasePathTemplate.render = sinon .stub() .returns(fakePath); @@ -525,13 +537,13 @@ describe('v1.SpannerExecutorProxyClient', () => { const result = client.databasePath( 'projectValue', 'instanceValue', - 'databaseValue' + 'databaseValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.databasePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -541,7 +553,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -551,7 +563,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -561,12 +573,12 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('databaseRole', () => { + describe('databaseRole', async () => { const fakePath = '/rendered/path/databaseRole'; const expectedParameters = { project: 'projectValue', @@ -579,7 +591,7 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.databaseRolePathTemplate.render = sinon .stub() .returns(fakePath); @@ -592,13 +604,13 @@ describe('v1.SpannerExecutorProxyClient', () => { 'projectValue', 'instanceValue', 'databaseValue', - 'roleValue' + 'roleValue', ); assert.strictEqual(result, fakePath); assert( 
(client.pathTemplates.databaseRolePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -608,7 +620,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -618,7 +630,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -628,7 +640,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -638,12 +650,12 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.databaseRolePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('instance', () => { + describe('instance', async () => { const fakePath = '/rendered/path/instance'; const expectedParameters = { project: 'projectValue', @@ -654,7 +666,7 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.instancePathTemplate.render = sinon .stub() .returns(fakePath); @@ -668,7 +680,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.instancePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -678,7 +690,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.instancePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -688,12 +700,12 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( 
(client.pathTemplates.instancePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('instanceConfig', () => { + describe('instanceConfig', async () => { const fakePath = '/rendered/path/instanceConfig'; const expectedParameters = { project: 'projectValue', @@ -704,7 +716,7 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.instanceConfigPathTemplate.render = sinon .stub() .returns(fakePath); @@ -715,13 +727,13 @@ describe('v1.SpannerExecutorProxyClient', () => { it('instanceConfigPath', () => { const result = client.instanceConfigPath( 'projectValue', - 'instanceConfigValue' + 'instanceConfigValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.instanceConfigPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -731,7 +743,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.instanceConfigPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -742,12 +754,12 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.instanceConfigPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('instancePartition', () => { + describe('instancePartition', async () => { const fakePath = '/rendered/path/instancePartition'; const expectedParameters = { project: 'projectValue', @@ -759,7 +771,7 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.instancePartitionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -771,7 +783,7 @@ 
describe('v1.SpannerExecutorProxyClient', () => { const result = client.instancePartitionPath( 'projectValue', 'instanceValue', - 'instancePartitionValue' + 'instancePartitionValue', ); assert.strictEqual(result, fakePath); assert( @@ -780,7 +792,7 @@ describe('v1.SpannerExecutorProxyClient', () => { .render as SinonStub ) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -793,7 +805,7 @@ describe('v1.SpannerExecutorProxyClient', () => { .match as SinonStub ) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -806,7 +818,7 @@ describe('v1.SpannerExecutorProxyClient', () => { .match as SinonStub ) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -820,12 +832,12 @@ describe('v1.SpannerExecutorProxyClient', () => { .match as SinonStub ) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('session', () => { + describe('session', async () => { const fakePath = '/rendered/path/session'; const expectedParameters = { project: 'projectValue', @@ -838,7 +850,7 @@ describe('v1.SpannerExecutorProxyClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.sessionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -851,13 +863,13 @@ describe('v1.SpannerExecutorProxyClient', () => { 'projectValue', 'instanceValue', 'databaseValue', - 'sessionValue' + 'sessionValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.sessionPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -867,7 +879,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -877,7 +889,7 @@ describe('v1.SpannerExecutorProxyClient', () => { 
assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -887,7 +899,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -897,7 +909,7 @@ describe('v1.SpannerExecutorProxyClient', () => { assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/test/gapic_spanner_v1.ts b/test/gapic_spanner_v1.ts index 2ae8d2904..80b0ca1d0 100644 --- a/test/gapic_spanner_v1.ts +++ b/test/gapic_spanner_v1.ts @@ -30,7 +30,7 @@ import {protobuf} from 'google-gax'; // Dynamically loaded proto JSON is needed to get the type information // to fill in default values for request objects const root = protobuf.Root.fromJSON( - require('../protos/protos.json') + require('../protos/protos.json'), ).resolveAll(); // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -47,7 +47,7 @@ function generateSampleMessage(instance: T) { instance.constructor as typeof protobuf.Message ).toObject(instance as protobuf.Message, {defaults: true}); return (instance.constructor as typeof protobuf.Message).fromObject( - filledObject + filledObject, ) as T; } @@ -59,7 +59,7 @@ function stubSimpleCall(response?: ResponseType, error?: Error) { function stubSimpleCallWithCallback( response?: ResponseType, - error?: Error + error?: Error, ) { return error ? sinon.stub().callsArgWith(2, error) @@ -68,7 +68,7 @@ function stubSimpleCallWithCallback( function stubServerStreamingCall( response?: ResponseType, - error?: Error + error?: Error, ) { const transformStub = error ? 
sinon.stub().callsArgWith(2, error) @@ -89,7 +89,7 @@ function stubServerStreamingCall( function stubPageStreamingCall( responses?: ResponseType[], - error?: Error + error?: Error, ) { const pagingStub = sinon.stub(); if (responses) { @@ -127,7 +127,7 @@ function stubPageStreamingCall( function stubAsyncIterationCall( responses?: ResponseType[], - error?: Error + error?: Error, ) { let counter = 0; const asyncIterable = { @@ -271,11 +271,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); - assert(client.spannerStub); - client.close().then(() => { - done(); + client.initialize().catch(err => { + throw err; }); + assert(client.spannerStub); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has close method for the non-initialized client', done => { @@ -284,9 +291,14 @@ describe('v1.SpannerClient', () => { projectId: 'bogus', }); assert.strictEqual(client.spannerStub, undefined); - client.close().then(() => { - done(); - }); + client + .close() + .then(() => { + done(); + }) + .catch(err => { + throw err; + }); }); it('has getProjectId method', async () => { @@ -330,18 +342,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CreateSessionRequest() + new protos.google.spanner.v1.CreateSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CreateSessionRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.Session() + new protos.google.spanner.v1.Session(), ); client.innerApiCalls.createSession = stubSimpleCall(expectedResponse); const [response] = await client.createSession(request); @@ -361,18 +373,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CreateSessionRequest() + new protos.google.spanner.v1.CreateSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CreateSessionRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.Session() + new protos.google.spanner.v1.Session(), ); client.innerApiCalls.createSession = stubSimpleCallWithCallback(expectedResponse); @@ -381,14 +393,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.ISession | null + result?: protos.google.spanner.v1.ISession | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -408,20 +420,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CreateSessionRequest() + new protos.google.spanner.v1.CreateSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CreateSessionRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.createSession = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.createSession(request), expectedError); const actualRequest = ( @@ -439,17 +451,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CreateSessionRequest() + new protos.google.spanner.v1.CreateSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CreateSessionRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.createSession(request), expectedError); }); }); @@ -460,18 +474,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchCreateSessionsRequest() + new protos.google.spanner.v1.BatchCreateSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchCreateSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.BatchCreateSessionsResponse() + new protos.google.spanner.v1.BatchCreateSessionsResponse(), ); client.innerApiCalls.batchCreateSessions = stubSimpleCall(expectedResponse); @@ -492,18 +506,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchCreateSessionsRequest() + new protos.google.spanner.v1.BatchCreateSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchCreateSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.BatchCreateSessionsResponse() + new protos.google.spanner.v1.BatchCreateSessionsResponse(), ); client.innerApiCalls.batchCreateSessions = stubSimpleCallWithCallback(expectedResponse); @@ -512,14 +526,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.IBatchCreateSessionsResponse | null + result?: protos.google.spanner.v1.IBatchCreateSessionsResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -539,20 +553,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchCreateSessionsRequest() + new protos.google.spanner.v1.BatchCreateSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchCreateSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const 
expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.batchCreateSessions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.batchCreateSessions(request), expectedError); const actualRequest = ( @@ -570,17 +584,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchCreateSessionsRequest() + new protos.google.spanner.v1.BatchCreateSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchCreateSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.batchCreateSessions(request), expectedError); }); }); @@ -591,18 +607,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.GetSessionRequest() + new protos.google.spanner.v1.GetSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.GetSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.Session() + new protos.google.spanner.v1.Session(), ); client.innerApiCalls.getSession = stubSimpleCall(expectedResponse); const [response] = await client.getSession(request); @@ -622,18 +638,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.GetSessionRequest() + new protos.google.spanner.v1.GetSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.GetSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.Session() + new protos.google.spanner.v1.Session(), ); client.innerApiCalls.getSession = stubSimpleCallWithCallback(expectedResponse); @@ -642,14 +658,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.ISession | null + result?: protos.google.spanner.v1.ISession | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -669,20 +685,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.GetSessionRequest() + new protos.google.spanner.v1.GetSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.GetSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.getSession = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.getSession(request), expectedError); const actualRequest = ( @@ -700,17 +716,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.GetSessionRequest() + new protos.google.spanner.v1.GetSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.GetSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.getSession(request), expectedError); }); }); @@ -721,18 +739,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.DeleteSessionRequest() + new protos.google.spanner.v1.DeleteSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.DeleteSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteSession = stubSimpleCall(expectedResponse); const [response] = await client.deleteSession(request); @@ -752,18 +770,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.DeleteSessionRequest() + new protos.google.spanner.v1.DeleteSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.DeleteSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.deleteSession = stubSimpleCallWithCallback(expectedResponse); @@ -772,14 +790,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -799,20 +817,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.DeleteSessionRequest() + new protos.google.spanner.v1.DeleteSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.DeleteSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedHeaderRequestParams = `name=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.deleteSession = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.deleteSession(request), expectedError); const actualRequest = ( @@ -830,17 +848,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.DeleteSessionRequest() + new protos.google.spanner.v1.DeleteSessionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.DeleteSessionRequest', - ['name'] + ['name'], ); request.name = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.deleteSession(request), expectedError); }); }); @@ -851,18 +871,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.ResultSet() + new protos.google.spanner.v1.ResultSet(), ); client.innerApiCalls.executeSql = stubSimpleCall(expectedResponse); const [response] = await client.executeSql(request); @@ -882,18 +902,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.ResultSet() + new protos.google.spanner.v1.ResultSet(), ); client.innerApiCalls.executeSql = stubSimpleCallWithCallback(expectedResponse); @@ -902,14 +922,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.IResultSet | null + result?: protos.google.spanner.v1.IResultSet | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -929,20 +949,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.executeSql = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.executeSql(request), expectedError); const actualRequest = ( @@ -960,17 +980,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.executeSql(request), expectedError); }); }); @@ -981,18 +1003,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteBatchDmlRequest() + new protos.google.spanner.v1.ExecuteBatchDmlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteBatchDmlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.ExecuteBatchDmlResponse() + new protos.google.spanner.v1.ExecuteBatchDmlResponse(), ); client.innerApiCalls.executeBatchDml = stubSimpleCall(expectedResponse); const [response] = await client.executeBatchDml(request); @@ -1012,18 +1034,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteBatchDmlRequest() + new protos.google.spanner.v1.ExecuteBatchDmlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteBatchDmlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.ExecuteBatchDmlResponse() + new protos.google.spanner.v1.ExecuteBatchDmlResponse(), ); client.innerApiCalls.executeBatchDml = stubSimpleCallWithCallback(expectedResponse); @@ -1032,14 +1054,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.IExecuteBatchDmlResponse | null + result?: protos.google.spanner.v1.IExecuteBatchDmlResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1059,20 +1081,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteBatchDmlRequest() + new protos.google.spanner.v1.ExecuteBatchDmlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteBatchDmlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const 
expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.executeBatchDml = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.executeBatchDml(request), expectedError); const actualRequest = ( @@ -1090,17 +1112,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteBatchDmlRequest() + new protos.google.spanner.v1.ExecuteBatchDmlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteBatchDmlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.executeBatchDml(request), expectedError); }); }); @@ -1111,18 +1135,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.ResultSet() + new protos.google.spanner.v1.ResultSet(), ); client.innerApiCalls.read = stubSimpleCall(expectedResponse); const [response] = await client.read(request); @@ -1141,18 +1165,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.ResultSet() + new protos.google.spanner.v1.ResultSet(), ); client.innerApiCalls.read = stubSimpleCallWithCallback(expectedResponse); const promise = new Promise((resolve, reject) => { @@ -1160,14 +1184,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.IResultSet | null + result?: protos.google.spanner.v1.IResultSet | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1186,13 +1210,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; @@ -1213,17 +1237,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.read(request), expectedError); }); }); @@ -1234,18 +1260,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BeginTransactionRequest() + new protos.google.spanner.v1.BeginTransactionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BeginTransactionRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.Transaction() + new protos.google.spanner.v1.Transaction(), ); client.innerApiCalls.beginTransaction = stubSimpleCall(expectedResponse); const [response] = await client.beginTransaction(request); @@ -1265,18 +1291,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BeginTransactionRequest() + new protos.google.spanner.v1.BeginTransactionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BeginTransactionRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.Transaction() + new protos.google.spanner.v1.Transaction(), ); client.innerApiCalls.beginTransaction = stubSimpleCallWithCallback(expectedResponse); @@ -1285,14 +1311,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.ITransaction | null + result?: protos.google.spanner.v1.ITransaction | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1312,20 +1338,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BeginTransactionRequest() + new protos.google.spanner.v1.BeginTransactionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BeginTransactionRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.beginTransaction = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.beginTransaction(request), expectedError); const actualRequest = ( @@ -1343,17 +1369,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BeginTransactionRequest() + new protos.google.spanner.v1.BeginTransactionRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BeginTransactionRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.beginTransaction(request), expectedError); }); }); @@ -1364,24 +1392,24 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CommitRequest() + new protos.google.spanner.v1.CommitRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CommitRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.CommitResponse() + new protos.google.spanner.v1.CommitResponse(), ); client.innerApiCalls.commit = stubSimpleCall(expectedResponse); const [response] = await client.commit(request); assert.deepStrictEqual(response, expectedResponse); const actualRequest = (client.innerApiCalls.commit as SinonStub).getCall( - 0 + 0, ).args[0]; assert.deepStrictEqual(actualRequest, request); const actualHeaderRequestParams = ( @@ -1395,18 +1423,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CommitRequest() + new protos.google.spanner.v1.CommitRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CommitRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.CommitResponse() + new protos.google.spanner.v1.CommitResponse(), ); client.innerApiCalls.commit = stubSimpleCallWithCallback(expectedResponse); @@ -1415,20 +1443,20 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.ICommitResponse | null + result?: protos.google.spanner.v1.ICommitResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; assert.deepStrictEqual(response, expectedResponse); const actualRequest = (client.innerApiCalls.commit as SinonStub).getCall( - 0 + 0, ).args[0]; assert.deepStrictEqual(actualRequest, request); const actualHeaderRequestParams = ( @@ -1442,13 +1470,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CommitRequest() + new protos.google.spanner.v1.CommitRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CommitRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; @@ -1456,7 +1484,7 @@ describe('v1.SpannerClient', () => { client.innerApiCalls.commit = stubSimpleCall(undefined, expectedError); await assert.rejects(client.commit(request), expectedError); const actualRequest = (client.innerApiCalls.commit as SinonStub).getCall( - 0 + 0, ).args[0]; assert.deepStrictEqual(actualRequest, request); const actualHeaderRequestParams = ( @@ -1470,17 +1498,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.CommitRequest() + new protos.google.spanner.v1.CommitRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.CommitRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.commit(request), expectedError); }); }); @@ -1491,18 +1521,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.RollbackRequest() + new protos.google.spanner.v1.RollbackRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.RollbackRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.rollback = stubSimpleCall(expectedResponse); const [response] = await client.rollback(request); @@ -1522,18 +1552,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.RollbackRequest() + new protos.google.spanner.v1.RollbackRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.RollbackRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.protobuf.Empty() + new protos.google.protobuf.Empty(), ); client.innerApiCalls.rollback = stubSimpleCallWithCallback(expectedResponse); @@ -1542,14 +1572,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.protobuf.IEmpty | null + result?: protos.google.protobuf.IEmpty | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1569,13 +1599,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.RollbackRequest() + new protos.google.spanner.v1.RollbackRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.RollbackRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; @@ -1597,17 +1627,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.RollbackRequest() + new protos.google.spanner.v1.RollbackRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.RollbackRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.rollback(request), expectedError); }); }); @@ -1618,18 +1650,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionQueryRequest() + new protos.google.spanner.v1.PartitionQueryRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionQueryRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartitionResponse() + new protos.google.spanner.v1.PartitionResponse(), ); client.innerApiCalls.partitionQuery = stubSimpleCall(expectedResponse); const [response] = await client.partitionQuery(request); @@ -1649,18 +1681,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionQueryRequest() + new protos.google.spanner.v1.PartitionQueryRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionQueryRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartitionResponse() + new protos.google.spanner.v1.PartitionResponse(), ); client.innerApiCalls.partitionQuery = stubSimpleCallWithCallback(expectedResponse); @@ -1669,14 +1701,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.IPartitionResponse | null + result?: protos.google.spanner.v1.IPartitionResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1696,20 +1728,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionQueryRequest() + new protos.google.spanner.v1.PartitionQueryRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionQueryRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.partitionQuery = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.partitionQuery(request), expectedError); const actualRequest = ( @@ -1727,17 +1759,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionQueryRequest() + new protos.google.spanner.v1.PartitionQueryRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionQueryRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.partitionQuery(request), expectedError); }); }); @@ -1748,18 +1782,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionReadRequest() + new protos.google.spanner.v1.PartitionReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartitionResponse() + new protos.google.spanner.v1.PartitionResponse(), ); client.innerApiCalls.partitionRead = stubSimpleCall(expectedResponse); const [response] = await client.partitionRead(request); @@ -1779,18 +1813,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionReadRequest() + new protos.google.spanner.v1.PartitionReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartitionResponse() + new protos.google.spanner.v1.PartitionResponse(), ); client.innerApiCalls.partitionRead = stubSimpleCallWithCallback(expectedResponse); @@ -1799,14 +1833,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.IPartitionResponse | null + result?: protos.google.spanner.v1.IPartitionResponse | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -1826,20 +1860,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionReadRequest() + new protos.google.spanner.v1.PartitionReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.partitionRead = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.partitionRead(request), expectedError); const actualRequest = ( @@ -1857,17 +1891,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.PartitionReadRequest() + new protos.google.spanner.v1.PartitionReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.PartitionReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); await assert.rejects(client.partitionRead(request), expectedError); }); }); @@ -1878,18 +1914,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartialResultSet() + new protos.google.spanner.v1.PartialResultSet(), ); client.innerApiCalls.executeStreamingSql = stubServerStreamingCall(expectedResponse); @@ -1899,7 +1935,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -1919,20 +1955,22 @@ describe('v1.SpannerClient', () => { it('invokes executeStreamingSql without error and gaxServerStreamingRetries enabled', async () => { const client = new spannerModule.v1.SpannerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', gaxServerStreamingRetries: true, }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartialResultSet() + new protos.google.spanner.v1.PartialResultSet(), ); client.innerApiCalls.executeStreamingSql = stubServerStreamingCall(expectedResponse); @@ -1942,7 +1980,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -1965,20 +2003,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.executeStreamingSql = stubServerStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.executeStreamingSql(request); const promise = new Promise((resolve, reject) => { @@ -1986,7 +2024,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2008,17 +2046,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ExecuteSqlRequest() + new protos.google.spanner.v1.ExecuteSqlRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ExecuteSqlRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); const stream = client.executeStreamingSql(request, { retryRequestOptions: {noResponseRetries: 0}, }); @@ -2027,7 +2067,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2049,18 +2089,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartialResultSet() + new protos.google.spanner.v1.PartialResultSet(), ); client.innerApiCalls.streamingRead = stubServerStreamingCall(expectedResponse); @@ -2070,7 +2110,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2090,20 +2130,22 @@ describe('v1.SpannerClient', () => { it('invokes streamingRead without error and gaxServerStreamingRetries enabled', async () => { const client = new spannerModule.v1.SpannerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', gaxServerStreamingRetries: true, }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.PartialResultSet() + new protos.google.spanner.v1.PartialResultSet(), ); client.innerApiCalls.streamingRead = stubServerStreamingCall(expectedResponse); @@ -2113,7 +2155,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2136,20 +2178,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedError = new Error('expected'); client.innerApiCalls.streamingRead = stubServerStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.streamingRead(request); const promise = new Promise((resolve, reject) => { @@ -2157,7 +2199,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2179,17 +2221,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ReadRequest() + new protos.google.spanner.v1.ReadRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ReadRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - 
client.close(); + client.close().catch(err => { + throw err; + }); const stream = client.streamingRead(request, { retryRequestOptions: {noResponseRetries: 0}, }); @@ -2198,7 +2242,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.PartialResultSet) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2220,18 +2264,18 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchWriteRequest() + new protos.google.spanner.v1.BatchWriteRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchWriteRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.BatchWriteResponse() + new protos.google.spanner.v1.BatchWriteResponse(), ); client.innerApiCalls.batchWrite = stubServerStreamingCall(expectedResponse); @@ -2241,7 +2285,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.BatchWriteResponse) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2261,20 +2305,22 @@ describe('v1.SpannerClient', () => { it('invokes batchWrite without error and gaxServerStreamingRetries enabled', async () => { const client = new spannerModule.v1.SpannerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', gaxServerStreamingRetries: true, }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchWriteRequest() + new protos.google.spanner.v1.BatchWriteRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchWriteRequest', - ['session'] + 
['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? ''}`; const expectedResponse = generateSampleMessage( - new protos.google.spanner.v1.BatchWriteResponse() + new protos.google.spanner.v1.BatchWriteResponse(), ); client.innerApiCalls.batchWrite = stubServerStreamingCall(expectedResponse); @@ -2284,7 +2330,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.BatchWriteResponse) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2307,20 +2353,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchWriteRequest() + new protos.google.spanner.v1.BatchWriteRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchWriteRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedHeaderRequestParams = `session=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.batchWrite = stubServerStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.batchWrite(request); const promise = new Promise((resolve, reject) => { @@ -2328,7 +2374,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.BatchWriteResponse) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2350,17 +2396,19 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.BatchWriteRequest() + new protos.google.spanner.v1.BatchWriteRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.BatchWriteRequest', - ['session'] + ['session'], ); request.session = defaultValue1; const expectedError = new Error('The client has already been closed.'); - client.close(); + client.close().catch(err => { + throw err; + }); const stream = client.batchWrite(request, { retryRequestOptions: {noResponseRetries: 0}, }); @@ -2369,7 +2417,7 @@ describe('v1.SpannerClient', () => { 'data', (response: protos.google.spanner.v1.BatchWriteResponse) => { resolve(response); - } + }, ); stream.on('error', (err: Error) => { reject(err); @@ -2391,13 +2439,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ListSessionsRequest() + new protos.google.spanner.v1.ListSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ListSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; @@ -2424,13 +2472,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ListSessionsRequest() + new protos.google.spanner.v1.ListSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ListSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; @@ -2446,14 +2494,14 @@ describe('v1.SpannerClient', () => { request, ( err?: Error | null, - result?: protos.google.spanner.v1.ISession[] | null + result?: protos.google.spanner.v1.ISession[] | null, ) => { if (err) { reject(err); } else { resolve(result); } - } + }, ); }); const response = await promise; @@ -2473,20 +2521,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ListSessionsRequest() + new protos.google.spanner.v1.ListSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ListSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.innerApiCalls.listSessions = stubSimpleCall( undefined, - expectedError + expectedError, ); await assert.rejects(client.listSessions(request), expectedError); const actualRequest = ( @@ -2504,13 +2552,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ListSessionsRequest() + new protos.google.spanner.v1.ListSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ListSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; @@ -2539,14 +2587,14 @@ describe('v1.SpannerClient', () => { assert( (client.descriptors.page.listSessions.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listSessions, request) + .calledWith(client.innerApiCalls.listSessions, request), ); assert( (client.descriptors.page.listSessions.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -2555,20 +2603,20 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ListSessionsRequest() + new protos.google.spanner.v1.ListSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ListSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; const expectedError = new Error('expected'); client.descriptors.page.listSessions.createStream = stubPageStreamingCall( undefined, - expectedError + expectedError, ); const stream = client.listSessionsStream(request); const promise = new Promise((resolve, reject) => { @@ -2587,14 +2635,14 @@ describe('v1.SpannerClient', () => { assert( (client.descriptors.page.listSessions.createStream as SinonStub) .getCall(0) - .calledWith(client.innerApiCalls.listSessions, request) + .calledWith(client.innerApiCalls.listSessions, request), ); assert( (client.descriptors.page.listSessions.createStream as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -2603,13 +2651,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ListSessionsRequest() + new protos.google.spanner.v1.ListSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ListSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? 
''}`; @@ -2630,14 +2678,14 @@ describe('v1.SpannerClient', () => { ( client.descriptors.page.listSessions.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listSessions.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); @@ -2646,13 +2694,13 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); const request = generateSampleMessage( - new protos.google.spanner.v1.ListSessionsRequest() + new protos.google.spanner.v1.ListSessionsRequest(), ); const defaultValue1 = getTypeDefaultValue( '.google.spanner.v1.ListSessionsRequest', - ['database'] + ['database'], ); request.database = defaultValue1; const expectedHeaderRequestParams = `database=${defaultValue1 ?? ''}`; @@ -2670,20 +2718,20 @@ describe('v1.SpannerClient', () => { ( client.descriptors.page.listSessions.asyncIterate as SinonStub ).getCall(0).args[1], - request + request, ); assert( (client.descriptors.page.listSessions.asyncIterate as SinonStub) .getCall(0) .args[2].otherArgs.headers[ 'x-goog-request-params' - ].includes(expectedHeaderRequestParams) + ].includes(expectedHeaderRequestParams), ); }); }); describe('Path templates', () => { - describe('database', () => { + describe('database', async () => { const fakePath = '/rendered/path/database'; const expectedParameters = { project: 'projectValue', @@ -2694,7 +2742,7 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.databasePathTemplate.render = sinon .stub() .returns(fakePath); @@ -2706,13 +2754,13 @@ describe('v1.SpannerClient', () => { const result = client.databasePath( 'projectValue', 'instanceValue', - 
'databaseValue' + 'databaseValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.databasePathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -2722,7 +2770,7 @@ describe('v1.SpannerClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -2732,7 +2780,7 @@ describe('v1.SpannerClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -2742,12 +2790,12 @@ describe('v1.SpannerClient', () => { assert( (client.pathTemplates.databasePathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); - describe('session', () => { + describe('session', async () => { const fakePath = '/rendered/path/session'; const expectedParameters = { project: 'projectValue', @@ -2759,7 +2807,7 @@ describe('v1.SpannerClient', () => { credentials: {client_email: 'bogus', private_key: 'bogus'}, projectId: 'bogus', }); - client.initialize(); + await client.initialize(); client.pathTemplates.sessionPathTemplate.render = sinon .stub() .returns(fakePath); @@ -2772,13 +2820,13 @@ describe('v1.SpannerClient', () => { 'projectValue', 'instanceValue', 'databaseValue', - 'sessionValue' + 'sessionValue', ); assert.strictEqual(result, fakePath); assert( (client.pathTemplates.sessionPathTemplate.render as SinonStub) .getCall(-1) - .calledWith(expectedParameters) + .calledWith(expectedParameters), ); }); @@ -2788,7 +2836,7 @@ describe('v1.SpannerClient', () => { assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -2798,7 +2846,7 @@ describe('v1.SpannerClient', () => { assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + 
.calledWith(fakePath), ); }); @@ -2808,7 +2856,7 @@ describe('v1.SpannerClient', () => { assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); @@ -2818,7 +2866,7 @@ describe('v1.SpannerClient', () => { assert( (client.pathTemplates.sessionPathTemplate.match as SinonStub) .getCall(-1) - .calledWith(fakePath) + .calledWith(fakePath), ); }); }); diff --git a/test/index.ts b/test/index.ts index 9b2c8286a..4be0abfaf 100644 --- a/test/index.ts +++ b/test/index.ts @@ -233,7 +233,7 @@ describe('Spanner', () => { it('should create an auth instance from google-auth-library', () => { assert.deepStrictEqual( getFake(spanner.auth).calledWith_[0], - EXPECTED_OPTIONS + EXPECTED_OPTIONS, ); }); @@ -251,7 +251,7 @@ describe('Spanner', () => { assert.deepStrictEqual( getFake(spanner.auth).calledWith_[0], - expectedOptions + expectedOptions, ); }); @@ -265,11 +265,11 @@ describe('Spanner', () => { const expectedOptions = Object.assign( {}, EXPECTED_OPTIONS, - keepaliveOptions + keepaliveOptions, ); assert.deepStrictEqual( getFake(spanner.auth).calledWith_[0], - expectedOptions + expectedOptions, ); }); @@ -339,14 +339,14 @@ describe('Spanner', () => { const spanner = new Spanner(fakeDefaultTxnOptions); assert.strictEqual( spanner.defaultTransactionOptions, - fakeDefaultTxnOptions.defaultTransactionOptions + fakeDefaultTxnOptions.defaultTransactionOptions, ); }); it('should set projectFormattedName_', () => { assert.strictEqual( spanner.projectFormattedName_, - `projects/${spanner.projectId}` + `projects/${spanner.projectId}`, ); }); @@ -400,7 +400,7 @@ describe('Spanner', () => { } catch (e) { assert.strictEqual( (e as Error).message, - 'SPANNER_EMULATOR_HOST must not start with a protocol specification (http/https)' + 'SPANNER_EMULATOR_HOST must not start with a protocol specification (http/https)', ); } }); @@ -414,7 +414,7 @@ describe('Spanner', () => { } catch (e) { assert.strictEqual( (e as 
Error).message, - 'Invalid port number: not_a_port' + 'Invalid port number: not_a_port', ); } }); @@ -755,13 +755,13 @@ describe('Spanner', () => { it('should throw if a name is not provided', () => { assert.throws(() => { - spanner.createInstance(null!, {} as CreateInstanceRequest); + void spanner.createInstance(null!, {} as CreateInstanceRequest); }, /A name is required to create an instance\./); }); it('should throw if a config object is not provided', () => { assert.throws(() => { - spanner.createInstance(NAME, null!); + void spanner.createInstance(NAME, null!); }, /A configuration object is required to create an instance\./); }); @@ -836,7 +836,7 @@ describe('Spanner', () => { spanner.request = config => { assert.strictEqual( config.reqOpts.instance.processingUnits, - processingUnits + processingUnits, ); assert.strictEqual(config.reqOpts.instance.nodeCount, undefined); done(); @@ -851,7 +851,7 @@ describe('Spanner', () => { const config = Object.assign({}, CONFIG, {nodeCount, processingUnits}); assert.throws(() => { - spanner.createInstance(NAME, config); + void spanner.createInstance(NAME, config); }, /Only one of nodeCount or processingUnits can be specified\./); }); @@ -887,7 +887,7 @@ describe('Spanner', () => { const reqOpts = config_.reqOpts; assert.strictEqual( reqOpts.instance.config, - 'projects/' + spanner.projectId + '/instanceConfigs/' + name + 'projects/' + spanner.projectId + '/instanceConfigs/' + name, ); done(); }; @@ -990,7 +990,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); const expectedGaxOpts = {timeout: 1000}; @@ -1025,7 +1025,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); const expectedGaxOpts = {timeout: 1000}; @@ 
-1128,7 +1128,7 @@ describe('Spanner', () => { const EXPECTEDNEXTQUERY = extend( {}, GETINSTANCESOPTIONS, - NEXTPAGEREQUEST + NEXTPAGEREQUEST, ); spanner.request = (config, callback) => { callback(...GAX_RESPONSE_ARGS); @@ -1181,7 +1181,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); const expectedGaxOpts = {timeout: 1000}; @@ -1216,7 +1216,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); const expectedGaxOpts = {timeout: 1000}; @@ -1267,13 +1267,16 @@ describe('Spanner', () => { it('should throw if a name is not provided', () => { assert.throws(() => { - spanner.createInstanceConfig(null!, {} as CreateInstanceConfigRequest); + void spanner.createInstanceConfig( + null!, + {} as CreateInstanceConfigRequest, + ); }, /A name is required to create an instance config\./); }); it('should throw if a config object is not provided', () => { assert.throws(() => { - spanner.createInstanceConfig(NAME, null!); + void spanner.createInstanceConfig(NAME, null!); }, /A configuration object is required to create an instance config\./); }); @@ -1281,7 +1284,7 @@ describe('Spanner', () => { // eslint-disable-next-line @typescript-eslint/no-unused-vars const {baseConfig, ...CONFIG_WITHOUT_BASE_CONFIG} = ORIGINAL_CONFIG; assert.throws(() => { - spanner.createInstanceConfig(NAME, CONFIG_WITHOUT_BASE_CONFIG!); + void spanner.createInstanceConfig(NAME, CONFIG_WITHOUT_BASE_CONFIG!); }, /Base instance config is required to create an instance config\./); }); @@ -1333,7 +1336,7 @@ describe('Spanner', () => { spanner.request = config => { assert.strictEqual( config.reqOpts.instanceConfig.displayName, - displayName + displayName, ); done(); }; @@ -1370,7 +1373,7 @@ 
describe('Spanner', () => { assert.strictEqual(op, null); assert.strictEqual(resp, API_RESPONSE); done(); - } + }, ); }); }); @@ -1404,7 +1407,7 @@ describe('Spanner', () => { assert.strictEqual(op, OPERATION); assert.strictEqual(resp, API_RESPONSE); done(); - } + }, ); }); }); @@ -1459,7 +1462,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); const expectedGaxOpts = {timeout: 1000}; @@ -1494,7 +1497,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); const expectedGaxOpts = {timeout: 1000}; @@ -1544,7 +1547,7 @@ describe('Spanner', () => { const EXPECTEDNEXTQUERY = extend( {}, GETINSTANCECONFIGSOPTIONS, - NEXTPAGEREQUEST + NEXTPAGEREQUEST, ); spanner.request = (config, callback) => { callback(...RESPONSE); @@ -1600,7 +1603,7 @@ describe('Spanner', () => { { parent: spanner.projectFormattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); spanner.request = config => { @@ -1634,7 +1637,7 @@ describe('Spanner', () => { { parent: spanner.projectFormattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); spanner.request = config => { @@ -1681,7 +1684,7 @@ describe('Spanner', () => { const EXPECTED_NEXT_QUERY = extend( {}, GET_INSTANCE_CONFIGS_OPERATIONS_OPTIONS, - NEXT_PAGE_REQUEST + NEXT_PAGE_REQUEST, ); spanner.request = (config, callback) => { callback(...RESPONSE); @@ -1692,7 +1695,7 @@ describe('Spanner', () => { } spanner.getInstanceConfigOperations( GET_INSTANCE_CONFIGS_OPERATIONS_OPTIONS, - callback + callback, ); }); }); @@ -1729,7 +1732,7 @@ describe('Spanner', () => { }; 
const returnedValue = spanner.getInstanceConfigsStream( - OPTIONS as GetInstanceConfigsOptions + OPTIONS as GetInstanceConfigsOptions, ); assert.strictEqual(returnedValue, returnValue); }); @@ -1767,7 +1770,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); delete expectedReqOpts.gaxOptions; const expectedGaxOpts = {timeout: 1000}; @@ -1799,7 +1802,7 @@ describe('Spanner', () => { pageSize: optionsPageSize, pageToken: optionsPageToken, gaxOptions, - } + }, ); const expectedReqOpts: {gaxOptions?: {}} = extend( {}, @@ -1807,7 +1810,7 @@ describe('Spanner', () => { { parent: 'projects/' + spanner.projectId, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); delete expectedReqOpts.gaxOptions; const expectedGaxOpts = {timeout: 1000}; @@ -1862,7 +1865,7 @@ describe('Spanner', () => { const returnedValue = spanner.getInstanceConfig( 'nam1', options, - callback + callback, ); assert.strictEqual(returnedValue, returnValue); }); diff --git a/test/instance-config.ts b/test/instance-config.ts index 3faf7d992..eb5db6863 100644 --- a/test/instance-config.ts +++ b/test/instance-config.ts @@ -147,7 +147,7 @@ describe('InstanceConfig', () => { it('should return the name if already formatted', () => { assert.strictEqual( InstanceConfig.formatName_(SPANNER.projectId, PATH), - PATH + PATH, ); }); @@ -322,7 +322,7 @@ describe('InstanceConfig', () => { {}, extend({}, METADATA, { name: instanceConfig.formattedName_, - }) + }), ) as instConfig.IInstanceConfig as instConfig.SetInstanceConfigMetadataRequest; assert.deepStrictEqual(config.reqOpts.instanceConfig, expectedReqOpts); @@ -341,7 +341,7 @@ describe('InstanceConfig', () => { const returnValue = instanceConfig.setMetadata( Object.assign({}, {instanceConfig: METADATA}), - callback + callback, ); 
assert.strictEqual(returnValue, requestReturnValue); }); @@ -354,14 +354,14 @@ describe('InstanceConfig', () => { }; instanceConfig.setMetadata( Object.assign({}, {instanceConfig: METADATA}, {gaxOpts: gaxOptions}), - assert.ifError + assert.ifError, ); }); it('should not require a callback', () => { - assert.doesNotThrow(() => { - instanceConfig.setMetadata( - Object.assign({}, {instanceConfig: METADATA}) + assert.doesNotThrow(async () => { + await instanceConfig.setMetadata( + Object.assign({}, {instanceConfig: METADATA}), ); }); }); diff --git a/test/instance.ts b/test/instance.ts index 7d6462dfb..bf287b5bf 100644 --- a/test/instance.ts +++ b/test/instance.ts @@ -29,7 +29,7 @@ import {Duplex} from 'stream'; import * as inst from '../src/instance'; import {Spanner, Database, RequestConfig} from '../src'; -import arrify = require('arrify'); +import {toArray} from '../src/helper'; import {SessionPoolOptions} from '../src/session-pool'; import {Backup} from '../src/backup'; import {PreciseDate} from '@google-cloud/precise-date'; @@ -205,7 +205,7 @@ describe('Instance', () => { it('should throw if a name is not provided', () => { assert.throws(() => { - instance.createDatabase(null!); + void instance.createDatabase(null!); }, /A name is required to create a database\./); }); @@ -235,7 +235,7 @@ describe('Instance', () => { parent: instance.formattedName_, createStatement: 'CREATE DATABASE `' + NAME + '`', }, - OPTIONS + OPTIONS, ); assert.deepStrictEqual(config.reqOpts, expectedReqOpts); @@ -285,7 +285,7 @@ describe('Instance', () => { parent: instance.formattedName_, createStatement: 'CREATE DATABASE `' + NAME + '`', }, - OPTIONS + OPTIONS, ); assert.deepStrictEqual(config.reqOpts, expectedReqOpts); @@ -440,7 +440,7 @@ describe('Instance', () => { const database = instance.database( NAME, - poolOptions + poolOptions, ) as {} as FakeDatabase; assert(database instanceof FakeDatabase); @@ -480,32 +480,32 @@ describe('Instance', () => { NAME + '/' + 
JSON.stringify(Object.entries(fakeSessionPoolOptions).sort()), - fakeDatabaseWithSessionPoolOptions + fakeDatabaseWithSessionPoolOptions, ); const database = instance.database(NAME); const databaseWithEmptyOptions = instance.database( NAME, - emptySessionPoolOptions + emptySessionPoolOptions, ); const databaseWithOptions = instance.database( NAME, - fakeSessionPoolOptions + fakeSessionPoolOptions, ); const databaseWithOptionsInOtherOrder = instance.database( NAME, - fakeSessionPoolOptionsInOtherOrder + fakeSessionPoolOptionsInOtherOrder, ); assert.strictEqual(database, fakeDatabase); assert.strictEqual(databaseWithEmptyOptions, fakeDatabase); assert.strictEqual( databaseWithOptions, - fakeDatabaseWithSessionPoolOptions + fakeDatabaseWithSessionPoolOptions, ); assert.strictEqual( databaseWithOptionsInOtherOrder, - fakeDatabaseWithSessionPoolOptions + fakeDatabaseWithSessionPoolOptions, ); }); }); @@ -605,11 +605,11 @@ describe('Instance', () => { opts_: | inst.GetInstanceMetadataOptions | inst.GetInstanceMetadataCallback, - cb + cb, ) => { cb = typeof opts_ === 'function' ? opts_ : cb; cb(error as grpc.ServiceError); - } + }, ); instance.exists((err, exists) => { @@ -627,11 +627,11 @@ describe('Instance', () => { opts_: | inst.GetInstanceMetadataOptions | inst.GetInstanceMetadataCallback, - cb + cb, ) => { cb = typeof opts_ === 'function' ? opts_ : cb; cb(null); - } + }, ); instance.exists((err, exists) => { @@ -651,12 +651,12 @@ describe('Instance', () => { opts_: | inst.GetInstanceMetadataOptions | inst.GetInstanceMetadataCallback, - callback + callback, ) => { callback = typeof opts_ === 'function' ? 
opts_ : callback; callback(error as grpc.ServiceError); - } + }, ); instance.exists((err, exists) => { @@ -768,7 +768,7 @@ describe('Instance', () => { labels, fieldNames: 'labels', }, - assert.ifError + assert.ifError, ); }); @@ -949,7 +949,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); delete expectedReqOpts.gaxOptions; @@ -983,7 +983,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); delete expectedReqOpts.gaxOptions; @@ -1086,7 +1086,7 @@ describe('Instance', () => { const EXPECTEDNEXTQUERY = extend( {}, GETDATABASESOPTIONS, - NEXTPAGEREQUEST + NEXTPAGEREQUEST, ); instance.request = (config, callback) => { callback(...REQUEST_RESPONSE_ARGS); @@ -1140,7 +1140,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); instance.requestStream = config => { @@ -1175,7 +1175,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); delete expectedReqOpts.gaxOptions; @@ -1235,7 +1235,7 @@ describe('Instance', () => { instance.request = config => { assert.deepStrictEqual(config.reqOpts, { fieldMask: { - paths: arrify(fieldNames).map(snakeCase), + paths: toArray(fieldNames).map(snakeCase), }, name: instance.formattedName_, }); @@ -1338,8 +1338,8 @@ describe('Instance', () => { }); it('should not require a callback', () => { - assert.doesNotThrow(() => { - instance.setMetadata(METADATA); + assert.doesNotThrow(async () => { + await instance.setMetadata(METADATA); }); }); }); @@ -1387,7 
+1387,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); instance.request = config => { @@ -1420,7 +1420,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); instance.request = config => { @@ -1521,7 +1521,7 @@ describe('Instance', () => { const EXPECTEDNEXTQUERY = extend( {}, GETBACKUPSOPTIONS, - NEXTPAGEREQUEST + NEXTPAGEREQUEST, ); instance.request = (config, callback) => { callback(...REQUEST_RESPONSE_ARGS); @@ -1575,7 +1575,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); instance.requestStream = config => { @@ -1610,7 +1610,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); delete expectedReqOpts.gaxOptions; @@ -1703,7 +1703,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); instance.request = config => { @@ -1737,7 +1737,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); instance.request = config => { @@ -1784,7 +1784,7 @@ describe('Instance', () => { const EXPECTEDNEXTQUERY = extend( {}, GETBACKUPOPSOPTIONS, - NEXTPAGEREQUEST + NEXTPAGEREQUEST, ); instance.request = (config, callback) => { callback(...RESPONSE); @@ -1841,7 +1841,7 @@ describe('Instance', () => { { 
parent: instance.formattedName_, }, - {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken} + {pageSize: gaxOptions.pageSize, pageToken: gaxOptions.pageToken}, ); instance.request = config => { @@ -1875,7 +1875,7 @@ describe('Instance', () => { { parent: instance.formattedName_, }, - {pageSize: optionsPageSize, pageToken: optionsPageToken} + {pageSize: optionsPageSize, pageToken: optionsPageToken}, ); instance.request = config => { @@ -1922,7 +1922,7 @@ describe('Instance', () => { const EXPECTEDNEXTQUERY = extend( {}, GETDATABASEOPSOPTIONS, - NEXTPAGEREQUEST + NEXTPAGEREQUEST, ); instance.request = (config, callback) => { callback(...RESPONSE); diff --git a/test/mockserver/mockdatabaseadmin.ts b/test/mockserver/mockdatabaseadmin.ts index b44b329ea..7b2e5954c 100644 --- a/test/mockserver/mockdatabaseadmin.ts +++ b/test/mockserver/mockdatabaseadmin.ts @@ -31,7 +31,7 @@ const PROTO_DIR = const GAX_PROTO_DIR = path.join( path.dirname(require.resolve('google-gax')), '..', - 'protos' + 'protos', ); /** @@ -74,7 +74,7 @@ export class MockDatabaseAdmin { value: v1.CreateDatabaseMetadata.encode( v1.CreateDatabaseMetadata.create({ database: MockDatabaseAdmin.TEST_DATABASE.name, - }) + }), ).finish(), }), }); @@ -89,7 +89,7 @@ export class MockDatabaseAdmin { value: v1.CreateDatabaseMetadata.encode( v1.CreateDatabaseMetadata.create({ database: MockDatabaseAdmin.PROD_DATABASE.name, - }) + }), ).finish(), }), }); @@ -122,7 +122,7 @@ export class MockDatabaseAdmin { v1.ListDatabasesRequest, v1.ListDatabasesResponse >, - callback: v1.DatabaseAdmin.ListDatabasesCallback + callback: v1.DatabaseAdmin.ListDatabasesCallback, ) { callback( null, @@ -131,7 +131,7 @@ export class MockDatabaseAdmin { MockDatabaseAdmin.TEST_DATABASE, MockDatabaseAdmin.PROD_DATABASE, ], - }) + }), ); } @@ -140,7 +140,7 @@ export class MockDatabaseAdmin { v1.ListDatabaseOperationsRequest, v1.ListDatabaseOperationsResponse >, - callback: v1.DatabaseAdmin.ListDatabaseOperationsCallback + callback: 
v1.DatabaseAdmin.ListDatabaseOperationsCallback, ) { callback( null, @@ -149,13 +149,13 @@ export class MockDatabaseAdmin { MockDatabaseAdmin.CREATE_TEST_DATABASE_OPERATION, MockDatabaseAdmin.CREATE_PROD_DATABASE_OPERATION, ], - }) + }), ); } createDatabase( call: grpc.ServerUnaryCall, - callback: v1.DatabaseAdmin.CreateDatabaseCallback + callback: v1.DatabaseAdmin.CreateDatabaseCallback, ) { let name = call.request!.createStatement.replace('CREATE DATABASE ', ''); name = name.substring(1, name.length - 1); @@ -166,7 +166,7 @@ export class MockDatabaseAdmin { const metadataBuffer = v1.CreateDatabaseMetadata.encode( v1.CreateDatabaseMetadata.create({ database: database.name, - }) + }), ).finish(); const databaseBuffer = v1.Database.encode(database).finish(); callback( @@ -180,60 +180,60 @@ export class MockDatabaseAdmin { response: Any.create({ value: databaseBuffer, }), - }) + }), ); } getDatabase( call: grpc.ServerUnaryCall, - callback: v1.DatabaseAdmin.GetDatabaseCallback + callback: v1.DatabaseAdmin.GetDatabaseCallback, ) { callback(createUnimplementedError('GetDatabase is not yet implemented')); } updateDatabaseDdl( call: grpc.ServerUnaryCall, - callback: v1.DatabaseAdmin.UpdateDatabaseDdlCallback + callback: v1.DatabaseAdmin.UpdateDatabaseDdlCallback, ) { callback( - createUnimplementedError('UpdateDatabaseDdl is not yet implemented') + createUnimplementedError('UpdateDatabaseDdl is not yet implemented'), ); } dropDatabase( call: grpc.ServerUnaryCall, - callback: v1.DatabaseAdmin.DropDatabaseCallback + callback: v1.DatabaseAdmin.DropDatabaseCallback, ) { callback(createUnimplementedError('DropDatabase is not yet implemented')); } getDatabaseDdl( call: grpc.ServerUnaryCall, - callback: v1.DatabaseAdmin.GetDatabaseDdlCallback + callback: v1.DatabaseAdmin.GetDatabaseDdlCallback, ) { callback(createUnimplementedError('GetDatabaseDdl is not yet implemented')); } setIamPolicy( call: grpc.ServerUnaryCall, - callback: iam.IAMPolicy.SetIamPolicyCallback + callback: 
iam.IAMPolicy.SetIamPolicyCallback, ) { callback(createUnimplementedError('SetIamPolicy is not yet implemented')); } getIamPolicy( call: grpc.ServerUnaryCall, - callback: iam.IAMPolicy.GetIamPolicyCallback + callback: iam.IAMPolicy.GetIamPolicyCallback, ) { callback(createUnimplementedError('GetIamPolicy is not yet implemented')); } testIamPermissions( call: grpc.ServerUnaryCall, - callback: iam.IAMPolicy.TestIamPermissionsCallback + callback: iam.IAMPolicy.TestIamPermissionsCallback, ) { callback( - createUnimplementedError('TestIamPermissions is not yet implemented') + createUnimplementedError('TestIamPermissions is not yet implemented'), ); } } @@ -243,7 +243,7 @@ export class MockDatabaseAdmin { * 1. Two Databases: 'projects/mock-project/instances/test/databases/test' and 'projects/mock-project/instances/prod/databases/prod'. */ export function createMockDatabaseAdmin( - server: grpc.Server + server: grpc.Server, ): MockDatabaseAdmin { const mock = MockDatabaseAdmin.create(); server.addService(databaseAdminProtoDescriptor.DatabaseAdmin.service, { diff --git a/test/mockserver/mockinstanceadmin.ts b/test/mockserver/mockinstanceadmin.ts index aa9f2616c..c773f7da4 100644 --- a/test/mockserver/mockinstanceadmin.ts +++ b/test/mockserver/mockinstanceadmin.ts @@ -32,7 +32,7 @@ const PROTO_DIR = const GAX_PROTO_DIR = path.join( path.dirname(require.resolve('google-gax')), '..', - 'protos' + 'protos', ); /** @@ -113,27 +113,27 @@ export class MockInstanceAdmin { v1.ListInstanceConfigsRequest, v1.ListInstanceConfigsResponse >, - callback: v1.InstanceAdmin.ListInstanceConfigsCallback + callback: v1.InstanceAdmin.ListInstanceConfigsCallback, ) { callback( null, v1.ListInstanceConfigsResponse.create({ instanceConfigs: [MockInstanceAdmin.TEST_INSTANCE_CONFIG], - }) + }), ); } getInstanceConfig( call: grpc.ServerUnaryCall, - callback: v1.InstanceAdmin.GetInstanceConfigCallback + callback: v1.InstanceAdmin.GetInstanceConfigCallback, ) { if (call.request!.name === 
TEST_INSTANCE_CONFIG_NAME) { callback(null, MockInstanceAdmin.TEST_INSTANCE_CONFIG); } else { callback( MockInstanceAdmin.createNotFoundError( - `InstanceConfig not found: ${call.request!.name}` - ) + `InstanceConfig not found: ${call.request!.name}`, + ), ); } } @@ -143,13 +143,13 @@ export class MockInstanceAdmin { v1.ListInstancesRequest, v1.ListInstancesResponse >, - callback: v1.InstanceAdmin.ListInstancesCallback + callback: v1.InstanceAdmin.ListInstancesCallback, ) { let instances: google.spanner.admin.instance.v1.IInstance[] = []; if ( !call.request!.filter || call.request!.filter.includes( - `name:${MockInstanceAdmin.TEST_INSTANCE.name}` + `name:${MockInstanceAdmin.TEST_INSTANCE.name}`, ) ) { instances.push(MockInstanceAdmin.TEST_INSTANCE); @@ -157,7 +157,7 @@ export class MockInstanceAdmin { if ( !call.request!.filter || call.request!.filter.includes( - `name:${MockInstanceAdmin.PROD_INSTANCE.name}` + `name:${MockInstanceAdmin.PROD_INSTANCE.name}`, ) ) { instances.push(MockInstanceAdmin.PROD_INSTANCE); @@ -173,13 +173,13 @@ export class MockInstanceAdmin { null, v1.ListInstancesResponse.create({ instances, - }) + }), ); } getInstance( call: grpc.ServerUnaryCall, - callback: v1.InstanceAdmin.GetInstanceCallback + callback: v1.InstanceAdmin.GetInstanceCallback, ) { if (call.request!.name === TEST_INSTANCE_NAME) { callback(null, MockInstanceAdmin.TEST_INSTANCE); @@ -188,15 +188,15 @@ export class MockInstanceAdmin { } else { callback( MockInstanceAdmin.createNotFoundError( - `Instance not found: ${call.request!.name}` - ) + `Instance not found: ${call.request!.name}`, + ), ); } } createInstance( call: grpc.ServerUnaryCall, - callback: v1.InstanceAdmin.CreateInstanceCallback + callback: v1.InstanceAdmin.CreateInstanceCallback, ) { const instance = v1.Instance.create({ name: `${call.request!.parent}/instances/${call.request!.instanceId}`, @@ -222,7 +222,7 @@ export class MockInstanceAdmin { instance, startTime: now(), endTime: now(), - }) + }), ).finish(); 
const instanceBuffer = v1.Instance.encode(instance).finish(); callback( @@ -236,13 +236,13 @@ export class MockInstanceAdmin { response: Any.create({ value: instanceBuffer, }), - }) + }), ); } updateInstance( call: grpc.ServerUnaryCall, - callback: v1.InstanceAdmin.UpdateInstanceCallback + callback: v1.InstanceAdmin.UpdateInstanceCallback, ) { if (call.request!.instance) { if ( @@ -254,7 +254,7 @@ export class MockInstanceAdmin { instance: call.request!.instance, startTime: now(), endTime: now(), - }) + }), ).finish(); callback( null, @@ -267,28 +267,28 @@ export class MockInstanceAdmin { response: Any.create({ value: v1.Instance.encode(call.request!.instance).finish(), }), - }) + }), ); } else { callback( MockInstanceAdmin.createNotFoundError( - `Instance not found: ${call.request!.instance.name}` - ) + `Instance not found: ${call.request!.instance.name}`, + ), ); } } else { callback( MockInstanceAdmin.createServiceError( 'Missing instance in UpdateInstance request', - grpc.status.INVALID_ARGUMENT - ) + grpc.status.INVALID_ARGUMENT, + ), ); } } deleteInstance( call: grpc.ServerUnaryCall, - callback: v1.InstanceAdmin.DeleteInstanceCallback + callback: v1.InstanceAdmin.DeleteInstanceCallback, ) { if ( call.request!.name === PROD_INSTANCE_NAME || @@ -298,32 +298,32 @@ export class MockInstanceAdmin { } else { callback( MockInstanceAdmin.createNotFoundError( - `Instance not found: ${call.request!.name}` - ) + `Instance not found: ${call.request!.name}`, + ), ); } } setIamPolicy( call: grpc.ServerUnaryCall, - callback: iam.IAMPolicy.SetIamPolicyCallback + callback: iam.IAMPolicy.SetIamPolicyCallback, ) { callback(createUnimplementedError('SetIamPolicy is not yet implemented')); } getIamPolicy( call: grpc.ServerUnaryCall, - callback: iam.IAMPolicy.GetIamPolicyCallback + callback: iam.IAMPolicy.GetIamPolicyCallback, ) { callback(createUnimplementedError('GetIamPolicy is not yet implemented')); } testIamPermissions( call: grpc.ServerUnaryCall, - callback: 
iam.IAMPolicy.TestIamPermissionsCallback + callback: iam.IAMPolicy.TestIamPermissionsCallback, ) { callback( - createUnimplementedError('TestIamPermissions is not yet implemented') + createUnimplementedError('TestIamPermissions is not yet implemented'), ); } } @@ -334,7 +334,7 @@ export class MockInstanceAdmin { * 2. Two Instances: 'projects/mock-project/instances/test' and 'projects/mock-project/instances/prod'. */ export function createMockInstanceAdmin( - server: grpc.Server + server: grpc.Server, ): MockInstanceAdmin { const mock = MockInstanceAdmin.create(); server.addService(instanceAdminProtoDescriptor.InstanceAdmin.service, { diff --git a/test/mockserver/mockspanner.ts b/test/mockserver/mockspanner.ts index 3033d8be1..d27e2e60c 100644 --- a/test/mockserver/mockspanner.ts +++ b/test/mockserver/mockspanner.ts @@ -30,6 +30,7 @@ import Status = google.rpc.Status; import Any = google.protobuf.Any; import QueryMode = google.spanner.v1.ExecuteSqlRequest.QueryMode; import NullValue = google.protobuf.NullValue; +import {ExecuteSqlRequest, ReadRequest} from '../../src/transaction'; const PROTO_PATH = 'spanner.proto'; const IMPORT_PATH = __dirname + '/../../../protos'; @@ -37,7 +38,7 @@ const PROTO_DIR = __dirname + '/../../../protos/google/spanner/v1'; const GAX_PROTO_DIR = path.join( path.dirname(require.resolve('google-gax')), '..', - 'protos' + 'protos', ); /** @@ -56,6 +57,17 @@ const spannerProtoDescriptor = protoDescriptor['google']['spanner']['v1']; const RETRY_INFO_BIN = 'google.rpc.retryinfo-bin'; const RETRY_INFO_TYPE = 'type.googleapis.com/google.rpc.retryinfo'; +/** + * Specifies the type of result that a mock server should return for a `ReadRequest`. + * + * `ERROR`: Simulates an error response from the server. + * `RESULT_SET`: Simulates a successful response with a result set. + */ +enum ReadRequestResultType { + ERROR, + RESULT_SET, +} + /** * The type of result for an SQL statement that the mock server should return. 
*/ @@ -65,6 +77,87 @@ enum StatementResultType { UPDATE_COUNT, } +/** + * Represents the result of executing a `ReadRequest` on a mock Spanner server. + */ +export class ReadRequestResult { + private readonly _type: ReadRequestResultType; + + /** + * The type of result this instance represents. + */ + get type(): ReadRequestResultType { + return this._type; + } + + private readonly _error: Error | null; + + /** + * The error associated with the result, if any. + * + * @throws If the result type is not `ERROR`. + */ + get error(): Error { + if (this._error) { + return this._error; + } + throw new Error('The ReadRequestResult does not contain an Error'); + } + + private readonly _resultSet: + | protobuf.ResultSet + | protobuf.PartialResultSet[] + | null; + + /** + * The result set associated with the result, if any. + * + * Can be a full `ResultSet` or a stream of `PartialResultSet`s. + * + * @throws If the result type is not `RESULT_SET`. + */ + get resultSet(): protobuf.ResultSet | protobuf.PartialResultSet[] { + if (this._resultSet) { + return this._resultSet; + } + throw new Error('The ReadRequestResult does not contain a ResultSet'); + } + + private constructor( + type: ReadRequestResultType, + error: Error | null, + resultSet: protobuf.ResultSet | protobuf.PartialResultSet[] | null, + ) { + this._type = type; + this._error = error; + this._resultSet = resultSet; + } + + /** + * Creates a `ReadRequestResult` that simulates an error response. + * + * @param error The error to return for the read request. + * @returns A `ReadRequestResult` instance representing an error. + */ + static error(error: Error): ReadRequestResult { + return new ReadRequestResult(ReadRequestResultType.ERROR, error, null); + } + + /** + * Create a ReadRequestResult that will return a ResultSet or a stream of PartialResultSets. + * @param resultSet The result set to return. 
+ */ + static resultSet( + resultSet: protobuf.ResultSet | protobuf.PartialResultSet[], + ): ReadRequestResult { + return new ReadRequestResult( + ReadRequestResultType.RESULT_SET, + null, + resultSet, + ); + } +} + /** * StatementResult contains the result for an SQL statement on the mock server. */ @@ -102,7 +195,7 @@ export class StatementResult { type: StatementResultType, error: Error | null, resultSet: protobuf.ResultSet | protobuf.PartialResultSet[] | null, - updateCount: number | null + updateCount: number | null, ) { this._type = type; this._error = error; @@ -123,13 +216,13 @@ export class StatementResult { * @param resultSet The result set to return. */ static resultSet( - resultSet: protobuf.ResultSet | protobuf.PartialResultSet[] + resultSet: protobuf.ResultSet | protobuf.PartialResultSet[], ): StatementResult { return new StatementResult( StatementResultType.RESULT_SET, null, resultSet, - null + null, ); } @@ -143,7 +236,7 @@ export class StatementResult { StatementResultType.UPDATE_COUNT, error || null, null, - updateCount + updateCount, ); } } @@ -245,6 +338,10 @@ export class MockSpanner { string, StatementResult >(); + private readRequestResults: Map = new Map< + string, + ReadRequestResult + >(); private executionTimes: Map = new Map< string, SimulatedExecutionTime @@ -252,6 +349,7 @@ export class MockSpanner { private constructor() { this.putStatementResult = this.putStatementResult.bind(this); + this.putReadRequestResult = this.putReadRequestResult.bind(this); this.batchCreateSessions = this.batchCreateSessions.bind(this); this.createSession = this.createSession.bind(this); this.deleteSession = this.deleteSession.bind(this); @@ -264,9 +362,11 @@ export class MockSpanner { this.executeBatchDml = this.executeBatchDml.bind(this); this.executeStreamingSql = this.executeStreamingSql.bind(this); + this.partitionQuery = this.partitionQuery.bind(this); this.read = this.read.bind(this); this.streamingRead = this.streamingRead.bind(this); + 
this.partitionRead = this.partitionRead.bind(this); } /** @@ -295,6 +395,21 @@ export class MockSpanner { return this.metadata; } + /** + * Register the expected result for a given `ReadRequest` on the mock server. + * + * @param query The `ReadRequest` to associate with the result. + * @param result The `ReadRequestResult` to return when the `query` is received. + */ + putReadRequestResult(query: ReadRequest, result: ReadRequestResult) { + const keySet = JSON.stringify( + query.keySet ?? {}, + Object.keys(query.keySet ?? {}).sort(), + ); + const key = `${query.table}|${keySet}`; + this.readRequestResults.set(key, result); + } + /** * Registers a result for an SQL statement on the server. * @param sql The SQL statement that should return the result. @@ -344,7 +459,7 @@ export class MockSpanner { */ private newSession( database: string, - multiplexed?: boolean + multiplexed?: boolean, ): protobuf.Session { const id = this.sessionCounter++; const name = `${database}/sessions/${id}`; @@ -365,7 +480,7 @@ export class MockSpanner { } private static createTransactionNotFoundError( - name: string + name: string, ): grpc.ServiceError { const error = new Error(`Transaction not found: ${name}`); return Object.assign(error, { @@ -374,7 +489,7 @@ export class MockSpanner { } private static createTransactionAbortedError( - name: string + name: string, ): grpc.ServiceError { const error = Object.assign(new Error(`Transaction aborted: ${name}`), { code: grpc.status.ABORTED, @@ -421,7 +536,7 @@ export class MockSpanner { private shiftStreamError( functionName: string, - index: number + index: number, ): MockError | undefined { const execTime = this.executionTimes.get(functionName); if (execTime) { @@ -446,7 +561,7 @@ export class MockSpanner { protobuf.BatchCreateSessionsRequest, protobuf.BatchCreateSessionsResponse >, - callback: protobuf.Spanner.BatchCreateSessionsCallback + callback: protobuf.Spanner.BatchCreateSessionsCallback, ) { this.pushRequest(call.request!, 
call.metadata); this.simulateExecutionTime(this.batchCreateSessions.name) @@ -457,7 +572,7 @@ export class MockSpanner { } callback( null, - protobuf.BatchCreateSessionsResponse.create({session: sessions}) + protobuf.BatchCreateSessionsResponse.create({session: sessions}), ); }) .catch(err => { @@ -467,7 +582,7 @@ export class MockSpanner { createSession( call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.CreateSessionCallback + callback: protobuf.Spanner.CreateSessionCallback, ) { this.pushRequest(call.request!, call.metadata); this.simulateExecutionTime(this.createSession.name) @@ -476,8 +591,8 @@ export class MockSpanner { null, this.newSession( call.request!.database, - call.request!.session?.multiplexed ?? false - ) + call.request!.session?.multiplexed ?? false, + ), ); }) .catch(err => { @@ -487,17 +602,21 @@ export class MockSpanner { getSession( call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.GetSessionCallback + callback: protobuf.Spanner.GetSessionCallback, ) { this.pushRequest(call.request!, call.metadata); - this.simulateExecutionTime(this.getSession.name).then(() => { - const session = this.sessions[call.request!.name]; - if (session) { - callback(null, session); - } else { - callback(MockSpanner.createSessionNotFoundError(call.request!.name)); - } - }); + this.simulateExecutionTime(this.getSession.name) + .then(() => { + const session = this.sessions[call.request!.name]; + if (session) { + callback(null, session); + } else { + callback(MockSpanner.createSessionNotFoundError(call.request!.name)); + } + }) + .catch(err => { + callback(err); + }); } listSessions( @@ -505,19 +624,23 @@ export class MockSpanner { protobuf.ListSessionsRequest, protobuf.ListSessionsResponse >, - callback: protobuf.Spanner.ListSessionsCallback + callback: protobuf.Spanner.ListSessionsCallback, ) { this.pushRequest(call.request!, call.metadata); - this.simulateExecutionTime(this.listSessions.name).then(() => { - callback( - null, - 
protobuf.ListSessionsResponse.create({ - sessions: Array.from(this.sessions.values()).filter(session => { - return session.name.startsWith(call.request!.database); + this.simulateExecutionTime(this.listSessions.name) + .then(() => { + callback( + null, + protobuf.ListSessionsResponse.create({ + sessions: Array.from(this.sessions.values()).filter(session => { + return session.name.startsWith(call.request!.database); + }), }), - }) - ); - }); + ); + }) + .catch(err => { + callback(err); + }); } deleteSession( @@ -525,7 +648,7 @@ export class MockSpanner { protobuf.DeleteSessionRequest, google.protobuf.Empty >, - callback: protobuf.Spanner.DeleteSessionCallback + callback: protobuf.Spanner.DeleteSessionCallback, ) { this.pushRequest(call.request!, call.metadata); if (this.sessions.delete(call.request!.name)) { @@ -537,7 +660,7 @@ export class MockSpanner { executeSql( call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.ExecuteSqlCallback + callback: protobuf.Spanner.ExecuteSqlCallback, ) { this.pushRequest(call.request!, call.metadata); callback(createUnimplementedError('ExecuteSql is not yet implemented')); @@ -547,7 +670,7 @@ export class MockSpanner { call: grpc.ServerWritableStream< protobuf.ExecuteSqlRequest, protobuf.PartialResultSet - > + >, ) { this.pushRequest(call.request!, call.metadata); this.simulateExecutionTime(this.executeStreamingSql.name) @@ -560,7 +683,7 @@ export class MockSpanner { call.sendMetadata(new Metadata()); call.emit( 'error', - MockSpanner.createTransactionAbortedError(`${fullTransactionId}`) + MockSpanner.createTransactionAbortedError(`${fullTransactionId}`), ); call.end(); return; @@ -571,7 +694,7 @@ export class MockSpanner { if (call.request!.transaction?.begin) { const txn = this._updateTransaction( call.request!.session, - call.request!.transaction.begin + call.request!.transaction.begin, ); if (txn instanceof Error) { call.sendMetadata(new Metadata()); @@ -593,7 +716,7 @@ export class MockSpanner { } else { partialResultSets 
= MockSpanner.toPartialResultSets( res.resultSet, - call.request!.queryMode + call.request!.queryMode, ); } // Resume on the next index after the last one seen by the client. @@ -609,7 +732,7 @@ export class MockSpanner { ) { const streamErr = this.shiftStreamError( this.executeStreamingSql.name, - index + index, ); if (streamErr) { call.sendMetadata(new Metadata()); @@ -622,12 +745,12 @@ export class MockSpanner { case StatementResultType.UPDATE_COUNT: call.write( MockSpanner.emptyPartialResultSet( - Buffer.from('1'.padStart(8, '0')) - ) + Buffer.from('1'.padStart(8, '0')), + ), ); streamErr = this.shiftStreamError( this.executeStreamingSql.name, - 1 + 1, ); if (streamErr) { call.sendMetadata(new Metadata()); @@ -643,13 +766,13 @@ export class MockSpanner { default: call.emit( 'error', - new Error(`Unknown StatementResult type: ${res.type}`) + new Error(`Unknown StatementResult type: ${res.type}`), ); } } else { call.emit( 'error', - new Error(`There is no result registered for ${call.request!.sql}`) + new Error(`There is no result registered for ${call.request!.sql}`), ); } call.end(); @@ -671,7 +794,7 @@ export class MockSpanner { queryMode: | google.spanner.v1.ExecuteSqlRequest.QueryMode | keyof typeof google.spanner.v1.ExecuteSqlRequest.QueryMode, - rowsPerPartialResultSet = 1 + rowsPerPartialResultSet = 1, ): protobuf.PartialResultSet[] { const res: protobuf.PartialResultSet[] = []; let first = true; @@ -705,7 +828,7 @@ export class MockSpanner { } private static emptyPartialResultSet( - resumeToken: Uint8Array + resumeToken: Uint8Array, ): protobuf.PartialResultSet { return protobuf.PartialResultSet.create({ resumeToken, @@ -713,7 +836,7 @@ export class MockSpanner { } private static toPartialResultSet( - rowCount: number + rowCount: number, ): protobuf.PartialResultSet { const stats = { rowCountExact: rowCount, @@ -739,7 +862,7 @@ export class MockSpanner { protobuf.ExecuteBatchDmlRequest, protobuf.ExecuteBatchDmlResponse >, - callback: 
protobuf.Spanner.ExecuteBatchDmlCallback + callback: protobuf.Spanner.ExecuteBatchDmlCallback, ) { this.pushRequest(call.request!, call.metadata); this.simulateExecutionTime(this.executeBatchDml.name) @@ -750,7 +873,7 @@ export class MockSpanner { }`; if (this.abortedTransactions.has(fullTransactionId)) { callback( - MockSpanner.createTransactionAbortedError(`${fullTransactionId}`) + MockSpanner.createTransactionAbortedError(`${fullTransactionId}`), ); return; } @@ -792,7 +915,7 @@ export class MockSpanner { if (call.request!.transaction!.begin && i === 0) { const transaction = this._updateTransaction( call.request!.session, - call.request?.transaction!.begin + call.request?.transaction!.begin, ); if (transaction instanceof Error) { callback(transaction); @@ -821,12 +944,12 @@ export class MockSpanner { break; default: callback( - new Error(`Unknown StatementResult type: ${res.type}`) + new Error(`Unknown StatementResult type: ${res.type}`), ); } } else { callback( - new Error(`There is no result registered for ${statement.sql}`) + new Error(`There is no result registered for ${statement.sql}`), ); } } @@ -835,7 +958,7 @@ export class MockSpanner { ExecuteBatchDmlResponse.create({ resultSets: results, status: statementStatus, - }) + }), ); }) .catch(err => { @@ -845,7 +968,7 @@ export class MockSpanner { read( call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.ReadCallback + callback: protobuf.Spanner.ReadCallback, ) { this.pushRequest(call.request!, call.metadata); callback(createUnimplementedError('Read is not yet implemented')); @@ -853,11 +976,108 @@ export class MockSpanner { streamingRead(call: grpc.ServerWritableStream) { this.pushRequest(call.request!, call.metadata); - call.emit( - 'error', - createUnimplementedError('StreamingRead is not yet implemented') - ); - call.end(); + + this.simulateExecutionTime(this.streamingRead.name) + .then(() => { + if (call.request!.transaction) { + const fullTransactionId = `${call.request!.session}/transactions/${ + 
call.request!.transaction.id + }`; + if (this.abortedTransactions.has(fullTransactionId)) { + call.sendMetadata(new Metadata()); + call.emit( + 'error', + MockSpanner.createTransactionAbortedError(`${fullTransactionId}`), + ); + call.end(); + return; + } + } + const keySet = JSON.stringify( + call.request!.keySet ?? {}, + Object.keys(call.request!.keySet ?? {}).sort(), + ); + const key = `${call.request!.table}|${keySet}`; + const res = this.readRequestResults.get(key); + if (res) { + if (call.request!.transaction?.begin) { + const txn = this._updateTransaction( + call.request!.session, + call.request!.transaction.begin, + ); + if (txn instanceof Error) { + call.sendMetadata(new Metadata()); + call.emit('error', txn); + call.end(); + return; + } + if (res.type === ReadRequestResultType.RESULT_SET) { + call.sendMetadata(new Metadata()); + (res.resultSet as protobuf.ResultSet).metadata!.transaction = txn; + } + } + let partialResultSets; + let resumeIndex; + switch (res.type) { + case ReadRequestResultType.RESULT_SET: + if (Array.isArray(res.resultSet)) { + partialResultSets = res.resultSet; + } else { + partialResultSets = MockSpanner.toPartialResultSets( + res.resultSet, + 'NORMAL', + ); + } + // Resume on the next index after the last one seen by the client. + resumeIndex = + call.request!.resumeToken.length === 0 + ? 
0 + : parseInt( + Buffer.from(call.request!.resumeToken).toString(), + 10, + ) + 1; + for ( + let index = resumeIndex; + index < partialResultSets.length; + index++ + ) { + const streamErr = this.shiftStreamError( + this.streamingRead.name, + index, + ); + if (streamErr) { + call.sendMetadata(new Metadata()); + call.emit('error', streamErr); + break; + } + call.write(partialResultSets[index]); + } + break; + case ReadRequestResultType.ERROR: + call.sendMetadata(new Metadata()); + call.emit('error', res.error); + break; + default: + call.emit( + 'error', + new Error(`Unknown ReadRequestResult type: ${res.type}`), + ); + } + } else { + call.emit( + 'error', + new Error( + `There is no result registered for ${call.request!.table}`, + ), + ); + } + call.end(); + }) + .catch(err => { + call.sendMetadata(new Metadata()); + call.emit('error', err); + call.end(); + }); } beginTransaction( @@ -865,14 +1085,14 @@ export class MockSpanner { protobuf.BeginTransactionRequest, protobuf.Transaction >, - callback: protobuf.Spanner.BeginTransactionCallback + callback: protobuf.Spanner.BeginTransactionCallback, ) { this.pushRequest(call.request!, call.metadata); this.simulateExecutionTime(this.beginTransaction.name) .then(() => { const res = this._updateTransaction( call.request!.session, - call.request!.options + call.request!.options, ); if (res instanceof Error) { callback(res); @@ -887,7 +1107,7 @@ export class MockSpanner { commit( call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.CommitCallback + callback: protobuf.Spanner.CommitCallback, ) { this.pushRequest(call.request!, call.metadata); this.simulateExecutionTime(this.commit.name) @@ -897,7 +1117,7 @@ export class MockSpanner { }`; if (this.abortedTransactions.has(fullTransactionId)) { callback( - MockSpanner.createTransactionAbortedError(`${fullTransactionId}`) + MockSpanner.createTransactionAbortedError(`${fullTransactionId}`), ); return; } @@ -916,11 +1136,11 @@ export class MockSpanner { null, 
protobuf.CommitResponse.create({ commitTimestamp: now(), - }) + }), ); } else { callback( - MockSpanner.createTransactionNotFoundError(fullTransactionId) + MockSpanner.createTransactionNotFoundError(fullTransactionId), ); } } else if (call.request!.singleUseTransaction) { @@ -928,12 +1148,12 @@ export class MockSpanner { null, protobuf.CommitResponse.create({ commitTimestamp: now(), - }) + }), ); } } else { callback( - MockSpanner.createSessionNotFoundError(call.request!.session) + MockSpanner.createSessionNotFoundError(call.request!.session), ); } }) @@ -944,7 +1164,7 @@ export class MockSpanner { rollback( call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.RollbackCallback + callback: protobuf.Spanner.RollbackCallback, ) { this.pushRequest(call.request!, call.metadata); const session = this.sessions.get(call.request!.session); @@ -966,24 +1186,38 @@ export class MockSpanner { } partitionQuery( - call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.PartitionQueryCallback + call: grpc.ServerUnaryCall, + callback: protobuf.Spanner.PartitionQueryCallback, ) { this.pushRequest(call.request!, call.metadata); - callback(createUnimplementedError('PartitionQuery is not yet implemented')); + this.simulateExecutionTime(this.partitionQuery.name) + .then(() => { + const response = protobuf.PartitionResponse.create({ + partitions: [{partitionToken: Buffer.from('mock-token')}], + }); + callback(null, response); + }) + .catch(err => callback(err)); } partitionRead( - call: grpc.ServerUnaryCall, - callback: protobuf.Spanner.PartitionReadCallback + call: grpc.ServerUnaryCall, + callback: protobuf.Spanner.PartitionReadCallback, ) { this.pushRequest(call.request!, call.metadata); - callback(createUnimplementedError('PartitionQuery is not yet implemented')); + this.simulateExecutionTime(this.partitionRead.name) + .then(() => { + const response = protobuf.PartitionResponse.create({ + partitions: [{partitionToken: Buffer.from('mock-token')}], + }); + callback(null, response); 
+ }) + .catch(err => callback(err)); } private _updateTransaction( sessionName: string, - options: google.spanner.v1.ITransactionOptions | null | undefined + options: google.spanner.v1.ITransactionOptions | null | undefined, ): google.spanner.v1.Transaction | ServiceError { const session = this.sessions.get(sessionName); if (!session) { @@ -1033,6 +1267,44 @@ export function createMockSpanner(server: grpc.Server): MockSpanner { return mock; } +/** + * Creates a simple result set containing the following data: + * + * |-------------------------------| + * | ID (STRING) | VALUE (STRING) | + * |-------------------------------| + * | 'a' | 'Alpha' | + * | 'b' | 'Beta' | + * | 'c' | 'Gamma' | + * -------------------------------- + * + * This ResultSet can be used to mock read operations in a mock Spanner server. + */ +export function createReadRequestResultSet(): protobuf.ResultSet { + const fields = [ + protobuf.StructType.Field.create({ + name: 'ID', + type: protobuf.Type.create({code: protobuf.TypeCode.STRING}), + }), + protobuf.StructType.Field.create({ + name: 'VALUE', + type: protobuf.Type.create({code: protobuf.TypeCode.STRING}), + }), + ]; + const metadata = new protobuf.ResultSetMetadata({ + rowType: new protobuf.StructType({fields}), + }); + + return protobuf.ResultSet.create({ + metadata, + rows: [ + {values: [{stringValue: 'a'}, {stringValue: 'Alpha'}]}, + {values: [{stringValue: 'b'}, {stringValue: 'Beta'}]}, + {values: [{stringValue: 'c'}, {stringValue: 'Gamma'}]}, + ], + }); +} + /** * Creates a simple result set containing the following data: * diff --git a/test/multiplexed-session.ts b/test/multiplexed-session.ts index ef71f7304..ac960ad19 100644 --- a/test/multiplexed-session.ts +++ b/test/multiplexed-session.ts @@ -161,7 +161,7 @@ describe('MultiplexedSession', () => { multiplexedSession.on(MUX_SESSION_AVAILABLE, () => { assert.strictEqual( multiplexedSession._multiplexedSession, - fakeMuxSession + fakeMuxSession, ); done(); }); @@ -260,14 +260,14 @@ 
describe('MultiplexedSession', () => { setTimeout(() => multiplexedSession.emit(MUX_SESSION_AVAILABLE), 100); assert.strictEqual( multiplexedSession.listenerCount(MUX_SESSION_AVAILABLE), - 1 + 1, ); try { await promise; } finally { assert.strictEqual( multiplexedSession.listenerCount(MUX_SESSION_AVAILABLE), - 0 + 0, ); } }); @@ -277,11 +277,11 @@ describe('MultiplexedSession', () => { const promise = multiplexedSession._getSession(); setTimeout( () => multiplexedSession.emit(MUX_SESSION_CREATE_ERROR, error), - 100 + 100, ); assert.strictEqual( multiplexedSession.listenerCount(MUX_SESSION_CREATE_ERROR), - 1 + 1, ); try { await promise; @@ -289,7 +289,7 @@ describe('MultiplexedSession', () => { assert.strictEqual(e, error); assert.strictEqual( multiplexedSession.listenerCount(MUX_SESSION_CREATE_ERROR), - 0 + 0, ); } }); diff --git a/test/partial-result-stream.ts b/test/partial-result-stream.ts index 0154f048c..37159451b 100644 --- a/test/partial-result-stream.ts +++ b/test/partial-result-stream.ts @@ -251,7 +251,7 @@ describe('PartialResultStream', () => { concat(rows => { assert.strictEqual(rows.length, 11); done(); - }) + }), ); }); @@ -286,7 +286,7 @@ describe('PartialResultStream', () => { requestFnStub.onCall(1).callsFake(resumeToken => { assert.ok( !resumeToken, - 'Retry should be called with empty resume token' + 'Retry should be called with empty resume token', ); setTimeout(() => { @@ -307,7 +307,7 @@ describe('PartialResultStream', () => { concat(rows => { assert.strictEqual(rows.length, 2); done(); - }) + }), ); }); @@ -399,7 +399,7 @@ describe('PartialResultStream', () => { concat(rows => { assert.strictEqual(rows.length, 4); done(); - }) + }), ); }); diff --git a/test/request_id_header.ts b/test/request_id_header.ts index a55f2f1b2..9eda0d3d2 100644 --- a/test/request_id_header.ts +++ b/test/request_id_header.ts @@ -35,12 +35,12 @@ describe('RequestId', () => { assert.deepStrictEqual( ac0.increment(2), 2, - 'increment should return the added value' + 
'increment should return the added value', ); assert.deepStrictEqual( ac0.value(), 2, - 'increment should have modified the value' + 'increment should have modified the value', ); const ac1 = newAtomicCounter(1); @@ -48,12 +48,12 @@ describe('RequestId', () => { assert.deepStrictEqual( ac1.increment(1 << 27), (1 << 27) + 1, - 'increment should return the added value' + 'increment should return the added value', ); assert.deepStrictEqual( ac1.value(), (1 << 27) + 1, - 'increment should have modified the value' + 'increment should have modified the value', ); done(); }); @@ -82,7 +82,7 @@ describe('RequestId', () => { assert.match( randIdForProcess, /^[0-9A-Fa-f]{8}$/, - `process-id should be a 32-bit hexadecimal number, but was ${randIdForProcess}` + `process-id should be a 32-bit hexadecimal number, but was ${randIdForProcess}`, ); done(); }); @@ -90,7 +90,7 @@ describe('RequestId', () => { it('with attempts', done => { assert.strictEqual( craftRequestId(1, 2, 3, 4), - `1.${randIdForProcess}.1.2.3.4` + `1.${randIdForProcess}.1.2.3.4`, ); done(); }); @@ -154,7 +154,7 @@ describe('RequestId', () => { assert.deepStrictEqual( nextNthRequest(fauxDatabase), 1, - 'Without override, should default to 1' + 'Without override, should default to 1', ); Object.assign(fauxDatabase, { @@ -165,7 +165,7 @@ describe('RequestId', () => { assert.deepStrictEqual( nextNthRequest(fauxDatabase), 4, - 'With override should infer value' + 'With override should infer value', ); }); }); diff --git a/test/session-factory.ts b/test/session-factory.ts index e4f6fea95..786bda168 100644 --- a/test/session-factory.ts +++ b/test/session-factory.ts @@ -104,7 +104,7 @@ describe('SessionFactory', () => { const sessionFactory = new SessionFactory( DATABASE, NAME, - FakePool as {} as db.SessionPoolConstructor + FakePool as {} as db.SessionPoolConstructor, ); assert(sessionFactory.pool_ instanceof FakePool); }); @@ -132,7 +132,7 @@ describe('SessionFactory', () => { it('should create a MultiplexedSession 
object', () => { assert( - sessionFactory.multiplexedSession_ instanceof MultiplexedSession + sessionFactory.multiplexedSession_ instanceof MultiplexedSession, ); }); @@ -193,7 +193,7 @@ describe('SessionFactory', () => { ( sandbox.stub( sessionFactory.multiplexedSession_, - 'getSession' + 'getSession', ) as sinon.SinonStub ).callsFake(callback => callback(null, fakeMuxSession)); sessionFactory.getSession((err, resp) => { @@ -210,7 +210,7 @@ describe('SessionFactory', () => { ( sandbox.stub( sessionFactory.multiplexedSession_, - 'getSession' + 'getSession', ) as sinon.SinonStub ).callsFake(callback => callback(fakeError, null)); sessionFactory.getSession((err, resp) => { @@ -264,7 +264,7 @@ describe('SessionFactory', () => { } catch (error) { assert.strictEqual( (error as ReleaseError).message, - 'Unable to release unknown resource.' + 'Unable to release unknown resource.', ); assert.strictEqual((error as ReleaseError).resource, fakeSession); } diff --git a/test/session-pool.ts b/test/session-pool.ts index 2c0a4dd3c..87e12b971 100644 --- a/test/session-pool.ts +++ b/test/session-pool.ts @@ -359,7 +359,7 @@ describe('SessionPool', () => { assert.strictEqual(err!.name, 'SessionLeakError'); assert.strictEqual( err!.message, - `${fakeLeaks.length} session leak(s) detected.` + `${fakeLeaks.length} session leak(s) detected.`, ); assert.strictEqual(err!.messages, fakeLeaks); done(); @@ -534,7 +534,7 @@ describe('SessionPool', () => { } catch (e) { assert.strictEqual( (e as sp.ReleaseError).message, - 'Unable to release unknown resource.' + 'Unable to release unknown resource.', ); assert.strictEqual((e as sp.ReleaseError).resource, badResource); } @@ -597,7 +597,7 @@ describe('SessionPool', () => { } catch (e) { assert.strictEqual( (e as sp.ReleaseError).message, - 'Database is closed.' 
+ 'Database is closed.', ); } }); @@ -615,7 +615,7 @@ describe('SessionPool', () => { } catch (e) { assert.strictEqual( (e as sp.ReleaseError).message, - 'Timeout occurred while acquiring session.' + 'Timeout occurred while acquiring session.', ); } }); @@ -731,8 +731,8 @@ describe('SessionPool', () => { stub = sandbox.stub(sessionPool, '_createSessions').resolves(); }); - it('should create a single session', () => { - sessionPool._createSession(); + it('should create a single session', async () => { + await sessionPool._createSession(); const [numbers] = stub.lastCall.args; assert.deepStrictEqual(numbers, 1); }); @@ -841,7 +841,7 @@ describe('SessionPool', () => { done(); }); - sessionPool._destroy(fakeSession); + void sessionPool._destroy(fakeSession); }); }); @@ -923,27 +923,27 @@ describe('SessionPool', () => { sessionPool.options.min = 8; }); - it('should create the min number of required sessions', () => { - sessionPool._fill(); + it('should create the min number of required sessions', async () => { + await sessionPool._fill(); const amount = stub.lastCall.args[0]; assert.strictEqual(amount, 8); }); - it('should respect the current size of the pool', () => { + it('should respect the current size of the pool', async () => { inventory.sessions = [createSession(), createSession(), createSession()]; - sessionPool._fill(); + await sessionPool._fill(); const amount = stub.lastCall.args[0]; assert.strictEqual(amount, 5); }); - it('should noop when no sessions are needed', () => { + it('should noop when no sessions are needed', async () => { sessionPool.options.min = 0; - sessionPool._fill(); + await sessionPool._fill(); assert.strictEqual(stub.callCount, 0); }); @@ -1033,7 +1033,7 @@ describe('SessionPool', () => { } catch (e) { assert.strictEqual( (e as sp.ReleaseError).message, - 'No resources available.' 
+ 'No resources available.', ); } }); @@ -1047,7 +1047,7 @@ describe('SessionPool', () => { } catch (e) { assert.strictEqual( (e as sp.ReleaseError).message, - 'Database is closed.' + 'Database is closed.', ); } }); @@ -1075,7 +1075,7 @@ describe('SessionPool', () => { assert(isAround(timeout, end())); assert.strictEqual( (e as sp.ReleaseError).message, - 'Timeout occurred while acquiring session.' + 'Timeout occurred while acquiring session.', ); } }); @@ -1198,12 +1198,12 @@ describe('SessionPool', () => { sandbox.stub(sessionPool, '_borrow'); }); - it('should borrow the session', () => { + it('should borrow the session', async () => { const fakeSession = createSession(); const stub = sessionPool._borrow as sinon.SinonStub; stub.withArgs(fakeSession); - sessionPool._ping(fakeSession); + await sessionPool._ping(fakeSession); assert.strictEqual(stub.callCount, 1); }); @@ -1395,7 +1395,7 @@ describe('SessionPool', () => { assert.strictEqual( events.length > 0, true, - 'Expecting at least 1 event' + 'Expecting at least 1 event', ); // Sort the events by earliest time of occurence. 
@@ -1412,7 +1412,7 @@ describe('SessionPool', () => { assert.deepEqual( gotEventNames, wantEventNames, - `Mismatched events\n\tGot: ${gotEventNames}\n\tWant: ${wantEventNames}` + `Mismatched events\n\tGot: ${gotEventNames}\n\tWant: ${wantEventNames}`, ); done(); diff --git a/test/session.ts b/test/session.ts index 5d67d2e95..fdfdff17c 100644 --- a/test/session.ts +++ b/test/session.ts @@ -191,7 +191,7 @@ describe('Session', () => { assert.strictEqual(resp, apiResponse); done(); - } + }, ); }); @@ -243,7 +243,7 @@ describe('Session', () => { it('should return the name if already formatted', () => { assert.strictEqual( Session.formatName_(DATABASE.formattedName_, PATH), - PATH + PATH, ); }); @@ -305,8 +305,8 @@ describe('Session', () => { config.headers, Object.assign( {[LEADER_AWARE_ROUTING_HEADER]: true}, - session.commonHeaders_ - ) + session.commonHeaders_, + ), ); callback(null, requestReturnValue); }; @@ -332,8 +332,8 @@ describe('Session', () => { config.headers, Object.assign( {[LEADER_AWARE_ROUTING_HEADER]: true}, - session.commonHeaders_ - ) + session.commonHeaders_, + ), ); return new Promise(resolve => resolve(requestReturnValue)); }; diff --git a/test/spanner.ts b/test/spanner.ts index 9838e28ec..751ae6751 100644 --- a/test/spanner.ts +++ b/test/spanner.ts @@ -121,13 +121,13 @@ class XGoogRequestHeaderInterceptor { const gotReqId = metadata[X_GOOG_SPANNER_REQUEST_ID_HEADER]; if (!gotReqId) { throw new Error( - `${call.method} is missing ${X_GOOG_SPANNER_REQUEST_ID_HEADER} header` + `${call.method} is missing ${X_GOOG_SPANNER_REQUEST_ID_HEADER} header`, ); } if (!gotReqId.match(X_GOOG_REQ_ID_REGEX)) { throw new Error( - `${call.method} reqID header ${gotReqId} does not match ${X_GOOG_REQ_ID_REGEX}` + `${call.method} reqID header ${gotReqId} does not match ${X_GOOG_REQ_ID_REGEX}`, ); } return gotReqId; @@ -239,6 +239,16 @@ describe('Spanner with mock server', () => { const insertSqlForAllTypes = `INSERT INTO TABLE_WITH_ALL_TYPES (COLBOOL, COLINT64, 
COLFLOAT64, COLNUMERIC, COLSTRING, COLBYTES, COLJSON, COLDATE, COLTIMESTAMP) VALUES (@bool, @int64, @float64, @numeric, @string, @bytes, @json, @date, @timestamp)`; const updateSql = "UPDATE NUMBER SET NAME='Unknown' WHERE NUM IN (5, 6)"; + const readPartitionsQuery = { + table: 'abc', + keySet: { + keys: [], + all: true, + ranges: [{}, {}], + }, + gaxOptions: {}, + dataBoostEnabled: true, + }; const fooNotFoundErr = Object.assign(new Error('Table FOO not found'), { code: grpc.status.NOT_FOUND, }); @@ -277,36 +287,40 @@ describe('Spanner with mock server', () => { } else { resolve(assignedPort); } - } + }, ); }); + spannerMock.putReadRequestResult( + readPartitionsQuery, + mock.ReadRequestResult.resultSet(mock.createReadRequestResultSet()), + ); spannerMock.putStatementResult( selectSql, - mock.StatementResult.resultSet(mock.createSimpleResultSet()) + mock.StatementResult.resultSet(mock.createSimpleResultSet()), ); spannerMock.putStatementResult( select1, - mock.StatementResult.resultSet(mock.createSelect1ResultSet()) + mock.StatementResult.resultSet(mock.createSelect1ResultSet()), ); spannerMock.putStatementResult( selectAllTypes, - mock.StatementResult.resultSet(mock.createResultSetWithAllDataTypes()) + mock.StatementResult.resultSet(mock.createResultSetWithAllDataTypes()), ); spannerMock.putStatementResult( invalidSql, - mock.StatementResult.error(fooNotFoundErr) + mock.StatementResult.error(fooNotFoundErr), ); spannerMock.putStatementResult( insertSql, - mock.StatementResult.updateCount(1) + mock.StatementResult.updateCount(1), ); spannerMock.putStatementResult( insertSqlForAllTypes, - mock.StatementResult.updateCount(1) + mock.StatementResult.updateCount(1), ); spannerMock.putStatementResult( updateSql, - mock.StatementResult.updateCount(2) + mock.StatementResult.updateCount(2), ); // TODO(loite): Enable when SPANNER_EMULATOR_HOST is supported. 
@@ -376,7 +390,7 @@ describe('Spanner with mock server', () => { spannerMock.getMetadata().forEach(metadata => { assert.strictEqual( metadata.get(CLOUD_RESOURCE_HEADER)[0], - `projects/test-project/instances/instance/databases/${database.id}` + `projects/test-project/instances/instance/databases/${database.id}`, ); }); } finally { @@ -402,7 +416,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on ExecuteSqlRequest' + 'no requestOptions found on ExecuteSqlRequest', ); assert.strictEqual(request.requestOptions!.priority, 'PRIORITY_HIGH'); assert.strictEqual(request.requestOptions!.requestTag, 'request-tag'); @@ -420,12 +434,10 @@ describe('Spanner with mock server', () => { }, }); } catch (e) { - // Ignore the fact that streaming read is unimplemented on the mock - // server. We just want to verify that the correct request is sent. - assert.strictEqual((e as ServiceError).code, Status.UNIMPLEMENTED); + assert.strictEqual((e as ServiceError).code, Status.UNKNOWN); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.3.1` + `1.${randIdForProcess}.1.1.3.1`, ); } finally { snapshot.end(); @@ -437,7 +449,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ReadRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on ReadRequest' + 'no requestOptions found on ReadRequest', ); assert.strictEqual(request.requestOptions!.priority, 'PRIORITY_MEDIUM'); assert.strictEqual(request.requestOptions!.requestTag, 'request-tag'); @@ -456,7 +468,7 @@ describe('Spanner with mock server', () => { }); await tx!.batchUpdate([insertSql, insertSql]); return await tx.commit(); - } + }, ); await database.close(); const request = spannerMock.getRequests().find(val => { @@ -465,13 +477,13 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteBatchDmlRequest found'); assert.ok( 
request.requestOptions, - 'no requestOptions found on ExecuteBatchDmlRequest' + 'no requestOptions found on ExecuteBatchDmlRequest', ); assert.strictEqual(request.requestOptions!.priority, 'PRIORITY_MEDIUM'); assert.strictEqual(request.requestOptions!.requestTag, 'request-tag'); assert.strictEqual( request.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); assert.ok(request.transaction?.begin, 'transaction is not empty'); const nextBatchRequest = spannerMock @@ -489,7 +501,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(commitRequest.requestOptions!.requestTag, ''); assert.strictEqual( commitRequest.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); }); @@ -501,7 +513,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.putStatementResult( sql, - mock.StatementResult.updateCount(1, err) + mock.StatementResult.updateCount(1, err), ); await database.runTransactionAsync(async tx => { @@ -538,7 +550,7 @@ describe('Spanner with mock server', () => { }, }); return await tx.commit(); - } + }, ); await database.close(); const request = spannerMock.getRequests().find(val => { @@ -547,14 +559,14 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on ExecuteSqlRequest' + 'no requestOptions found on ExecuteSqlRequest', ); assert.strictEqual(request.requestOptions!.priority, 'PRIORITY_LOW'); assert.strictEqual(request.requestOptions!.requestTag, 'request-tag'); assert.ok(request.transaction!.begin!.readWrite, 'ReadWrite is not set'); assert.strictEqual( request.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); const commitRequest = spannerMock.getRequests().find(val => { return (val as v1.CommitRequest).mutations; @@ -562,7 +574,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(commitRequest.requestOptions!.requestTag, ''); 
assert.strictEqual( commitRequest.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); }); @@ -583,18 +595,16 @@ describe('Spanner with mock server', () => { }, }); } catch (e) { - // Ignore the fact that streaming read is unimplemented on the mock - // server. We just want to verify that the correct request is sent. - assert.strictEqual((e as ServiceError).code, Status.UNIMPLEMENTED); + assert.strictEqual((e as ServiceError).code, Status.UNKNOWN); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); return undefined; } finally { tx.end(); } - } + }, ); await database.close(); const request = spannerMock.getRequests().find(val => { @@ -603,20 +613,20 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ReadRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on ReadRequest' + 'no requestOptions found on ReadRequest', ); assert.strictEqual(request.requestOptions!.priority, 'PRIORITY_LOW'); assert.strictEqual(request.requestOptions!.requestTag, 'request-tag'); assert.strictEqual( request.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); const beginTxnRequest = spannerMock.getRequests().find(val => { return (val as v1.BeginTransactionRequest).options?.readWrite; }) as v1.BeginTransactionRequest; assert.strictEqual( beginTxnRequest.options?.readWrite!.readLockMode, - 'OPTIMISTIC' + 'OPTIMISTIC', ); }); @@ -696,11 +706,11 @@ describe('Spanner with mock server', () => { }); assert.deepStrictEqual( dateCol.value, - new SpannerDate('2021-05-11') + new SpannerDate('2021-05-11'), ); assert.deepStrictEqual( timestampCol.value, - new PreciseDate('2021-05-11T16:46:04.872Z') + new PreciseDate('2021-05-11T16:46:04.872Z'), ); assert.deepStrictEqual(arrayBoolCol.value, [true, false, null]); assert.deepStrictEqual(arrayInt64Col.value, [ @@ -791,7 +801,7 @@ describe('Spanner with mock server', () => { 
assert.deepStrictEqual(row.COLDATE, new SpannerDate('2021-05-11')); assert.deepStrictEqual( row.COLTIMESTAMP, - new PreciseDate('2021-05-11T16:46:04.872Z') + new PreciseDate('2021-05-11T16:46:04.872Z'), ); assert.deepStrictEqual(row.COLBOOLARRAY, [true, false, null]); assert.deepStrictEqual(row.COLINT64ARRAY, [i, 100 * i, null]); @@ -875,7 +885,7 @@ describe('Spanner with mock server', () => { const largeSelect = 'select * from large_table'; spannerMock.putStatementResult( largeSelect, - mock.StatementResult.resultSet(mock.createLargeResultSet()) + mock.StatementResult.resultSet(mock.createLargeResultSet()), ); const database = newTestDatabase(); let rowCount = 0; @@ -886,7 +896,7 @@ describe('Spanner with mock server', () => { }); const pipeline = util.promisify(stream.pipeline); const simulateSlowFlushInterval = Math.floor( - NUM_ROWS_LARGE_RESULT_SET / 10 + NUM_ROWS_LARGE_RESULT_SET / 10, ); await pipeline( @@ -913,7 +923,7 @@ describe('Spanner with mock server', () => { transform(chunk, encoding, callback) { callback(); }, - }) + }), ); assert.strictEqual(rowCount, NUM_ROWS_LARGE_RESULT_SET); assert.ok(paused, 'stream should have been paused'); @@ -926,7 +936,7 @@ describe('Spanner with mock server', () => { const largeSelect = 'select * from large_table'; spannerMock.putStatementResult( largeSelect, - mock.StatementResult.resultSet(mock.createLargeResultSet()) + mock.StatementResult.resultSet(mock.createLargeResultSet()), ); const database = newTestDatabase(); try { @@ -954,13 +964,13 @@ describe('Spanner with mock server', () => { transform(chunk, encoding, callback) { callback(); }, - }) + }), ); assert.fail('missing expected error'); } catch (err) { assert.strictEqual( (err as ServiceError).message, - 'Stream is still not ready to receive data after 1 attempts to resume.' 
+ 'Stream is still not ready to receive data after 1 attempts to resume.', ); } finally { await database.close(); @@ -1014,7 +1024,10 @@ describe('Spanner with mock server', () => { assert.strictEqual(rowCount, 3); assert.ok(stats); assert.ok(stats.queryPlan); - database.close().then(() => done()); + database + .close() + .then(() => done()) + .catch(() => done()); }); }); @@ -1022,23 +1035,29 @@ describe('Spanner with mock server', () => { const database = newTestDatabase(); let rowCount = 0; let stats: ResultSetStats; - database.getSnapshot().then(response => { - const [snapshot] = response; - snapshot - .runStream({ - sql: selectSql, - queryMode: google.spanner.v1.ExecuteSqlRequest.QueryMode.PROFILE, - }) - .on('data', () => rowCount++) - .on('stats', _stats => (stats = _stats)) - .on('end', () => { - assert.strictEqual(rowCount, 3); - assert.ok(stats); - assert.ok(stats.queryPlan); - snapshot.end(); - database.close().then(() => done()); - }); - }); + database + .getSnapshot() + .then(response => { + const [snapshot] = response; + snapshot + .runStream({ + sql: selectSql, + queryMode: google.spanner.v1.ExecuteSqlRequest.QueryMode.PROFILE, + }) + .on('data', () => rowCount++) + .on('stats', _stats => (stats = _stats)) + .on('end', () => { + assert.strictEqual(rowCount, 3); + assert.ok(stats); + assert.ok(stats.queryPlan); + snapshot.end(); + database + .close() + .then(() => done()) + .catch(() => done()); + }); + }) + .catch(err => done(err)); }); it('should call callback with statistics', done => { @@ -1053,8 +1072,11 @@ describe('Spanner with mock server', () => { assert.strictEqual(rows.length, 3); assert.ok(stats); assert.ok(stats.queryPlan); - database.close().then(() => done()); - } + database + .close() + .then(() => done()) + .catch(() => done()); + }, ); }); @@ -1098,31 +1120,31 @@ describe('Spanner with mock server', () => { assert.strictEqual(request.params!.fields!['int64'].stringValue, '100'); assert.strictEqual( 
request.params!.fields!['float64'].numberValue, - 3.14 + 3.14, ); assert.strictEqual( request.params!.fields!['numeric'].stringValue, - '6.626' + '6.626', ); assert.strictEqual( request.params!.fields!['string'].stringValue, - 'test' + 'test', ); assert.strictEqual( request.params!.fields!['bytes'].stringValue, - Buffer.from('test').toString('base64') + Buffer.from('test').toString('base64'), ); assert.strictEqual( request.params!.fields!['json'].stringValue, - '{"key1":"value1","key2":"value2","key3":["1","2","3"]}' + '{"key1":"value1","key2":"value2","key3":["1","2","3"]}', ); assert.strictEqual( request.params!.fields!['date'].stringValue, - '2021-05-11' + '2021-05-11', ); assert.strictEqual( request.params!.fields!['timestamp'].stringValue, - '2021-05-11T17:55:16.982300000Z' + '2021-05-11T17:55:16.982300000Z', ); assert.strictEqual(request.paramTypes!['bool'].code, 'BOOL'); assert.strictEqual(request.paramTypes!['int64'].code, 'INT64'); @@ -1153,7 +1175,7 @@ describe('Spanner with mock server', () => { await Promise.all(promises); assert.ok( pool.size >= 1 && pool.size <= 10, - 'Pool size should be between 1 and 10' + 'Pool size should be between 1 and 10', ); } finally { await database.close(); @@ -1176,7 +1198,7 @@ describe('Spanner with mock server', () => { await Promise.all(promises); assert.ok( pool.size >= 1 && pool.size <= 10, - 'Pool size should be between 1 and 10' + 'Pool size should be between 1 and 10', ); } finally { await database.close(); @@ -1191,14 +1213,13 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); database.run(selectSql, (err, rows) => { assert.ifError(err); assert.strictEqual(rows!.length, 3); database .close() - .catch(done) .then(() => { const gotStreamingCalls = xGoogReqIDInterceptor.getStreamingCalls(); const wantStreamingCalls = [ @@ -1213,7 +1234,8 @@ describe('Spanner with 
mock server', () => { ]; assert.deepStrictEqual(gotStreamingCalls, wantStreamingCalls); done(); - }); + }) + .catch(err => done(err)); }); }); @@ -1224,15 +1246,15 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); database.run(selectSql, err => { assert.ok(err, 'Missing expected error'); assert.strictEqual(err!.message, '2 UNKNOWN: Non-retryable error'); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }); }); @@ -1243,7 +1265,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); const rows: Row[] = []; const stream = database.runStream(selectSql); @@ -1252,8 +1274,8 @@ describe('Spanner with mock server', () => { assert.strictEqual(err.message, '2 UNKNOWN: Test error'); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }) .on('data', row => rows.push(row)) .on('end', () => { @@ -1273,7 +1295,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); try { const [rows] = await database.run(selectSql); @@ -1290,7 +1312,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); try { await database.run(selectSql); @@ -1298,11 +1320,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - '2 UNKNOWN: Test error' + '2 UNKNOWN: Test error', ); assert.deepStrictEqual( (e as RequestIDError).requestID, - 
`1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); } finally { await database.close(); @@ -1330,12 +1352,12 @@ describe('Spanner with mock server', () => { PartialResultSet.create({ metadata, values: [{stringValue: `V${i}`}], - }) + }), ); } spannerMock.putStatementResult( sql, - mock.StatementResult.resultSet(results) + mock.StatementResult.resultSet(results), ); // Register an error after maxQueued has been exceeded. const err = { @@ -1346,7 +1368,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); const database = newTestDatabase(); @@ -1356,12 +1378,12 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - '14 UNAVAILABLE: Transient error' + '14 UNAVAILABLE: Transient error', ); // Ensure that we have a requestID returned and it was on the 2nd request. assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); } finally { await database.close(); @@ -1379,7 +1401,7 @@ describe('Spanner with mock server', () => { }; spannerMock.putStatementResult( sql, - mock.StatementResult.resultSet(mock.createSimpleResultSet()) + mock.StatementResult.resultSet(mock.createSimpleResultSet()), ); try { await database.run(q); @@ -1387,8 +1409,8 @@ describe('Spanner with mock server', () => { } catch (err) { assert.ok( (err as ServiceError).message.includes( - 'Value of type undefined not recognized.' 
- ) + 'Value of type undefined not recognized.', + ), ); } finally { await database.close(); @@ -1406,7 +1428,7 @@ describe('Spanner with mock server', () => { }; spannerMock.putStatementResult( sql, - mock.StatementResult.resultSet(mock.createSimpleResultSet()) + mock.StatementResult.resultSet(mock.createSimpleResultSet()), ); const prs = database.runStream(q); setImmediate(() => { @@ -1417,9 +1439,12 @@ describe('Spanner with mock server', () => { done(); }) .on('end', () => { - database.close().then(() => { - done(assert.fail('missing error')); - }); + database + .close() + .then(() => { + done(assert.fail('missing error')); + }) + .catch(err => done(err)); }); }); }); @@ -1440,8 +1465,8 @@ describe('Spanner with mock server', () => { } catch (err) { assert.ok( (err as ServiceError).message.includes( - 'Value of type undefined not recognized.' - ) + 'Value of type undefined not recognized.', + ), ); } }); @@ -1460,7 +1485,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); const [rows] = await database.run(selectSql); assert.strictEqual(rows.length, 3); @@ -1475,7 +1500,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); const database = newTestDatabase(); @@ -1492,15 +1517,15 @@ describe('Spanner with mock server', () => { assert.strictEqual(requests.length, 2); assert.ok( requests[0].transaction?.begin!.readWrite, - 'inline txn is not set.' + 'inline txn is not set.', ); assert.ok( requests[1].transaction!.id, - 'Transaction ID is not used for retries.' 
+ 'Transaction ID is not used for retries.', ); assert.ok( requests[1].resumeToken, - 'Resume token is not set for the retried' + 'Resume token is not set for the retried', ); }); @@ -1512,7 +1537,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); const database = newTestDatabase(); @@ -1534,15 +1559,15 @@ describe('Spanner with mock server', () => { assert.strictEqual(requests.length, 3); assert.ok( requests[0].transaction?.begin!.readWrite, - 'inline txn is not set.' + 'inline txn is not set.', ); assert.ok( requests[1].transaction!.id, - 'Transaction ID is not used for retries.' + 'Transaction ID is not used for retries.', ); assert.ok( requests[1].resumeToken, - 'Resume token is not set for the retried' + 'Resume token is not set for the retried', ); const commitRequests = spannerMock .getRequests() @@ -1551,16 +1576,16 @@ describe('Spanner with mock server', () => { assert.strictEqual(commitRequests.length, 1); assert.deepStrictEqual( requests[1].transaction!.id, - requests[2].transaction!.id + requests[2].transaction!.id, ); assert.deepStrictEqual( requests[1].transaction!.id, - commitRequests[0].transactionId + commitRequests[0].transactionId, ); const beginTxnRequests = spannerMock .getRequests() .filter( - val => (val as v1.BeginTransactionRequest).options?.readWrite + val => (val as v1.BeginTransactionRequest).options?.readWrite, ) .map(req => req as v1.BeginTransactionRequest); assert.deepStrictEqual(beginTxnRequests.length, 0); @@ -1574,7 +1599,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); try { await database.run(selectSql); @@ -1582,11 +1607,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as 
ServiceError).message, - '2 UNKNOWN: Test error' + '2 UNKNOWN: Test error', ); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); } await database.close(); @@ -1601,15 +1626,15 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); database.run(selectSql, (err, rows) => { assert.ifError(err); assert.strictEqual(rows!.length, 3); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }); }); @@ -1621,19 +1646,19 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); database.run(selectSql, err => { assert.ok(err, 'Missing expected error'); assert.strictEqual(err!.message, '2 UNKNOWN: Non-retryable error'); assert.deepStrictEqual( (err as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }); }); @@ -1645,7 +1670,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); const receivedRows: Row[] = []; database @@ -1655,12 +1680,12 @@ describe('Spanner with mock server', () => { assert.strictEqual(receivedRows.length, index); assert.deepStrictEqual( (err as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }) // We will receive data for the partial result sets that are // returned before the 
error occurs. @@ -1686,7 +1711,7 @@ describe('Spanner with mock server', () => { } spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofErrors(errors) + SimulatedExecutionTime.ofErrors(errors), ); const [rows] = await database.run(selectSql); assert.strictEqual(rows.length, 3); @@ -1701,19 +1726,22 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); database.runTransaction((err, tx) => { assert.ifError(err); tx!.runUpdate(insertSql, (err, updateCount) => { assert.ifError(err); assert.strictEqual(updateCount, 1); - tx!.commit().then(() => { - database - .close() - .catch(done) - .then(() => done()); - }); + tx! + .commit() + .then(() => { + database + .close() + .then(() => done()) + .catch(err => done(err)); + }) + .catch(() => {}); }); }); }); @@ -1729,7 +1757,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); let attempts = 0; database.runTransaction((err, tx) => { @@ -1739,7 +1767,7 @@ describe('Spanner with mock server', () => { assert.ok(err, 'Missing expected error'); assert.deepStrictEqual( (err as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); assert.strictEqual(err!.code, grpc.status.INVALID_ARGUMENT); // Only the update RPC should be retried and not the entire @@ -1750,8 +1778,8 @@ describe('Spanner with mock server', () => { .then(() => { database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }) .catch(done); }); @@ -1764,12 +1792,12 @@ describe('Spanner with mock server', () => { function newTestDatabaseWithLARDisabled( options?: SessionPoolOptions, - queryOptions?: IQueryOptions + queryOptions?: 
IQueryOptions, ): Database { return instanceWithLARDisabled.database( `database-${dbCounter++}`, options, - queryOptions + queryOptions, ); } @@ -1799,7 +1827,7 @@ describe('Spanner with mock server', () => { metadataCountWithLAREnabled++; assert.strictEqual( metadata.get(LEADER_AWARE_ROUTING_HEADER)[0], - 'true' + 'true', ); } }); @@ -1818,7 +1846,7 @@ describe('Spanner with mock server', () => { spannerMock.getMetadata().forEach(metadata => { assert.strictEqual( metadata.get(LEADER_AWARE_ROUTING_HEADER)[0], - undefined + undefined, ); }); }); @@ -1845,7 +1873,7 @@ describe('Spanner with mock server', () => { assert.strictEqual( requests.session?.multiplexed, true, - 'Multiplexed should be true' + 'Multiplexed should be true', ); }); @@ -1920,7 +1948,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.createSession, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); const database = newTestDatabase().on('error', err => { assert.strictEqual(err.code, Status.NOT_FOUND); @@ -1931,11 +1959,11 @@ describe('Spanner with mock server', () => { assert.strictEqual((error as grpc.ServiceError).code, err.code); assert.strictEqual( (error as grpc.ServiceError).details, - 'create session failed' + 'create session failed', ); assert.strictEqual( (error as grpc.ServiceError).message, - '5 NOT_FOUND: create session failed' + '5 NOT_FOUND: create session failed', ); } }); @@ -1950,7 +1978,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofError(error) + SimulatedExecutionTime.ofError(error), ); const database = newTestDatabase(); database.run(query, (err, _) => { @@ -1980,7 +2008,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(pool._inventory.sessions.length, 1); assert.strictEqual( pool._inventory.sessions[0].metadata.multiplexed, - false + false, ); // multiplexed session 
will get created since GOOGLE_CLOUD_SPANNER_MULTIPLEXED_SESSIONS is enabled assert.notEqual(multiplexedSession._multiplexedSession, null); @@ -2008,7 +2036,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(pool._inventory.sessions.length, 1); assert.strictEqual( pool._inventory.sessions[0].metadata.multiplexed, - false + false, ); assert.strictEqual(multiplexedSession._multiplexedSession, null); assert.strictEqual(resp, 2); @@ -2058,7 +2086,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(pool._inventory.sessions.length, 1); assert.strictEqual( pool._inventory.sessions[0].metadata.multiplexed, - false + false, ); assert.strictEqual(multiplexedSession._multiplexedSession, null); assert.strictEqual(resp, 2); @@ -2073,7 +2101,7 @@ describe('Spanner with mock server', () => { /** Common verify method for QueryOptions tests. */ function verifyQueryOptions( optimizerVersion: string, - optimizerStatisticsPackage: string + optimizerStatisticsPackage: string, ) { const request = spannerMock.getRequests().find(val => { return (val as v1.ExecuteSqlRequest).sql; @@ -2081,15 +2109,15 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.ok( request.queryOptions, - 'no queryOptions found on ExecuteSqlRequest' + 'no queryOptions found on ExecuteSqlRequest', ); assert.strictEqual( request.queryOptions!.optimizerVersion, - optimizerVersion + optimizerVersion, ); assert.strictEqual( request.queryOptions!.optimizerStatisticsPackage, - optimizerStatisticsPackage + optimizerStatisticsPackage, ); } @@ -2182,12 +2210,12 @@ describe('Spanner with mock server', () => { function newTestDatabase( options?: SessionPoolOptions, - queryOptions?: IQueryOptions + queryOptions?: IQueryOptions, ): Database { return instanceWithEnvVar.database( `database-${dbCounter++}`, options, - queryOptions + queryOptions, ); } @@ -2462,7 +2490,7 @@ describe('Spanner with mock server', () => { 
SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); db.run(selectSql, (err, rows) => { if (err) { @@ -2483,8 +2511,8 @@ describe('Spanner with mock server', () => { done(); } db.close() - .catch(err => assert.fail(err)) - .then(() => done()); + .then(() => done()) + .catch(err => assert.fail(err)); }); }); }); @@ -2496,7 +2524,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); let rowCount = 0; db.runStream(selectSql) @@ -2517,7 +2545,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); } db.run(selectSql, (err, rows) => { @@ -2537,7 +2565,7 @@ describe('Spanner with mock server', () => { code: grpc.status.NOT_FOUND, message: 'Session not found', streamIndex: 1, - } as MockError) + } as MockError), ); db.run(selectSql, err => { if (err) { @@ -2563,7 +2591,7 @@ describe('Spanner with mock server', () => { sessionNotFound, sessionNotFound, sessionNotFound, - ]) + ]), ); db.getSnapshot((err, snapshot) => { assert.ifError(err); @@ -2587,7 +2615,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); runTransactionWithExpectedSessionRetry(db, done); }); @@ -2625,7 +2653,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); db.runTransaction((err, transaction) => { assert.ifError(err); @@ -2649,7 +2677,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); 
db.getSnapshot() .then(response => { @@ -2678,7 +2706,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); db.runTransaction((err, transaction) => { assert.ifError(err); @@ -2709,7 +2737,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); db.runTransaction((err, transaction) => { assert.ifError(err); @@ -2726,7 +2754,7 @@ describe('Spanner with mock server', () => { db.close(done); }); }); - } + }, ); }); }); @@ -2743,7 +2771,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); runAsyncTransactionWithExpectedSessionRetry(db).then(done).catch(done); }); @@ -2780,7 +2808,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); try { await db @@ -2811,7 +2839,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); try { await db @@ -2843,7 +2871,7 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.NOT_FOUND, message: 'Session not found', - } as MockError) + } as MockError), ); try { await db @@ -2975,13 +3003,13 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).name, - SessionPoolExhaustedError.name + SessionPoolExhaustedError.name, ); const exhausted = e as SessionPoolExhaustedError; assert.ok(exhausted.messages); assert.strictEqual(exhausted.messages.length, 1); assert.ok( - exhausted.messages[0].indexOf('testSessionPoolExhaustedError') > -1 
+ exhausted.messages[0].indexOf('testSessionPoolExhaustedError') > -1, ); } tx1.end(); @@ -3006,11 +3034,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}` + `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}`, ); assert.deepStrictEqual( (e as RequestIDError).requestID.match(requestIDRegex) !== null, - true + true, ); } } @@ -3054,11 +3082,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}` + `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}`, ); assert.deepStrictEqual( (e as RequestIDError).requestID.match(requestIDRegex) !== null, - true + true, ); } } @@ -3136,7 +3164,7 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - 'No resources available.' + 'No resources available.', ); } } finally { @@ -3204,7 +3232,7 @@ describe('Spanner with mock server', () => { code: Status.NOT_FOUND, message: 'Database not found', }, - ] as MockError[]) + ] as MockError[]), ); const database = newTestDatabase(); try { @@ -3226,7 +3254,7 @@ describe('Spanner with mock server', () => { code: Status.NOT_FOUND, message: msg, }, - ] as MockError[]) + ] as MockError[]), ); try { const database = newTestDatabase({ @@ -3260,7 +3288,7 @@ describe('Spanner with mock server', () => { code: Status.PERMISSION_DENIED, message: 'Needs permission', }, - ] as MockError[]) + ] as MockError[]), ); const database = newTestDatabase().on('error', err => { assert.strictEqual(err.code, Status.PERMISSION_DENIED); @@ -3271,7 +3299,7 @@ describe('Spanner with mock server', () => { } catch (err) { assert.strictEqual( (err as ServiceError).code, - Status.PERMISSION_DENIED + Status.PERMISSION_DENIED, ); } finally { await database.close(); @@ -3377,7 +3405,7 @@ 
describe('Spanner with mock server', () => { rows.forEach(() => count++); return transaction.commit().then(() => count); }); - } + }, ); assert.strictEqual(rowCount, 3); assert.strictEqual(attempts, 2); @@ -3401,13 +3429,13 @@ describe('Spanner with mock server', () => { assert.strictEqual(attempts, 2); transaction! .commit() - .catch(done) .then(() => { database .close() - .catch(done) - .then(() => done()); - }); + .then(() => done()) + .catch(err => done(err)); + }) + .catch(err => done(err)); }); }); }); @@ -3429,7 +3457,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(rows2.length, 3); return transaction.commit().then(() => rows1.length + rows2.length); }); - } + }, ); assert.strictEqual(rowCount, 6); assert.strictEqual(attempts, 2); @@ -3444,12 +3472,12 @@ describe('Spanner with mock server', () => { assert.strictEqual(requests.length, 3); assert.ok( requests[0].transaction?.begin!.readWrite, - 'Inline txn is not set in request.' + 'Inline txn is not set in request.', ); requests.slice(1, 3).forEach((request, index) => { assert.ok( request.transaction!.id, - `Transaction ID is not used for retries. ${index}.` + `Transaction ID is not used for retries. 
${index}.`, ); }); const beginTxnRequest = spannerMock @@ -3477,7 +3505,7 @@ describe('Spanner with mock server', () => { return transaction .runUpdate(insertSql) .then(updateCount => transaction.commit().then(() => updateCount)); - } + }, ); assert.strictEqual(updated, 1); assert.strictEqual(attempts, 2); @@ -3501,8 +3529,8 @@ describe('Spanner with mock server', () => { assert.strictEqual(attempts, 2); database .close() - .catch(done) - .then(() => done()); + .then(() => done()) + .catch(err => done(err)); }); }); }); @@ -3518,7 +3546,7 @@ describe('Spanner with mock server', () => { message: 'Transaction aborted', metadata: MockSpanner.createMinimalRetryDelayMetadata(), streamIndex: 1, - } as MockError) + } as MockError), ); const response = await database.runTransactionAsync(transaction => { attempts++; @@ -3537,7 +3565,7 @@ describe('Spanner with mock server', () => { const database = newTestDatabase(); const [updated] = await database.runTransactionAsync( (transaction): Promise => { - transaction.begin(); + void transaction.begin(); return transaction.runUpdate(insertSql).then(updateCount => { if (!attempts) { spannerMock.abortTransaction(transaction); @@ -3545,7 +3573,7 @@ describe('Spanner with mock server', () => { attempts++; return transaction.commit().then(() => updateCount); }); - } + }, ); assert.strictEqual(updated, 1); assert.strictEqual(attempts, 2); @@ -3559,21 +3587,21 @@ describe('Spanner with mock server', () => { await database.runTransactionAsync( {timeout: 1}, (transaction): Promise => { - transaction.begin(); + void transaction.begin(); attempts++; return transaction.runUpdate(insertSql).then(updateCount => { // Always abort the transaction. 
spannerMock.abortTransaction(transaction); return transaction.commit().then(() => updateCount); }); - } + }, ); assert.fail('missing expected DEADLINE_EXCEEDED error'); } catch (e) { assert.strictEqual( (e as ServiceError).code, grpc.status.DEADLINE_EXCEEDED, - `Got unexpected error ${e} with code ${(e as ServiceError).code}` + `Got unexpected error ${e} with code ${(e as ServiceError).code}`, ); // The transaction should be tried at least once before timing out. assert.ok(attempts >= 1); @@ -3587,7 +3615,7 @@ describe('Spanner with mock server', () => { const [updated] = await database.runTransactionAsync( (transaction): Promise => { - transaction.begin(); + void transaction.begin(); return transaction.runUpdate(insertSql).then(updateCount => { if (!attempts) { spannerMock.setExecutionTime( @@ -3595,13 +3623,13 @@ describe('Spanner with mock server', () => { SimulatedExecutionTime.ofError({ code: grpc.status.INTERNAL, message: 'Received RST_STREAM', - } as MockError) + } as MockError), ); } attempts++; return transaction.commit().then(() => updateCount); }); - } + }, ); assert.strictEqual(updated, 1); assert.strictEqual(attempts, 2); @@ -3633,6 +3661,64 @@ describe('Spanner with mock server', () => { }); }); + describe('batch-transactions', () => { + describe('createReadPartitions', () => { + it('should create set of read partitions', async () => { + const database = newTestDatabase({min: 0, incStep: 1}); + const query = { + table: 'abc', + keys: ['a', 'b'], + ranges: [{}, {}], + gaxOptions: {}, + dataBoostEnabled: true, + }; + const [transaction] = await database.createBatchTransaction(); + const [readPartitions] = + await transaction.createReadPartitions(query); + assert.strictEqual(readPartitions.length, 1); + assert.strictEqual(readPartitions[0].table, 'abc'); + }); + }); + + describe('createQueryPartitions', () => { + it('should create set of query partitions', async () => { + const database = newTestDatabase({min: 0, incStep: 1}); + const query = { + sql: 
select1, + }; + const [transaction] = await database.createBatchTransaction(); + const [queryPartitions] = + await transaction.createQueryPartitions(query); + assert.strictEqual(Object.keys(queryPartitions).length, 1); + assert.strictEqual(queryPartitions[0].sql, select1); + transaction.close(); + await database.close(); + }); + }); + + describe('execute', () => { + it('should create and execute query partitions', async () => { + const database = newTestDatabase({min: 0, incStep: 1}); + const [transaction] = await database.createBatchTransaction(); + const [queryPartitions] = + await transaction.createQueryPartitions(selectSql); + assert.strictEqual(queryPartitions.length, 1); + const [resp] = await transaction.execute(queryPartitions[0]); + assert.strictEqual(resp.length, 3); + }); + + it('should create and execute read partitions', async () => { + const database = newTestDatabase({min: 0, incStep: 1}); + const [transaction] = await database.createBatchTransaction(); + const [readPartitions] = + await transaction.createReadPartitions(readPartitionsQuery); + assert.strictEqual(readPartitions.length, 1); + const [resp] = await transaction.execute(readPartitions[0]); + assert.strictEqual(resp.length, 3); + }); + }); + }); + describe('pdml', () => { it('should retry on aborted error', async () => { const database = newTestDatabase(); @@ -3643,7 +3729,7 @@ describe('Spanner with mock server', () => { message: 'Transaction aborted', metadata: MockSpanner.createMinimalRetryDelayMetadata(), streamIndex: 1, - } as MockError) + } as MockError), ); const [updateCount] = await database.runPartitionedUpdate(updateSql); assert.strictEqual(updateCount, 2); @@ -3658,7 +3744,7 @@ describe('Spanner with mock server', () => { code: grpc.status.INTERNAL, message: 'Received unexpected EOS on DATA frame from server', streamIndex: 1, - } as MockError) + } as MockError), ); const [updateCount] = await database.runPartitionedUpdate(updateSql); assert.strictEqual(updateCount, 2); @@ 
-3673,7 +3759,7 @@ describe('Spanner with mock server', () => { code: grpc.status.INTERNAL, message: 'Generic internal error', streamIndex: 1, - } as MockError) + } as MockError), ); try { await database.runPartitionedUpdate(updateSql); @@ -3681,11 +3767,11 @@ describe('Spanner with mock server', () => { } catch (err) { assert.strictEqual((err as ServiceError).code, grpc.status.INTERNAL); assert.ok( - (err as ServiceError).message.includes('Generic internal error') + (err as ServiceError).message.includes('Generic internal error'), ); assert.deepStrictEqual( (err as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.3.1` + `1.${randIdForProcess}.1.1.3.1`, ); } finally { await database.close(); @@ -3707,7 +3793,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on ExecuteSqlRequest' + 'no requestOptions found on ExecuteSqlRequest', ); assert.strictEqual(request.requestOptions!.priority, 'PRIORITY_LOW'); assert.strictEqual(request.requestOptions!.requestTag, 'request-tag'); @@ -3726,7 +3812,7 @@ describe('Spanner with mock server', () => { }) as v1.BeginTransactionRequest; assert.strictEqual( beginTxnRequest.options?.excludeTxnFromChangeStreams, - true + true, ); await database.close(); }); @@ -3739,7 +3825,7 @@ describe('Spanner with mock server', () => { const [session] = await database.createSession({}); const transaction = session.transaction( {}, - {transactionTag: 'transaction-tag'} + {transactionTag: 'transaction-tag'}, ); await transaction.begin(); await database.close(); @@ -3749,12 +3835,12 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no BeginTransactionRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on BeginTransactionRequest' + 'no requestOptions found on BeginTransactionRequest', ); assert.strictEqual(request.requestOptions!.requestTag, ''); assert.strictEqual( 
request.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); }); @@ -3801,7 +3887,7 @@ describe('Spanner with mock server', () => { assert.match((err as Error).message, /Table FOO not found/); assert.deepStrictEqual( (err as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.3.1` + `1.${randIdForProcess}.1.1.3.1`, ); } }); @@ -3849,7 +3935,7 @@ describe('Spanner with mock server', () => { 1, spannerMock.getRequests().filter(val => { return (val as v1.CommitRequest).mutations; - }).length + }).length, ); const commitRequest = spannerMock.getRequests().find(val => { return (val as v1.CommitRequest).mutations; @@ -3885,7 +3971,7 @@ describe('Spanner with mock server', () => { 1, spannerMock.getRequests().filter(val => { return (val as v1.CommitRequest).mutations; - }).length + }).length, ); const commitRequest = spannerMock.getRequests().find(val => { const request = val as v1.CommitRequest; @@ -3917,7 +4003,7 @@ describe('Spanner with mock server', () => { }) as v1.CommitRequest; assert.strictEqual( request.singleUseTransaction?.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); }); @@ -3930,7 +4016,7 @@ describe('Spanner with mock server', () => { async tx => { await tx!.insert('foo', {id: 1, value: 'One'}); await tx.commit(); - } + }, ); await database.close(); @@ -3939,7 +4025,7 @@ describe('Spanner with mock server', () => { }) as v1.BeginTransactionRequest; assert.strictEqual( beginTxnRequest.options?.excludeTxnFromChangeStreams, - true + true, ); }); @@ -3952,7 +4038,7 @@ describe('Spanner with mock server', () => { async tx => { await tx!.run(selectSql); await tx.commit(); - } + }, ); await database.close(); @@ -3962,7 +4048,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.readWrite!.readLockMode, - 'OPTIMISTIC' + 'OPTIMISTIC', ); }); @@ -3975,7 +4061,7 @@ describe('Spanner with mock server', () => { async tx => { await 
tx!.run(selectSql); await tx.commit(); - } + }, ); await database.close(); @@ -3985,7 +4071,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin?.excludeTxnFromChangeStreams, - true + true, ); }); @@ -3998,7 +4084,7 @@ describe('Spanner with mock server', () => { async tx => { await tx!.run(selectSql); await tx.commit(); - } + }, ); await database.close(); @@ -4008,7 +4094,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); }); @@ -4035,7 +4121,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); }); @@ -4058,7 +4144,7 @@ describe('Spanner with mock server', () => { async tx => { await tx!.run(selectSql); await tx.commit(); - } + }, ); await database.close(); @@ -4068,15 +4154,15 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.readWrite?.readLockMode, - 'OPTIMISTIC' + 'OPTIMISTIC', ); assert.strictEqual( request.transaction!.begin!.excludeTxnFromChangeStreams, - true + true, ); assert.strictEqual( request.transaction!.begin!.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); }); @@ -4098,7 +4184,7 @@ describe('Spanner with mock server', () => { async tx => { await tx!.run(selectSql); await tx.commit(); - } + }, ); await database.close(); @@ -4108,7 +4194,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); }); @@ -4126,7 +4212,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no 
ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.readWrite!.readLockMode, - 'OPTIMISTIC' + 'OPTIMISTIC', ); done(); }); @@ -4148,10 +4234,10 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.excludeTxnFromChangeStreams, - true + true, ); done(); - } + }, ); }); @@ -4171,10 +4257,10 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); done(); - } + }, ); }); @@ -4192,11 +4278,11 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.readWrite!.readLockMode, - 'OPTIMISTIC' + 'OPTIMISTIC', ); assert.strictEqual( request.requestOptions?.transactionTag, - 'transaction-tag' + 'transaction-tag', ); }); }); @@ -4213,7 +4299,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.strictEqual( request.transaction!.begin!.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); }); }); @@ -4240,7 +4326,7 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no ExecuteSqlRequest found'); assert.notStrictEqual( request.transaction!.begin!.readWrite!.readLockMode, - 'OPTIMISTIC' + 'OPTIMISTIC', ); }); @@ -4364,7 +4450,7 @@ describe('Spanner with mock server', () => { attempts++; await tx!.run(insertSql); await tx.commit(); - } + }, ); await database.close(); @@ -4375,7 +4461,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( beginTxnRequest[0].options?.excludeTxnFromChangeStreams, - true + true, ); }); @@ -4400,7 +4486,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( 
beginTxnRequest[0].options!.readWrite!.readLockMode, - 'OPTIMISTIC' + 'OPTIMISTIC', ); }); @@ -4413,11 +4499,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}` + `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}`, ); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); } await tx.run(selectSql); @@ -4445,12 +4531,12 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}` + `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}`, ); } await tx.run(selectSql); await tx.commit(); - } + }, ); await database.close(); @@ -4461,7 +4547,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( beginTxnRequest[0].options?.excludeTxnFromChangeStreams, - true + true, ); }); @@ -4474,11 +4560,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}` + `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}`, ); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); } await tx.run(selectSql); @@ -4506,16 +4592,16 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}` + `${grpc.status.NOT_FOUND} NOT_FOUND: ${fooNotFoundErr.message}`, ); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); } await tx.run(selectSql); await tx.commit(); - } + }, ); await database.close(); @@ -4526,7 
+4612,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( beginTxnRequest[0].options?.excludeTxnFromChangeStreams, - true + true, ); }); @@ -4537,7 +4623,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.beginTransaction, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); try { await database.runTransactionAsync(async tx => { @@ -4550,11 +4636,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - '2 UNKNOWN: Test error' + '2 UNKNOWN: Test error', ); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.4.1` + `1.${randIdForProcess}.1.1.4.1`, ); } finally { await database.close(); @@ -4566,7 +4652,7 @@ describe('Spanner with mock server', () => { const [session] = await database.createSession({}); const transaction = session.transaction( {}, - {transactionTag: 'transaction-tag'} + {transactionTag: 'transaction-tag'}, ); transaction.insert('foo', {id: 1, name: 'One'}); await transaction.commit(); @@ -4577,12 +4663,12 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no CommitRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on CommitRequest' + 'no requestOptions found on CommitRequest', ); assert.strictEqual(request.requestOptions!.requestTag, ''); assert.strictEqual( request.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); }); @@ -4610,7 +4696,7 @@ describe('Spanner with mock server', () => { async tx => { tx.insert('foo', {id: 1, name: 'One'}); await tx.commit(); - } + }, ); await database.close(); @@ -4621,7 +4707,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( beginTxnRequest[0].options?.excludeTxnFromChangeStreams, - true + true, ); }); @@ -4632,7 +4718,7 @@ 
describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.beginTransaction, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); try { await database.runTransactionAsync(async tx => { @@ -4642,11 +4728,11 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).message, - '2 UNKNOWN: Test error' + '2 UNKNOWN: Test error', ); assert.deepStrictEqual( (e as RequestIDError).requestID, - `1.${randIdForProcess}.1.1.2.1` + `1.${randIdForProcess}.1.1.2.1`, ); } finally { await database.close(); @@ -4660,7 +4746,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.beginTransaction, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); try { await database.runTransactionAsync( @@ -4670,7 +4756,7 @@ describe('Spanner with mock server', () => { async tx => { tx.insert('foo', {id: 1, name: 'One'}); await tx.commit(); - } + }, ); } catch (e) { const beginTxnRequest = spannerMock @@ -4680,11 +4766,11 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( beginTxnRequest[0].options?.excludeTxnFromChangeStreams, - true + true, ); assert.strictEqual( (e as ServiceError).message, - '2 UNKNOWN: Test error' + '2 UNKNOWN: Test error', ); } finally { await database.close(); @@ -4702,7 +4788,7 @@ describe('Spanner with mock server', () => { priority: RequestOptions.Priority.PRIORITY_MEDIUM, transactionTag: 'transaction-tag', }, - } + }, ); const request = spannerMock.getRequests().find(val => { @@ -4711,12 +4797,12 @@ describe('Spanner with mock server', () => { assert.ok(request, 'no CommitRequest found'); assert.ok( request.requestOptions, - 'no requestOptions found on CommitRequest' + 'no requestOptions found on CommitRequest', ); assert.strictEqual(request.requestOptions!.priority, 'PRIORITY_MEDIUM'); assert.strictEqual( 
request.requestOptions!.transactionTag, - 'transaction-tag' + 'transaction-tag', ); await database.close(); @@ -4728,7 +4814,7 @@ describe('Spanner with mock server', () => { {id: 1, name: 'bar'}, { excludeTxnFromChangeStreams: true, - } + }, ); const beginTxnRequest = spannerMock .getRequests() @@ -4737,7 +4823,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( beginTxnRequest[0].options?.excludeTxnFromChangeStreams, - true + true, ); await database.close(); }); @@ -4755,7 +4841,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(beginTxnRequest.length, 1); assert.strictEqual( beginTxnRequest[0].options?.isolationLevel, - 'REPEATABLE_READ' + 'REPEATABLE_READ', ); await database.close(); }); @@ -4774,23 +4860,23 @@ describe('Spanner with mock server', () => { assert.strictEqual(request.mutations.length, 1); assert.strictEqual( request.mutations[0].insertOrUpdate?.values?.length, - 1 + 1, ); assert.strictEqual( request.mutations[0].insertOrUpdate!.columns![0], - 'id' + 'id', ); assert.strictEqual( request.mutations[0].insertOrUpdate!.columns![1], - 'value' + 'value', ); assert.strictEqual( request.mutations[0].insertOrUpdate!.values![0].values![0].stringValue, - '1' + '1', ); assert.strictEqual( request.mutations[0].insertOrUpdate!.values![0].values![1].stringValue, - '{"key1":"value1","key2":"value2"}' + '{"key1":"value1","key2":"value2"}', ); await database.close(); @@ -4805,7 +4891,7 @@ describe('Spanner with mock server', () => { } as MockError; spannerMock.setExecutionTime( spannerMock.commit, - SimulatedExecutionTime.ofError(err) + SimulatedExecutionTime.ofError(err), ); try { await database.table('TestTable').upsert({ @@ -4816,12 +4902,12 @@ describe('Spanner with mock server', () => { } catch (e) { assert.strictEqual( (e as ServiceError).code, - Status.FAILED_PRECONDITION + Status.FAILED_PRECONDITION, ); assert.ok( (e as ServiceError).message.includes( - 'Convert the 
value to a JSON string containing an array instead' - ) + 'Convert the value to a JSON string containing an array instead', + ), ); } @@ -4845,12 +4931,12 @@ describe('Spanner with mock server', () => { }, }, ], - }) + }), ); } spannerMock.putStatementResult( sql, - mock.StatementResult.resultSet(partials) + mock.StatementResult.resultSet(partials), ); const database = newTestDatabase(); try { @@ -4959,7 +5045,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual(rows[0].ColStringArray, ['One', 'Two']); assert.strictEqual( rows[1].ColString, - 'This value is also not chunked' + 'This value is also not chunked', ); assert.deepStrictEqual(rows[1].ColStringArray, ['Three', 'Four']); } finally { @@ -5026,7 +5112,7 @@ describe('Spanner with mock server', () => { ]); assert.strictEqual( rows[1].ColString, - 'This value is also not chunked' + 'This value is also not chunked', ); assert.deepStrictEqual(rows[1].ColStringArray, ['Three', 'Four']); } finally { @@ -5101,7 +5187,7 @@ describe('Spanner with mock server', () => { assert.strictEqual(rows[0].outerArray[0].innerField, 'First row'); assert.ok( rows[0].outerArray[0].innerArray === null, - 'Inner array should be null' + 'Inner array should be null', ); assert.strictEqual(rows[1].outerArray.length, 1); assert.strictEqual(rows[1].outerArray[0].innerField, 'Second row'); @@ -5437,11 +5523,11 @@ describe('Spanner with mock server', () => { function setupResultsAndErrors( sql: string, results: PartialResultSet[], - errorOnIndexes: number[] + errorOnIndexes: number[], ) { spannerMock.putStatementResult( sql, - mock.StatementResult.resultSet(results) + mock.StatementResult.resultSet(results), ); if (errorOnIndexes.length) { const errors: MockError[] = []; @@ -5454,7 +5540,7 @@ describe('Spanner with mock server', () => { } spannerMock.setExecutionTime( spannerMock.executeStreamingSql, - SimulatedExecutionTime.ofErrors(errors) + SimulatedExecutionTime.ofErrors(errors), ); } } @@ -5531,7 +5617,7 @@ 
describe('Spanner with mock server', () => { }); assert.strictEqual( createdInstance.name, - `projects/${spanner.projectId}/instances/new-instance` + `projects/${spanner.projectId}/instances/new-instance`, ); assert.strictEqual(createdInstance.nodeCount, 10); }); @@ -5554,7 +5640,7 @@ describe('Spanner with mock server', () => { }); assert.strictEqual( createdInstance.name, - `projects/${spanner.projectId}/instances/new-instance` + `projects/${spanner.projectId}/instances/new-instance`, ); assert.strictEqual(createdInstance.nodeCount, 10); assert.strictEqual(createdInstance.displayName, 'some new instance'); @@ -5572,19 +5658,19 @@ describe('Spanner with mock server', () => { assert.ok(resource, 'no instance returned'); assert.strictEqual( resource!.formattedName_, - `projects/${spanner.projectId}/instances/new-instance` + `projects/${spanner.projectId}/instances/new-instance`, ); assert.ok(operation, 'no operation returned'); operation!.on('error', assert.ifError).on('complete', instance => { // Instance created successfully. 
assert.strictEqual( instance.name, - `projects/${spanner.projectId}/instances/new-instance` + `projects/${spanner.projectId}/instances/new-instance`, ); assert.strictEqual(instance.nodeCount, 10); done(); }); - } + }, ); }); @@ -5605,7 +5691,7 @@ describe('Spanner with mock server', () => { }); assert.strictEqual( createdInstance.name, - `projects/${spanner.projectId}/instances/new-instance` + `projects/${spanner.projectId}/instances/new-instance`, ); assert.strictEqual(createdInstance.processingUnits, 500); assert.strictEqual(createdInstance.nodeCount, 0); @@ -5657,7 +5743,7 @@ describe('Spanner with mock server', () => { }); assert.strictEqual( createdDatabase.name, - `${instance.formattedName_}/databases/new-database` + `${instance.formattedName_}/databases/new-database`, ); }); @@ -5688,8 +5774,8 @@ describe('Spanner with mock server', () => { const provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: exporter, + spanProcessors: [new SimpleSpanProcessor(exporter)], }); - provider.addSpanProcessor(new SimpleSpanProcessor(exporter)); provider.register(); after(async () => { @@ -5739,7 +5825,7 @@ describe('Spanner with mock server', () => { assert.deepStrictEqual( actualSpanNames, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}`, ); const expectedEventNames = [ @@ -5756,7 +5842,7 @@ describe('Spanner with mock server', () => { assert.deepEqual( actualEventNames, expectedEventNames, - `Mismatched events\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + `Mismatched events\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}`, ); done(); @@ -5785,8 +5871,8 @@ describe('Spanner with mock server', () => { const provider = new NodeTracerProvider({ sampler: new AlwaysOnSampler(), exporter: exporter, + spanProcessors: [new SimpleSpanProcessor(exporter)], }); - provider.addSpanProcessor(new 
SimpleSpanProcessor(exporter)); provider.register(); beforeEach(async () => { @@ -5836,13 +5922,13 @@ describe('Spanner with mock server', () => { const gotUnaryCalls = xGoogReqIDInterceptor.getUnaryCalls(); assert.deepStrictEqual( gotUnaryCalls[0].method, - '/google.spanner.v1.Spanner/BatchCreateSessions' + '/google.spanner.v1.Spanner/BatchCreateSessions', ); // It is non-deterministic to try to get the exact clientId used to invoke .BatchCreateSessions // given that these tests run as a collective and sessions are pooled. assert.deepStrictEqual( gotUnaryCalls.slice(1), - wantUnaryCallsWithoutBatchCreateSessions + wantUnaryCallsWithoutBatchCreateSessions, ); const gotStreamingCalls = xGoogReqIDInterceptor.getStreamingCalls(); @@ -5890,7 +5976,7 @@ describe('Spanner with mock server', () => { assert.strictEqual( X_GOOG_SPANNER_REQUEST_ID_SPAN_ATTR in span.attributes, true, - `Missing ${X_GOOG_SPANNER_REQUEST_ID_SPAN_ATTR} for ${span.name}` + `Missing ${X_GOOG_SPANNER_REQUEST_ID_SPAN_ATTR} for ${span.name}`, ); } }); @@ -5903,7 +5989,7 @@ describe('Spanner with mock server', () => { function executeSimpleUpdate( database: Database, - update: string | ExecuteSqlRequest + update: string | ExecuteSqlRequest, ): Promise { return database .runTransactionAsync<[number]>((transaction): Promise<[number]> => { @@ -5919,7 +6005,10 @@ function executeSimpleUpdate( return rowCount; }) .catch(() => { - transaction.rollback().then(() => {}); + transaction + .rollback() + .then(() => {}) + .catch(() => {}); return [-1]; }); }) @@ -5930,7 +6019,7 @@ function executeSimpleUpdate( function getRowCountFromStreamingSql( context: Database | Transaction, - query: ExecuteSqlRequest + query: ExecuteSqlRequest, ): Promise { return new Promise((resolve, reject) => { let rows = 0; diff --git a/test/table.ts b/test/table.ts index 8cfa0b27f..8d2ac7a26 100644 --- a/test/table.ts +++ b/test/table.ts @@ -481,7 +481,7 @@ describe('Table', () => { const gaxOptions = {}; (sandbox.stub(transaction, 
'insert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, gaxOptions); @@ -495,7 +495,7 @@ describe('Table', () => { const insertRowsOptions = {returnCommitStats: true}; (sandbox.stub(transaction, 'insert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, insertRowsOptions); @@ -512,7 +512,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'insert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, insertRowsOptions); @@ -528,7 +528,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'insert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, insertRowsOptions); @@ -544,7 +544,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'insert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, insertRowsOptions); @@ -567,10 +567,13 @@ describe('Table', () => { const stream = through.obj(); - setImmediate(() => { - split(rows, stream).then(() => { + setImmediate(async () => { + try { + await split(rows, stream); stream.end(); - }); + } catch (err) { + stream.destroy(err as Error); + } }); return stream; @@ -651,7 +654,7 @@ describe('Table', () => { const gaxOptions = {}; (sandbox.stub(transaction, 'replace') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, gaxOptions); @@ -665,7 +668,7 @@ describe('Table', () => { const replaceRowsOptions = {returnCommitStats: true}; (sandbox.stub(transaction, 'replace') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, replaceRowsOptions); @@ -682,7 +685,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'replace') 
as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, replaceRowsOptions); @@ -698,7 +701,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'replace') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, replaceRowsOptions); @@ -714,7 +717,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'replace') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, replaceRowsOptions); @@ -757,7 +760,7 @@ describe('Table', () => { const gaxOptions = {}; (sandbox.stub(transaction, 'update') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, gaxOptions); @@ -771,7 +774,7 @@ describe('Table', () => { const updateRowsOptions = {returnCommitStats: true}; (sandbox.stub(transaction, 'update') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, updateRowsOptions); @@ -788,7 +791,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'update') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, updateRowsOptions); @@ -804,7 +807,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'update') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, updateRowsOptions); @@ -820,7 +823,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'update') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, updateRowsOptions); @@ -863,7 +866,7 @@ describe('Table', () => { const gaxOptions = {}; (sandbox.stub(transaction, 'upsert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { 
assert.strictEqual(options, gaxOptions); @@ -877,7 +880,7 @@ describe('Table', () => { const upsertRowsOptions = {returnCommitStats: true}; (sandbox.stub(transaction, 'upsert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, upsertRowsOptions); @@ -894,7 +897,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'upsert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, upsertRowsOptions); @@ -910,7 +913,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'upsert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, upsertRowsOptions); @@ -926,7 +929,7 @@ describe('Table', () => { }; (sandbox.stub(transaction, 'upsert') as sinon.SinonStub).withArgs( table.name, - ROW + ROW, ); transaction.commit = options => { assert.strictEqual(options, upsertRowsOptions); diff --git a/test/transaction-runner.ts b/test/transaction-runner.ts index 0c8aed99a..2ed0f23d5 100644 --- a/test/transaction-runner.ts +++ b/test/transaction-runner.ts @@ -337,7 +337,7 @@ describe('TransactionRunner', () => { SESSION, fakeTransaction, runFn, - options + options, ); assert.deepStrictEqual(r.options, options); @@ -445,7 +445,7 @@ describe('TransactionRunner', () => { concat(data => { assert.deepStrictEqual(data, fakeData); done(); - }) + }), ); }); @@ -497,7 +497,7 @@ describe('TransactionRunner', () => { assert.deepStrictEqual(data, fakeData); assert.strictEqual(runFn.callCount, 2); done(); - }) + }), ); }); @@ -536,7 +536,7 @@ describe('TransactionRunner', () => { SESSION, fakeTransaction, runFn, - options + options, ); assert.deepStrictEqual(r.options, options); diff --git a/test/transaction.ts b/test/transaction.ts index f20dc6ce0..50cf4717f 100644 --- a/test/transaction.ts +++ b/test/transaction.ts @@ -850,7 +850,7 @@ describe('Transaction', () => { stream.on('error', 
error => { assert.strictEqual( error.message, - 'Value of type undefined not recognized.' + 'Value of type undefined not recognized.', ); done(); }); @@ -1406,7 +1406,7 @@ describe('Transaction', () => { transaction.batchUpdate(null, err => { assert.strictEqual( err.message, - 'batchUpdate requires at least 1 DML statement.' + 'batchUpdate requires at least 1 DML statement.', ); assert.strictEqual(err.code, 3); assert.deepStrictEqual(err.rowCounts, []); @@ -1418,7 +1418,7 @@ describe('Transaction', () => { transaction.batchUpdate([], err => { assert.strictEqual( err.message, - 'batchUpdate requires at least 1 DML statement.' + 'batchUpdate requires at least 1 DML statement.', ); assert.strictEqual(err.code, 3); assert.deepStrictEqual(err.rowCounts, []); @@ -1447,8 +1447,8 @@ describe('Transaction', () => { [X_GOOG_SPANNER_REQUEST_ID_HEADER]: craftRequestId(1, 1, 1, 1), [LEADER_AWARE_ROUTING_HEADER]: 'true', }, - transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); }); @@ -1484,7 +1484,7 @@ describe('Transaction', () => { assert.deepStrictEqual(rowCounts, []); assert.strictEqual(apiResponse, fakeResponse); done(); - } + }, ); const requestCallback = stub.lastCall.args[1]; @@ -1508,7 +1508,7 @@ describe('Transaction', () => { assert.deepStrictEqual(rowCounts, expectedRowCounts); assert.strictEqual(apiResponse, fakeResponse); done(); - } + }, ); const requestCallback = stub.lastCall.args[1]; @@ -1529,7 +1529,7 @@ describe('Transaction', () => { assert.deepStrictEqual(rowCounts, expectedRowCounts); assert.strictEqual(apiResponse, fakeResponse); done(); - } + }, ); const requestCallback = stub.lastCall.args[1]; @@ -1559,7 +1559,7 @@ describe('Transaction', () => { assert.deepStrictEqual(rowCounts, expectedRowCounts); assert.deepStrictEqual(apiResponse, fakeResponse); done(); - } + }, ); const requestCallback = stub.lastCall.args[1]; @@ -1583,8 +1583,8 @@ describe('Transaction', () => { headers, Object.assign( {[LEADER_AWARE_ROUTING_HEADER]: true}, - 
transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); }); @@ -1628,8 +1628,8 @@ describe('Transaction', () => { headers, Object.assign( {[LEADER_AWARE_ROUTING_HEADER]: true}, - transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); }); @@ -1654,8 +1654,8 @@ describe('Transaction', () => { headers, Object.assign( {[LEADER_AWARE_ROUTING_HEADER]: true}, - transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); }); }); @@ -1681,8 +1681,8 @@ describe('Transaction', () => { [X_GOOG_SPANNER_REQUEST_ID_HEADER]: craftRequestId(1, 1, 1, 1), [LEADER_AWARE_ROUTING_HEADER]: true, }, - transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); }); @@ -1749,7 +1749,7 @@ describe('Transaction', () => { transaction.request = config => { assert.strictEqual( config.reqOpts.requestOptions, - options.requestOptions + options.requestOptions, ); done(); }; @@ -1777,7 +1777,7 @@ describe('Transaction', () => { transaction.request = config => { assert.strictEqual( config.reqOpts.requestOptions.transactionTag, - transactionTag + transactionTag, ); done(); }; @@ -1848,11 +1848,11 @@ describe('Transaction', () => { new Error('Table TestTable not found'), { code: grpc.status.NOT_FOUND, - } + }, ); const decoratedError = Transaction.decorateCommitError( tableNotFoundErr, - [] + [], ); assert.strictEqual(decoratedError, tableNotFoundErr); }); @@ -1862,11 +1862,11 @@ describe('Transaction', () => { new Error('Invalid value for column TestColumn'), { code: grpc.status.FAILED_PRECONDITION, - } + }, ); const decoratedError = Transaction.decorateCommitError( failedPreconditionErr, - [] + [], ); assert.strictEqual(decoratedError, failedPreconditionErr); }); @@ -1874,15 +1874,15 @@ describe('Transaction', () => { it('should not decorate FAILED_PRECONDITION error with specific JSON error if mutations are empty', () => { const failedPreconditionErr = Object.assign( new Error( - 'Invalid value for column TestCol2 in table TestTable: Expected JSON.' 
+ 'Invalid value for column TestCol2 in table TestTable: Expected JSON.', ), { code: grpc.status.FAILED_PRECONDITION, - } + }, ); const decoratedError = Transaction.decorateCommitError( failedPreconditionErr, - [] + [], ); assert.strictEqual(decoratedError, failedPreconditionErr); }); @@ -1896,15 +1896,15 @@ describe('Transaction', () => { const failedPreconditionErr = Object.assign( new Error( - 'Invalid value for column TestCol2 in table TestTable: Expected JSON.' + 'Invalid value for column TestCol2 in table TestTable: Expected JSON.', ), { code: grpc.status.FAILED_PRECONDITION, - } + }, ); const decoratedError = Transaction.decorateCommitError( failedPreconditionErr, - mutations + mutations, ); assert.strictEqual(decoratedError, failedPreconditionErr); }); @@ -1918,21 +1918,21 @@ describe('Transaction', () => { const failedPreconditionErr = Object.assign( new Error( - 'Invalid value for column TestCol2 in table TestTable: Expected JSON.' + 'Invalid value for column TestCol2 in table TestTable: Expected JSON.', ), { code: grpc.status.FAILED_PRECONDITION, - } + }, ); const decoratedError = Transaction.decorateCommitError( failedPreconditionErr, - mutations + mutations, ); assert.notStrictEqual(decoratedError, failedPreconditionErr); assert.ok( decoratedError.message.includes( - 'The value is an array. Convert the value to a JSON string containing an array instead in order to insert it into a JSON column. Example: `[{"key": "value 1"}, {"key": "value 2"}]` instead of [{key: "value 1"}, {key: "value 2"}]' - ) + 'The value is an array. Convert the value to a JSON string containing an array instead in order to insert it into a JSON column. 
Example: `[{"key": "value 1"}, {"key": "value 2"}]` instead of [{key: "value 1"}, {key: "value 2"}]', + ), ); }); }); @@ -2058,8 +2058,8 @@ describe('Transaction', () => { headers, Object.assign( {[LEADER_AWARE_ROUTING_HEADER]: true}, - transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); }); @@ -2235,8 +2235,8 @@ describe('Transaction', () => { [X_GOOG_SPANNER_REQUEST_ID_HEADER]: craftRequestId(1, 1, 1, 1), [LEADER_AWARE_ROUTING_HEADER]: true, }, - transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); done(); }; @@ -2286,8 +2286,8 @@ describe('Transaction', () => { [X_GOOG_SPANNER_REQUEST_ID_HEADER]: craftRequestId(1, 1, 1, 1), [LEADER_AWARE_ROUTING_HEADER]: true, }, - transaction.commonHeaders_ - ) + transaction.commonHeaders_, + ), ); }); @@ -2355,8 +2355,8 @@ describe('Transaction', () => { headers, Object.assign( {[LEADER_AWARE_ROUTING_HEADER]: true}, - pdml.commonHeaders_ - ) + pdml.commonHeaders_, + ), ); }); }); diff --git a/tsconfig.json b/tsconfig.json index 9b165b9e7..7721598f2 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -20,6 +20,10 @@ "test/**/*.ts", "system-test/*.ts", "benchmark/*.ts", - "observability-test/*.ts" + "observability-test/*.ts", + "src/**/*.json", + "system-test/*.ts", + "protos/protos.json", + "samples/**/*.d.ts" ] }