diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 24943e116..5fa6a644a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,5 @@ # See the License for the specific language governing permissions and # limitations under the License. docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest - digest: sha256:609822e3c09b7a1bd90b99655904609f162cc15acb4704f1edf778284c36f429 -# created: 2024-10-01T19:34:30.797530443Z + image: 'gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest' + digest: 'sha256:c7e4968cfc97a204a4b2381f3ecb55cabc40c4cccf88b1ef8bef0d976be87fee' diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index c378ea755..1bd79bfe5 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,7 +1,30 @@ -Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +> Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: + +## Description + +> Please provide a detailed description for the change. +> As much as possible, please try to keep changes separate by purpose. For example, try not to make a one-line bug fix in a feature request, or add an irrelevant README change to a bug fix. + +## Impact + +> What's the impact of this change? + +## Testing + +> Have you added unit and integration tests if necessary? +> Were any tests changed? Are any breaking changes necessary? + +## Additional Information + +> Any additional details that we should be aware of? + +## Checklist + +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea - [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) +- [ ] Code coverage does not decrease +- [ ] Appropriate docs were updated +- [ ] Appropriate comments were added, particularly in complex areas or places that require background +- [ ] No new warnings or issues will be generated from this change -Fixes # 🦕 +Fixes #issue_number_goes_here 🦕 diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml index d4ca94189..73144bc0c 100644 --- a/.github/release-trigger.yml +++ b/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: nodejs-bigquery \ No newline at end of file diff --git a/.github/scripts/close-invalid-link.cjs b/.github/scripts/close-invalid-link.cjs index d7a3688e7..fdb514881 100644 --- a/.github/scripts/close-invalid-link.cjs +++ b/.github/scripts/close-invalid-link.cjs @@ -12,21 +12,26 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+const fs = require('fs'); +const yaml = require('js-yaml'); +const path = require('path'); +const TEMPLATE_FILE_PATH = path.resolve(__dirname, '../ISSUE_TEMPLATE/bug_report.yml') + async function closeIssue(github, owner, repo, number) { await github.rest.issues.createComment({ owner: owner, repo: repo, issue_number: number, - body: 'Issue was opened with an invalid reproduction link. Please make sure the repository is a valid, publicly-accessible github repository, and make sure the url is complete (example: https://github.com/googleapis/google-cloud-node)' + body: "Issue was opened with an invalid reproduction link. Please make sure the repository is a valid, publicly-accessible github repository, and make sure the url is complete (example: https://github.com/googleapis/google-cloud-node)" }); await github.rest.issues.update({ owner: owner, repo: repo, issue_number: number, - state: 'closed' + state: "closed" }); } -module.exports = async ({github, context}) => { +module.exports = async ({ github, context }) => { const owner = context.repo.owner; const repo = context.repo.repo; const number = context.issue.number; @@ -37,20 +42,32 @@ module.exports = async ({github, context}) => { issue_number: number, }); - const isBugTemplate = issue.data.body.includes('Link to the code that reproduces this issue'); + const yamlData = fs.readFileSync(TEMPLATE_FILE_PATH, 'utf8'); + const obj = yaml.load(yamlData); + const linkMatchingText = (obj.body.find(x => {return x.type === 'input' && x.validations.required === true && x.attributes.label.includes('link')})).attributes.label; + const isBugTemplate = issue.data.body.includes(linkMatchingText); if (isBugTemplate) { console.log(`Issue ${number} is a bug template`) try { - const link = issue.data.body.split('\n')[18].match(/(https?:\/\/(gist\.)?github.com\/.*)/)[0]; - console.log(`Issue ${number} contains this link: ${link}`) - const isValidLink = (await fetch(link)).ok; - console.log(`Issue ${number} has a ${isValidLink ? 'valid' : 'invalid'} link`) - if (!isValidLink) { - await closeIssue(github, owner, repo, number); - } + const text = issue.data.body; + const match = text.indexOf(linkMatchingText); + if (match !== -1) { + const nextLineIndex = text.indexOf('http', match); + if (nextLineIndex == -1) { + await closeIssue(github, owner, repo, number); + return; + } + const link = text.substring(nextLineIndex, text.indexOf('\n', nextLineIndex)); + console.log(`Issue ${number} contains this link: ${link}`); + const isValidLink = (await fetch(link)).ok; + console.log(`Issue ${number} has a ${isValidLink ? "valid" : "invalid"} link`) + if (!isValidLink) { + await closeIssue(github, owner, repo, number); + } + } } catch (err) { await closeIssue(github, owner, repo, number); } } -}; +}; \ No newline at end of file diff --git a/.github/scripts/close-unresponsive.cjs b/.github/scripts/close-unresponsive.cjs index 142dc1265..6f81b508f 100644 --- a/.github/scripts/close-unresponsive.cjs +++ b/.github/scripts/close-unresponsive.cjs @@ -1,4 +1,4 @@ -// Copyright 2024 Google LLC +/// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -13,57 +13,57 @@ // limitations under the License. 
function labeledEvent(data) { - return data.event === 'labeled' && data.label.name === 'needs more info'; - } - - const numberOfDaysLimit = 15; - const close_message = `This has been closed since a request for information has \ - not been answered for ${numberOfDaysLimit} days. It can be reopened when the \ - requested information is provided.`; - - module.exports = async ({github, context}) => { - const owner = context.repo.owner; - const repo = context.repo.repo; - - const issues = await github.rest.issues.listForRepo({ - owner: owner, - repo: repo, - labels: 'needs more info', - }); - const numbers = issues.data.map((e) => e.number); - - for (const number of numbers) { - const events = await github.paginate( - github.rest.issues.listEventsForTimeline, - { - owner: owner, - repo: repo, - issue_number: number, - }, - (response) => response.data.filter(labeledEvent) - ); - - const latest_response_label = events[events.length - 1]; - - const created_at = new Date(latest_response_label.created_at); - const now = new Date(); - const diff = now - created_at; - const diffDays = diff / (1000 * 60 * 60 * 24); - - if (diffDays > numberOfDaysLimit) { - await github.rest.issues.update({ - owner: owner, - repo: repo, - issue_number: number, - state: 'closed', - }); - - await github.rest.issues.createComment({ - owner: owner, - repo: repo, - issue_number: number, - body: close_message, - }); - } + return data.event === "labeled" && data.label.name === "needs more info"; +} + +const numberOfDaysLimit = 15; +const close_message = `This has been closed since a request for information has \ +not been answered for ${numberOfDaysLimit} days. It can be reopened when the \ +requested information is provided.`; + +module.exports = async ({ github, context }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + + const issues = await github.rest.issues.listForRepo({ + owner: owner, + repo: repo, + labels: "needs more info", + }); + const numbers = issues.data.map((e) => e.number); + + for (const number of numbers) { + const events = await github.paginate( + github.rest.issues.listEventsForTimeline, + { + owner: owner, + repo: repo, + issue_number: number, + }, + (response) => response.data.filter(labeledEvent) + ); + + const latest_response_label = events[events.length - 1]; + + const created_at = new Date(latest_response_label.created_at); + const now = new Date(); + const diff = now - created_at; + const diffDays = diff / (1000 * 60 * 60 * 24); + + if (diffDays > numberOfDaysLimit) { + await github.rest.issues.update({ + owner: owner, + repo: repo, + issue_number: number, + state: "closed", + }); + + await github.rest.issues.createComment({ + owner: owner, + repo: repo, + issue_number: number, + body: close_message, + }); } - }; + } +}; \ No newline at end of file diff --git a/.github/scripts/fixtures/invalidIssueBody.txt b/.github/scripts/fixtures/invalidIssueBody.txt new file mode 100644 index 000000000..504bd6690 --- /dev/null +++ b/.github/scripts/fixtures/invalidIssueBody.txt @@ -0,0 +1,50 @@ +### Please make sure you have searched for information in the following guides. 
+ +- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues +- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js +- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting +- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq +- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md +- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs +- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples + +### A screenshot that you have tested with "Try this API". + + +N/A + +### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. + +not-a-link + +### A step-by-step description of how to reproduce the issue, based on the linked reproduction. + + +Change MY_PROJECT to your project name, add credentials if needed and run. + +### A clear and concise description of what the bug is, and what you expected to happen. + +The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. +TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received an instance of Object + at _write (node:internal/streams/writable:474:13) + at Writable.write (node:internal/streams/writable:502:10) + at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) + at Object.onceWrapper (node:events:633:26) + at Pumpify.emit (node:events:518:28) + at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at PassThrough.ondata (node:internal/streams/readable:1007:22) + at PassThrough.emit (node:events:518:28) + at addChunk (node:internal/streams/readable:559:12) { + code: 'ERR_INVALID_ARG_TYPE' + + +### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** + +No library should crash an application this way. \ No newline at end of file diff --git a/.github/scripts/fixtures/validIssueBody.txt b/.github/scripts/fixtures/validIssueBody.txt new file mode 100644 index 000000000..6e0ace338 --- /dev/null +++ b/.github/scripts/fixtures/validIssueBody.txt @@ -0,0 +1,50 @@ +### Please make sure you have searched for information in the following guides. 
+ +- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues +- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js +- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting +- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq +- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md +- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs +- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples + +### A screenshot that you have tested with "Try this API". + + +N/A + +### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. + +https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 + +### A step-by-step description of how to reproduce the issue, based on the linked reproduction. + + +Change MY_PROJECT to your project name, add credentials if needed and run. + +### A clear and concise description of what the bug is, and what you expected to happen. + +The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. +TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received an instance of Object + at _write (node:internal/streams/writable:474:13) + at Writable.write (node:internal/streams/writable:502:10) + at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) + at Object.onceWrapper (node:events:633:26) + at Pumpify.emit (node:events:518:28) + at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at PassThrough.ondata (node:internal/streams/readable:1007:22) + at PassThrough.emit (node:events:518:28) + at addChunk (node:internal/streams/readable:559:12) { + code: 'ERR_INVALID_ARG_TYPE' + + +### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** + +No library should crash an application this way. \ No newline at end of file diff --git a/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt b/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt new file mode 100644 index 000000000..984a420e3 --- /dev/null +++ b/.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt @@ -0,0 +1,50 @@ +### Please make sure you have searched for information in the following guides. 
+ +- [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues +- [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js +- [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting +- [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq +- [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md +- [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs +- [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples + +### A screenshot that you have tested with "Try this API". + + +N/A + +### A step-by-step description of how to reproduce the issue, based on the linked reproduction. + + +Change MY_PROJECT to your project name, add credentials if needed and run. + +### A clear and concise description of what the bug is, and what you expected to happen. + +The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. +TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received an instance of Object + at _write (node:internal/streams/writable:474:13) + at Writable.write (node:internal/streams/writable:502:10) + at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) + at Object.onceWrapper (node:events:633:26) + at Pumpify.emit (node:events:518:28) + at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) + at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) + at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) + at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) + at PassThrough.ondata (node:internal/streams/readable:1007:22) + at PassThrough.emit (node:events:518:28) + at addChunk (node:internal/streams/readable:559:12) { + code: 'ERR_INVALID_ARG_TYPE' + +### Link to the code that reproduces this issue. A link to a **public** Github Repository with a minimal reproduction. + + +https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 + +### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** + +No library should crash an application this way. 
\ No newline at end of file diff --git a/.github/scripts/package.json b/.github/scripts/package.json new file mode 100644 index 000000000..2c2e5207d --- /dev/null +++ b/.github/scripts/package.json @@ -0,0 +1,21 @@ +{ + "name": "tests", + "private": true, + "description": "tests for script", + "scripts": { + "test": "mocha tests/close-invalid-link.test.cjs && mocha tests/close-or-remove-response-label.test.cjs" + }, + "author": "Google Inc.", + "license": "Apache-2.0", + "engines": { + "node": ">=18" + }, + "dependencies": { + "js-yaml": "^4.1.0" + }, + "devDependencies": { + "@octokit/rest": "^19.0.0", + "mocha": "^10.0.0", + "sinon": "^18.0.0" + } +} \ No newline at end of file diff --git a/.github/scripts/remove-response-label.cjs b/.github/scripts/remove-response-label.cjs index 887cf349e..4a784ddf7 100644 --- a/.github/scripts/remove-response-label.cjs +++ b/.github/scripts/remove-response-label.cjs @@ -13,21 +13,21 @@ // limitations under the License. module.exports = async ({ github, context }) => { - const commenter = context.actor; - const issue = await github.rest.issues.get({ + const commenter = context.actor; + const issue = await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + const author = issue.data.user.login; + const labels = issue.data.labels.map((e) => e.name); + + if (author === commenter && labels.includes("needs more info")) { + await github.rest.issues.removeLabel({ owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number, + name: "needs more info", }); - const author = issue.data.user.login; - const labels = issue.data.labels.map((e) => e.name); - - if (author === commenter && labels.includes('needs more info')) { - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - name: 'needs more info', - }); - } - }; + } +}; \ No newline at end of file diff --git a/.github/scripts/tests/close-invalid-link.test.cjs b/.github/scripts/tests/close-invalid-link.test.cjs new file mode 100644 index 000000000..f63ee89c8 --- /dev/null +++ b/.github/scripts/tests/close-invalid-link.test.cjs @@ -0,0 +1,86 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const { describe, it } = require('mocha'); +const closeInvalidLink = require('../close-invalid-link.cjs'); +const fs = require('fs'); +const sinon = require('sinon'); + +describe('close issues with invalid links', () => { + let octokitStub; + let issuesStub; + + beforeEach(() => { + issuesStub = { + get: sinon.stub(), + createComment: sinon.stub(), + update: sinon.stub(), + }; + octokitStub = { + rest: { + issues: issuesStub, + }, + }; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('does not do anything if it is not a bug', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: "I'm having a problem with this." } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.createComment); + sinon.assert.notCalled(issuesStub.update); + }); + + it('does not do anything if it is a bug with an appropriate link', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBody.txt', 'utf-8') } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.createComment); + sinon.assert.notCalled(issuesStub.update); + }); + + it('does not do anything if it is a bug with an appropriate link and the template changes', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBodyDifferentLinkLocation.txt', 'utf-8') } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.createComment); + sinon.assert.notCalled(issuesStub.update); + }); + + it('closes the issue if the link is invalid', async () => { + const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/invalidIssueBody.txt', 'utf-8') } }); + + await closeInvalidLink({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.calledOnce(issuesStub.createComment); + sinon.assert.calledOnce(issuesStub.update); + }); +}); \ No newline at end of file diff --git a/.github/scripts/tests/close-or-remove-response-label.test.cjs b/.github/scripts/tests/close-or-remove-response-label.test.cjs new file mode 100644 index 000000000..fb092c536 --- /dev/null +++ b/.github/scripts/tests/close-or-remove-response-label.test.cjs @@ -0,0 +1,109 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict'; + +const { describe, it, beforeEach, afterEach } = require('mocha'); +const removeResponseLabel = require('../remove-response-label.cjs'); +const closeUnresponsive = require('../close-unresponsive.cjs'); +const sinon = require('sinon'); + +function getISODateDaysAgo(days) { + const today = new Date(); + const daysAgo = new Date(today.setDate(today.getDate() - days)); + return daysAgo.toISOString(); +} + +describe('close issues or remove needs more info labels', () => { + let octokitStub; + let issuesStub; + let paginateStub; + + beforeEach(() => { + issuesStub = { + listForRepo: sinon.stub(), + update: sinon.stub(), + createComment: sinon.stub(), + get: sinon.stub(), + removeLabel: sinon.stub(), + }; + paginateStub = sinon.stub(); + octokitStub = { + rest: { + issues: issuesStub, + }, + paginate: paginateStub, + }; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('closes the issue if the OP has not responded within the allotted time and there is a needs-more-info label', async () => { + const context = { owner: 'testOrg', repo: 'testRepo' }; + const issuesInRepo = [{ user: { login: 'OP' }, labels: [{ name: 'needs more info' }] }]; + const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, created_at: getISODateDaysAgo(16) }]; + + issuesStub.listForRepo.resolves({ data: issuesInRepo }); + paginateStub.resolves(eventsInIssue); + + await closeUnresponsive({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.listForRepo); + sinon.assert.calledOnce(paginateStub); + sinon.assert.calledOnce(issuesStub.update); + sinon.assert.calledOnce(issuesStub.createComment); + }); + + it('does nothing if not enough time has passed and there is a needs-more-info label', async () => { + const context = { owner: 'testOrg', repo: 'testRepo' }; + const issuesInRepo = [{ user: { login: 'OP' }, labels: [{ name: 'needs more info' }] }]; + const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, created_at: getISODateDaysAgo(14) }]; + + issuesStub.listForRepo.resolves({ data: issuesInRepo }); + paginateStub.resolves(eventsInIssue); + + await closeUnresponsive({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.listForRepo); + sinon.assert.calledOnce(paginateStub); + sinon.assert.notCalled(issuesStub.update); + sinon.assert.notCalled(issuesStub.createComment); + }); + + it('removes the label if OP responded', async () => { + const context = { actor: 'OP', repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; + + issuesStub.get.resolves({ data: issueContext }); + + await removeResponseLabel({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.calledOnce(issuesStub.removeLabel); + }); + + it('does not remove the label if author responded', async () => { + const context = { actor: 'repo-maintainer', repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; + const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; + + issuesStub.get.resolves({ data: issueContext }); + + await removeResponseLabel({ github: octokitStub, context }); + + sinon.assert.calledOnce(issuesStub.get); + sinon.assert.notCalled(issuesStub.removeLabel); + }); +}); \ No newline at end of file diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index b46e4c4d6..a013376d1 100644 --- a/.github/sync-repo-settings.yaml +++ 
b/.github/sync-repo-settings.yaml @@ -8,9 +8,9 @@ branchProtectionRules: - "ci/kokoro: Samples test" - "ci/kokoro: System test" - lint - - test (14) - - test (16) - test (18) + - test (20) + - test (22) - cla/google - windows - OwlBot Post Processor diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4892eb2c5..883082c0b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,10 +9,10 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18, 20] + node: [18, 20, 22] steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 with: node-version: ${{ matrix.node }} - run: node --version @@ -26,13 +26,27 @@ jobs: - run: npm test env: MOCHA_THROW_DEPRECATION: false + test-script: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 18 + - run: node --version + - run: npm install --engine-strict + working-directory: .github/scripts + - run: npm test + working-directory: .github/scripts + env: + MOCHA_THROW_DEPRECATION: false windows: runs-on: windows-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 with: - node-version: 14 + node-version: 18 - run: npm install --engine-strict - run: npm test env: @@ -40,19 +54,19 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 with: - node-version: 14 + node-version: 18 - run: npm install - run: npm run lint docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 with: - node-version: 14 + node-version: 18 - run: npm install - run: npm run docs - uses: JustinBeckwith/linkinator-action@v1 diff --git a/.github/workflows/discovery.yaml b/.github/workflows/discovery.yaml index c75d2248d..21e09ec99 100644 --- a/.github/workflows/discovery.yaml +++ b/.github/workflows/discovery.yaml @@ -7,7 +7,7 @@ jobs: sync: runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: 16 @@ -20,7 +20,7 @@ jobs: # Fix formatting - run: npm run fix # Submit pull request - - uses: googleapis/code-suggester@v4 + - uses: googleapis/code-suggester@v5 env: ACCESS_TOKEN: ${{ secrets.YOSHI_CODE_BOT_TOKEN }} with: diff --git a/.github/workflows/issues-no-repro.yaml b/.github/workflows/issues-no-repro.yaml index 442a46bcc..9b2f70148 100644 --- a/.github/workflows/issues-no-repro.yaml +++ b/.github/workflows/issues-no-repro.yaml @@ -11,6 +11,11 @@ jobs: pull-requests: write steps: - uses: actions/checkout@v4 + - uses: actions/setup-node@v3 + with: + node-version: 18 + - run: npm install + working-directory: ./.github/scripts - uses: actions/github-script@v7 with: script: | diff --git a/.kokoro/common.cfg b/.kokoro/common.cfg index 68f910dd9..8687a0200 100644 --- a/.kokoro/common.cfg +++ b/.kokoro/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/.kokoro/continuous/node14/common.cfg b/.kokoro/continuous/node18/common.cfg similarity index 89% rename from .kokoro/continuous/node14/common.cfg rename to .kokoro/continuous/node18/common.cfg index 68f910dd9..8687a0200 100644 --- a/.kokoro/continuous/node14/common.cfg +++ b/.kokoro/continuous/node18/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/.kokoro/continuous/node14/lint.cfg b/.kokoro/continuous/node18/lint.cfg similarity index 100% rename from .kokoro/continuous/node14/lint.cfg rename to .kokoro/continuous/node18/lint.cfg diff --git a/.kokoro/continuous/node14/samples-test.cfg b/.kokoro/continuous/node18/samples-test.cfg similarity index 100% rename from .kokoro/continuous/node14/samples-test.cfg rename to .kokoro/continuous/node18/samples-test.cfg diff --git a/.kokoro/continuous/node14/system-test.cfg b/.kokoro/continuous/node18/system-test.cfg similarity index 100% rename from .kokoro/continuous/node14/system-test.cfg rename to .kokoro/continuous/node18/system-test.cfg diff --git a/.kokoro/continuous/node14/test.cfg b/.kokoro/continuous/node18/test.cfg similarity index 100% rename from .kokoro/continuous/node14/test.cfg rename to .kokoro/continuous/node18/test.cfg diff --git a/.kokoro/presubmit/node14/common.cfg b/.kokoro/presubmit/node18/common.cfg similarity index 89% rename from .kokoro/presubmit/node14/common.cfg rename to .kokoro/presubmit/node18/common.cfg index 68f910dd9..8687a0200 100644 --- a/.kokoro/presubmit/node14/common.cfg +++ b/.kokoro/presubmit/node18/common.cfg @@ -16,7 +16,7 @@ build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/.kokoro/presubmit/node14/samples-test.cfg b/.kokoro/presubmit/node18/samples-test.cfg similarity index 100% rename from .kokoro/presubmit/node14/samples-test.cfg rename to .kokoro/presubmit/node18/samples-test.cfg diff --git a/.kokoro/presubmit/node14/system-test.cfg b/.kokoro/presubmit/node18/system-test.cfg similarity index 100% rename from .kokoro/presubmit/node14/system-test.cfg rename to .kokoro/presubmit/node18/system-test.cfg diff --git a/.kokoro/presubmit/node14/test.cfg b/.kokoro/presubmit/node18/test.cfg similarity index 100% rename from .kokoro/presubmit/node14/test.cfg rename to .kokoro/presubmit/node18/test.cfg diff --git a/.kokoro/release/docs-devsite.cfg b/.kokoro/release/docs-devsite.cfg index 5eb4cf6ac..682c8277c 100644 --- a/.kokoro/release/docs-devsite.cfg +++ b/.kokoro/release/docs-devsite.cfg @@ -11,7 +11,7 @@ before_action { # doc publications use a Python image. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } # Download trampoline resources. 
diff --git a/.kokoro/release/docs.cfg b/.kokoro/release/docs.cfg index 83eb5b102..dd568cd66 100644 --- a/.kokoro/release/docs.cfg +++ b/.kokoro/release/docs.cfg @@ -11,7 +11,7 @@ before_action { # doc publications use a Python image. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } # Download trampoline resources. diff --git a/.kokoro/release/docs.sh b/.kokoro/release/docs.sh index 1d8f3f490..e9079a605 100755 --- a/.kokoro/release/docs.sh +++ b/.kokoro/release/docs.sh @@ -16,7 +16,7 @@ set -eo pipefail -# build jsdocs (Python is installed on the Node 10 docker image). +# build jsdocs (Python is installed on the Node 18 docker image). if [[ -z "$CREDENTIALS" ]]; then # if CREDENTIALS are explicitly set, assume we're testing locally # and don't set NPM_CONFIG_PREFIX. diff --git a/.kokoro/release/publish.cfg b/.kokoro/release/publish.cfg index 3dfef4f9d..15dd939c0 100644 --- a/.kokoro/release/publish.cfg +++ b/.kokoro/release/publish.cfg @@ -30,7 +30,7 @@ build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" + value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" } env_vars: { diff --git a/.kokoro/samples-test.sh b/.kokoro/samples-test.sh index 8c5d108cb..528775394 100755 --- a/.kokoro/samples-test.sh +++ b/.kokoro/samples-test.sh @@ -16,7 +16,9 @@ set -eo pipefail -export NPM_CONFIG_PREFIX=${HOME}/.npm-global +# Ensure the npm global directory is writable, otherwise rebuild `npm` +mkdir -p $NPM_CONFIG_PREFIX +npm config -g ls || npm i -g npm@`npm --version` # Setup service account credentials. export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account @@ -56,7 +58,7 @@ fi # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=14 +COVERAGE_NODE=18 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/.kokoro/system-test.sh b/.kokoro/system-test.sh index 0b3043d26..a90d5cfec 100755 --- a/.kokoro/system-test.sh +++ b/.kokoro/system-test.sh @@ -49,7 +49,7 @@ npm run system-test # codecov combines coverage across integration and unit tests. Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=14 +COVERAGE_NODE=18 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/.kokoro/test.bat b/.kokoro/test.bat index 0bb124052..caf825656 100644 --- a/.kokoro/test.bat +++ b/.kokoro/test.bat @@ -21,7 +21,7 @@ cd .. @rem we upgrade Node.js in the image: SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm -call nvm use v14.17.3 +call nvm use 18 call which node call npm install || goto :error diff --git a/.kokoro/test.sh b/.kokoro/test.sh index 862d478d3..0d9f6392a 100755 --- a/.kokoro/test.sh +++ b/.kokoro/test.sh @@ -39,7 +39,7 @@ npm test # codecov combines coverage across integration and unit tests. 
Include # the logic below for any environment you wish to collect coverage for: -COVERAGE_NODE=14 +COVERAGE_NODE=18 if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then NYC_BIN=./node_modules/nyc/bin/nyc.js if [ -f "$NYC_BIN" ]; then diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 4d0311212..5d6cfcca5 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -44,7 +44,7 @@ # the project root. # # Here is an example for running this script. -# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:10-user \ +# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:18-user \ # TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \ # .kokoro/trampoline_v2.sh diff --git a/CHANGELOG.md b/CHANGELOG.md index 9c6704683..89b365ab0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,17 @@ [1]: https://www.npmjs.com/package/@google-cloud/bigquery?activeTab=versions +## [8.0.0](https://github.com/googleapis/nodejs-bigquery/compare/v7.9.4...v8.0.0) (2025-04-23) + + +### ⚠ BREAKING CHANGES + +* migrate to node 18 ([#1458](https://github.com/googleapis/nodejs-bigquery/issues/1458)) + +### Miscellaneous Chores + +* Migrate to node 18 ([#1458](https://github.com/googleapis/nodejs-bigquery/issues/1458)) ([6cd706b](https://github.com/googleapis/nodejs-bigquery/commit/6cd706b6e96ac54a9289211e7e3d2cc1f4e934e2)) + ## [7.9.4](https://github.com/googleapis/nodejs-bigquery/compare/v7.9.3...v7.9.4) (2025-04-02) diff --git a/benchmark/bench.ts b/benchmark/bench.ts index 155cc422a..e8995f277 100644 --- a/benchmark/bench.ts +++ b/benchmark/bench.ts @@ -18,7 +18,7 @@ import {BigQuery} from '../src'; if (process.argv.length < 3) { throw new Error( 'need query file; ' + - `usage: '${process.argv[0]} ${process.argv[1]} '` + `usage: '${process.argv[0]} ${process.argv[1]} '`, ); } @@ -31,7 +31,7 @@ const client = new BigQuery(); Promise.all( queries.map((query: string) => { return doQuery(query).catch(console.error); - }) + }), ).catch(console.error); async function doQuery(queryTxt: string) { @@ -54,7 +54,7 @@ async function doQuery(queryTxt: string) { const receivedCols = Object.keys(row).length; const error = new Error( `query "${queryTxt}": ` + - `wrong number of columns, want ${numCols} got ${receivedCols}` + `wrong number of columns, want ${numCols} got ${receivedCols}`, ); reject(error); } @@ -65,7 +65,7 @@ async function doQuery(queryTxt: string) { console.log( `"${queryTxt}",${numRows},${numCols},${timeFirstByteMilli / 1000},${ timeTotalMilli / 1000 - }` + }`, ); resolve(); }); diff --git a/owlbot.py b/owlbot.py index bb0fc255d..302e64f08 100644 --- a/owlbot.py +++ b/owlbot.py @@ -21,4 +21,3 @@ # Regenerate Discovery types. 
shell.run(('npm','run','types')) -node.fix() diff --git a/package.json b/package.json index 6383d2cad..85e3e73b3 100644 --- a/package.json +++ b/package.json @@ -1,11 +1,11 @@ { "name": "@google-cloud/bigquery", "description": "Google BigQuery Client Library for Node.js", - "version": "7.9.4", + "version": "8.0.0", "license": "Apache-2.0", "author": "Google LLC", "engines": { - "node": ">=14.0.0" + "node": ">=18" }, "repository": "googleapis/nodejs-bigquery", "main": "./build/src/index.js", @@ -48,44 +48,49 @@ "precompile": "gts clean" }, "dependencies": { - "@google-cloud/common": "^5.0.0", - "@google-cloud/paginator": "^5.0.2", - "@google-cloud/precise-date": "^4.0.0", - "@google-cloud/promisify": "4.0.0", - "arrify": "^2.0.1", - "big.js": "^6.0.0", - "duplexify": "^4.0.0", + "@google-cloud/common": "^6.0.0", + "@google-cloud/paginator": "^6.0.0", + "@google-cloud/precise-date": "^5.0.0", + "@google-cloud/promisify": "^5.0.0", + "teeny-request": "^10.0.0", + "arrify": "^3.0.0", + "big.js": "^6.2.2", + "duplexify": "^4.1.3", "extend": "^3.0.2", "is": "^3.3.0", - "stream-events": "^1.0.5", - "uuid": "^9.0.0" + "stream-events": "^1.0.5" + }, + "overrides": { + "@google-cloud/common": { + "google-auth-library": "10.0.0-rc.1" + } }, "devDependencies": { - "@google-cloud/storage": "^7.0.0", - "@types/big.js": "^6.2.0", - "@types/extend": "^3.0.1", - "@types/is": "0.0.25", - "@types/mocha": "^9.0.0", - "@types/node": "^20.0.0", - "@types/proxyquire": "^1.3.28", - "@types/sinon": "^10.0.0", - "@types/uuid": "^9.0.0", - "c8": "^9.0.0", - "codecov": "^3.5.0", + "@google-cloud/storage": "^7.16.0", + "@types/big.js": "^6.2.2", + "@types/duplexify": "^3.6.4", + "@types/extend": "^3.0.4", + "@types/is": "^0.0.25", + "@types/mocha": "^10.0.10", + "@types/node": "^22.14.0", + "@types/proxyquire": "^1.3.31", + "@types/sinon": "^17.0.4", + "c8": "^10.1.3", + "codecov": "^3.8.3", "discovery-tsd": "^0.3.0", - "eslint-plugin-prettier": "^5.0.0", - "gts": "^5.0.0", - "jsdoc": "^4.0.0", + "eslint-plugin-prettier": "^5.2.6", + "gts": "^6.0.2", + "jsdoc": "^4.0.4", "jsdoc-fresh": "^3.0.0", "jsdoc-region-tag": "^3.0.0", - "linkinator": "^3.0.0", - "mocha": "^9.2.2", - "pack-n-play": "^2.0.0", - "prettier": "^3.0.0", - "proxyquire": "^2.1.0", - "sinon": "^18.0.0", - "nise": "6.0.0", - "path-to-regexp": "6.3.0", - "typescript": "^5.1.6" + "linkinator": "^6.1.2", + "mocha": "^11.1.0", + "nise": "^6.1.1", + "pack-n-play": "^3.0.1", + "path-to-regexp": "^8.2.0", + "prettier": "^3.5.3", + "proxyquire": "^2.1.3", + "sinon": "^20.0.0", + "typescript": "^5.8.2" } } diff --git a/samples/addColumnLoadAppend.js b/samples/addColumnLoadAppend.js index ed6db0c6f..aac537017 100644 --- a/samples/addColumnLoadAppend.js +++ b/samples/addColumnLoadAppend.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', tableId = 'my_table', - fileName = '/path/to/file.csv' + fileName = '/path/to/file.csv', ) { // [START bigquery_add_column_load_append] // Import the Google Cloud client libraries diff --git a/samples/auth-user-sample/authUserFlow.js b/samples/auth-user-sample/authUserFlow.js index c45be49f2..d2606bb23 100644 --- a/samples/auth-user-sample/authUserFlow.js +++ b/samples/auth-user-sample/authUserFlow.js @@ -45,7 +45,7 @@ async function getRedirectUrl() { const oAuth2Client = new OAuth2Client( keys.installed.client_id, keys.installed.client_secret, - keys.installed.redirect_uris[0] + keys.installed.redirect_uris[0], ); // Generate the url that will be used for the consent dialog. 
@@ -56,7 +56,7 @@ async function getRedirectUrl() { }); console.info( - `Please visit this URL to authorize this application: ${authorizeUrl}` + `Please visit this URL to authorize this application: ${authorizeUrl}`, ); const code = await rl.questionAsync('Enter the authorization code: '); @@ -71,7 +71,7 @@ async function exchangeCode(code) { const oAuth2Client = new OAuth2Client( keys.installed.client_id, keys.installed.client_secret, - keys.installed.redirect_uris[0] + keys.installed.redirect_uris[0], ); const r = await oAuth2Client.getToken(code); diff --git a/samples/authViewTutorial.js b/samples/authViewTutorial.js index 7710c7dab..294586e3c 100644 --- a/samples/authViewTutorial.js +++ b/samples/authViewTutorial.js @@ -19,7 +19,7 @@ function main( sourceDatasetId = 'shared_views', sourceTableId = 'my_source_table', sharedDatasetId = 'shared_views', - sharedViewId = 'github_analyst_view' + sharedViewId = 'github_analyst_view', ) { // [START bigquery_authorized_view_tutorial] async function authorizedViewTutorial() { diff --git a/samples/browseTable.js b/samples/browseTable.js index 4fe94da93..4dba98d9c 100644 --- a/samples/browseTable.js +++ b/samples/browseTable.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset - tableId = 'my_table' // Table to create + tableId = 'my_table', // Table to create ) { // [START bigquery_browse_table] // Import the Google Cloud client library using default credentials diff --git a/samples/copyTable.js b/samples/copyTable.js index 76045270d..7fde19c69 100644 --- a/samples/copyTable.js +++ b/samples/copyTable.js @@ -18,7 +18,7 @@ function main( srcDatasetId = 'my_src_dataset', srcTableId = 'my_src_table', destDatasetId = 'my_dest_dataset', - destTableId = 'my_dest_table' + destTableId = 'my_dest_table', ) { // [START bigquery_copy_table] // Import the Google Cloud client library and create a client diff --git a/samples/copyTableMultipleSource.js b/samples/copyTableMultipleSource.js index 4dc72ca91..d49bda4f3 100644 --- a/samples/copyTableMultipleSource.js +++ b/samples/copyTableMultipleSource.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', // Existing dataset sourceTable = 'my_table', // Existing table to copy from - destinationTable = 'testing' // Existing table to copy to + destinationTable = 'testing', // Existing table to copy to ) { // [START bigquery_copy_table_multiple_source] // Import the Google Cloud client library diff --git a/samples/createRoutine.js b/samples/createRoutine.js index 246edbb35..0ce4febf0 100644 --- a/samples/createRoutine.js +++ b/samples/createRoutine.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset - routineId = 'my_routine' // Routine to be created + routineId = 'my_routine', // Routine to be created ) { // [START bigquery_create_routine] // Import the Google Cloud client library and create a client diff --git a/samples/createRoutineDDL.js b/samples/createRoutineDDL.js index 3c69beecb..6f8b1023c 100644 --- a/samples/createRoutineDDL.js +++ b/samples/createRoutineDDL.js @@ -17,7 +17,7 @@ function main( projectId = 'my_project', // GCP project datasetId = 'my_dataset', // Existing dataset - routineId = 'my_routine' // Routine to be created + routineId = 'my_routine', // Routine to be created ) { // [START bigquery_create_routine_ddl] // Import the Google Cloud client library and create a client diff --git a/samples/createTable.js b/samples/createTable.js index ac00382ab..8fa76a3ec 100644 --- a/samples/createTable.js +++ b/samples/createTable.js @@ 
-22,7 +22,7 @@ function main( {name: 'Age', type: 'INTEGER'}, {name: 'Weight', type: 'FLOAT'}, {name: 'IsMagic', type: 'BOOLEAN'}, - ] + ], ) { // [START bigquery_create_table] // Import the Google Cloud client library and create a client diff --git a/samples/createTableColumnACL.js b/samples/createTableColumnACL.js index 4e1542eb3..ad1aa2793 100644 --- a/samples/createTableColumnACL.js +++ b/samples/createTableColumnACL.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', // Existing dataset tableId = 'my_table', // Table to be created - policyTagName = 'projects/myProject/location/us/taxonomies/myTaxonomy/policyTags/myPolicyTag' // Existing policy tag + policyTagName = 'projects/myProject/location/us/taxonomies/myTaxonomy/policyTags/myPolicyTag', // Existing policy tag ) { // [START bigquery_create_table_column_ACL] // Import the Google Cloud client library and create a client diff --git a/samples/createView.js b/samples/createView.js index fee030ab3..f28cac87f 100644 --- a/samples/createView.js +++ b/samples/createView.js @@ -19,7 +19,7 @@ function main( myTableId = 'my_shared_view', // View to be created projectId = 'bigquery-public-data', // Source GCP project ID sourceDatasetId = 'usa_names', // Source dataset ID - sourceTableId = 'usa_1910_current' //Source table ID + sourceTableId = 'usa_1910_current', //Source table ID ) { // [START bigquery_create_view] // Import the Google Cloud client library and create a client diff --git a/samples/ddlCreateView.js b/samples/ddlCreateView.js index 3cd25f695..ab828d07a 100644 --- a/samples/ddlCreateView.js +++ b/samples/ddlCreateView.js @@ -17,7 +17,7 @@ function main( projectId = 'my_project', // GCP Project ID datasetId = 'my_dataset', // Existing dataset ID - tableId = 'my_new_view' // View to be created + tableId = 'my_new_view', // View to be created ) { // [START bigquery_ddl_create_view] // Import the Google Cloud client library and create a client diff --git a/samples/deleteLabelDataset.js b/samples/deleteLabelDataset.js index bcf1434ba..e9736a993 100644 --- a/samples/deleteLabelDataset.js +++ b/samples/deleteLabelDataset.js @@ -20,7 +20,7 @@ // usage: node deleteLabelDataset.js function main( - datasetId = 'my_dataset' // Existing dataset + datasetId = 'my_dataset', // Existing dataset ) { // [START bigquery_delete_label_dataset] // Import the Google Cloud client library diff --git a/samples/deleteRoutine.js b/samples/deleteRoutine.js index 4921fe7b2..72f89313a 100644 --- a/samples/deleteRoutine.js +++ b/samples/deleteRoutine.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset - routineId = 'my_routine' // Routine to be deleted + routineId = 'my_routine', // Routine to be deleted ) { // [START bigquery_delete_routine] // Import the Google Cloud client library and create a client diff --git a/samples/extractTableCompressed.js b/samples/extractTableCompressed.js index 280d4d1cc..e13dcddb6 100644 --- a/samples/extractTableCompressed.js +++ b/samples/extractTableCompressed.js @@ -18,7 +18,7 @@ function main( datasetId = 'my_dataset', tableId = 'my_table', bucketName = 'my-bucket', - filename = 'file.csv' + filename = 'file.csv', ) { // [START bigquery_extract_table_compressed] // Import the Google Cloud client libraries diff --git a/samples/extractTableJSON.js b/samples/extractTableJSON.js index 6a1065666..80d748f5c 100644 --- a/samples/extractTableJSON.js +++ b/samples/extractTableJSON.js @@ -18,7 +18,7 @@ function main( datasetId = 'my_dataset', tableId = 'my_table', bucketName = 'my-bucket', - 
filename = 'file.json' + filename = 'file.json', ) { // [START bigquery_extract_table_json] // Import the Google Cloud client libraries diff --git a/samples/extractTableToGCS.js b/samples/extractTableToGCS.js index a27fb7031..4a4c6fb29 100644 --- a/samples/extractTableToGCS.js +++ b/samples/extractTableToGCS.js @@ -18,7 +18,7 @@ function main( datasetId = 'my_dataset', tableId = 'my_table', bucketName = 'my-bucket', - filename = 'file.csv' + filename = 'file.csv', ) { // [START bigquery_extract_table] // Import the Google Cloud client libraries diff --git a/samples/getRoutine.js b/samples/getRoutine.js index 46213c70c..836bf5256 100644 --- a/samples/getRoutine.js +++ b/samples/getRoutine.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset - routineId = 'my_routine' // Existing routine + routineId = 'my_routine', // Existing routine ) { // [START bigquery_get_routine] // Import the Google Cloud client library and create a client @@ -38,7 +38,7 @@ function main( const [routine] = await dataset.routine(routineId).get(); console.log( - `Routine ${routine.metadata.routineReference.routineId} retrieved.` + `Routine ${routine.metadata.routineReference.routineId} retrieved.`, ); } getRoutine(); diff --git a/samples/getView.js b/samples/getView.js index 352a2bde6..7c54355bf 100644 --- a/samples/getView.js +++ b/samples/getView.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset ID - tableId = 'my_view' // Existing table ID + tableId = 'my_view', // Existing table ID ) { // [START bigquery_get_view] // Import the Google Cloud client library diff --git a/samples/listRoutines.js b/samples/listRoutines.js index cdeb504b9..487bbaf49 100644 --- a/samples/listRoutines.js +++ b/samples/listRoutines.js @@ -15,7 +15,7 @@ 'use strict'; function main( - datasetId = 'my_dataset' // Existing dataset + datasetId = 'my_dataset', // Existing dataset ) { // [START bigquery_list_routines] // Import the Google Cloud client library and create a client diff --git a/samples/loadCSVFromGCSTruncate.js b/samples/loadCSVFromGCSTruncate.js index 87754e076..07d477a57 100644 --- a/samples/loadCSVFromGCSTruncate.js +++ b/samples/loadCSVFromGCSTruncate.js @@ -68,7 +68,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') { // load() waits for the job to finish console.log(`Job ${job.id} completed.`); console.log( - `Write disposition used: ${job.configuration.load.writeDisposition}.` + `Write disposition used: ${job.configuration.load.writeDisposition}.`, ); } // [END bigquery_load_table_gcs_csv_truncate] diff --git a/samples/loadJSONFromGCSTruncate.js b/samples/loadJSONFromGCSTruncate.js index 12329fc34..11c06c888 100644 --- a/samples/loadJSONFromGCSTruncate.js +++ b/samples/loadJSONFromGCSTruncate.js @@ -67,7 +67,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') { // load() waits for the job to finish console.log(`Job ${job.id} completed.`); console.log( - `Write disposition used: ${job.configuration.load.writeDisposition}.` + `Write disposition used: ${job.configuration.load.writeDisposition}.`, ); } // [END bigquery_load_table_gcs_json_truncate] diff --git a/samples/loadLocalFile.js b/samples/loadLocalFile.js index 76828294a..b45c1f419 100644 --- a/samples/loadLocalFile.js +++ b/samples/loadLocalFile.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', tableId = 'my_table', - filename = '/path/to/file.csv' + filename = '/path/to/file.csv', ) { // [START bigquery_load_from_file] // Imports the Google Cloud client library diff 
--git a/samples/loadOrcFromGCSTruncate.js b/samples/loadOrcFromGCSTruncate.js index a85ff4648..5773b6959 100644 --- a/samples/loadOrcFromGCSTruncate.js +++ b/samples/loadOrcFromGCSTruncate.js @@ -61,7 +61,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') { // load() waits for the job to finish console.log(`Job ${job.id} completed.`); console.log( - `Write disposition used: ${job.configuration.load.writeDisposition}.` + `Write disposition used: ${job.configuration.load.writeDisposition}.`, ); } // [END bigquery_load_table_gcs_orc_truncate] diff --git a/samples/loadParquetFromGCSTruncate.js b/samples/loadParquetFromGCSTruncate.js index 7d7b113a4..9a1e9518a 100644 --- a/samples/loadParquetFromGCSTruncate.js +++ b/samples/loadParquetFromGCSTruncate.js @@ -61,7 +61,7 @@ function main(datasetId = 'my_dataset', tableId = 'my_table') { // load() waits for the job to finish console.log(`Job ${job.id} completed.`); console.log( - `Write disposition used: ${job.configuration.load.writeDisposition}.` + `Write disposition used: ${job.configuration.load.writeDisposition}.`, ); } // [END bigquery_load_table_gcs_parquet_truncate] diff --git a/samples/loadTableGCSAvro.js b/samples/loadTableGCSAvro.js index d9111619b..fb2ee750b 100644 --- a/samples/loadTableGCSAvro.js +++ b/samples/loadTableGCSAvro.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset ID - tableId = 'us_states' // Existing table ID + tableId = 'us_states', // Existing table ID ) { // [START bigquery_load_table_gcs_avro] // Import the Google Cloud client libraries diff --git a/samples/loadTableGCSAvroTruncate.js b/samples/loadTableGCSAvroTruncate.js index 005a78f9e..c0721ae8a 100644 --- a/samples/loadTableGCSAvroTruncate.js +++ b/samples/loadTableGCSAvroTruncate.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset ID - tableId = 'us_states' // Existing table ID + tableId = 'us_states', // Existing table ID ) { // [START bigquery_load_table_gcs_avro_truncate] // Import the Google Cloud client libraries @@ -64,7 +64,7 @@ function main( // load() waits for the job to finish console.log(`Job ${job.id} completed.`); console.log( - `Write disposition used: ${job.configuration.load.writeDisposition}.` + `Write disposition used: ${job.configuration.load.writeDisposition}.`, ); } // [END bigquery_load_table_gcs_avro_truncate] diff --git a/samples/nestedRepeatedSchema.js b/samples/nestedRepeatedSchema.js index 2bf6ebf4a..c2f04f01d 100644 --- a/samples/nestedRepeatedSchema.js +++ b/samples/nestedRepeatedSchema.js @@ -30,7 +30,7 @@ function main( {name: 'Zip', type: 'STRING'}, ], }, - ] + ], ) { // [START bigquery_nested_repeated_schema] // Import the Google Cloud client library and create a client diff --git a/samples/package.json b/samples/package.json index e03e68c2b..9534c1bd5 100644 --- a/samples/package.json +++ b/samples/package.json @@ -17,7 +17,7 @@ "fix": "gts fix" }, "dependencies": { - "@google-cloud/bigquery": "^7.9.4", + "@google-cloud/bigquery": "^8.0.0", "@google-cloud/storage": "^7.0.0", "google-auth-library": "^9.6.0", "readline-promise": "^1.0.4", @@ -29,7 +29,7 @@ "gts": "^5.0.0", "mocha": "^8.0.0", "proxyquire": "^2.1.3", - "sinon": "^18.0.0", + "sinon": "^20.0.0", "uuid": "^9.0.0" } } \ No newline at end of file diff --git a/samples/queryExternalGCSPerm.js b/samples/queryExternalGCSPerm.js index 13d11d565..ea77b4762 100644 --- a/samples/queryExternalGCSPerm.js +++ b/samples/queryExternalGCSPerm.js @@ -20,7 +20,7 @@ function main( schema = [ {name: 'name', 
type: 'STRING'}, {name: 'post_abbr', type: 'STRING'}, - ] + ], ) { // [START bigquery_query_external_gcs_perm] // Import the Google Cloud client library and create a client diff --git a/samples/queryExternalGCSTemp.js b/samples/queryExternalGCSTemp.js index 8bc6ee87b..a418b76ec 100644 --- a/samples/queryExternalGCSTemp.js +++ b/samples/queryExternalGCSTemp.js @@ -18,7 +18,7 @@ function main( schema = [ {name: 'name', type: 'STRING'}, {name: 'post_abbr', type: 'STRING'}, - ] + ], ) { // [START bigquery_query_external_gcs_temp] // Import the Google Cloud client library and create a client diff --git a/samples/queryLegacyLargeResults.js b/samples/queryLegacyLargeResults.js index 456893908..fe19a70c2 100644 --- a/samples/queryLegacyLargeResults.js +++ b/samples/queryLegacyLargeResults.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', tableId = 'my_table', - projectId = 'my_project' + projectId = 'my_project', ) { // [START bigquery_query_legacy_large_results] // Import the Google Cloud client library diff --git a/samples/queryShortMode.js b/samples/queryShortMode.js index a69d4b664..2ca0e3093 100644 --- a/samples/queryShortMode.js +++ b/samples/queryShortMode.js @@ -43,7 +43,7 @@ function main() { const jobRef = res.jobReference; const qualifiedId = `${jobRef.projectId}.${jobRef.location}.${jobRef.jobId}`; console.log( - `Query was run with job state. Job ID: ${qualifiedId}, Query ID: ${res.queryId}` + `Query was run with job state. Job ID: ${qualifiedId}, Query ID: ${res.queryId}`, ); } // Print the results diff --git a/samples/relaxColumn.js b/samples/relaxColumn.js index 4e6e0ca72..8c2a6251d 100644 --- a/samples/relaxColumn.js +++ b/samples/relaxColumn.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset - tableId = 'my_new_table' // Table to be created + tableId = 'my_new_table', // Table to be created ) { // [START bigquery_relax_column] // Import the Google Cloud client library and create a client diff --git a/samples/relaxColumnLoadAppend.js b/samples/relaxColumnLoadAppend.js index 967d70bc9..0d69f511a 100644 --- a/samples/relaxColumnLoadAppend.js +++ b/samples/relaxColumnLoadAppend.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', tableId = 'my_table', - fileName = '/path/to/file.csv' + fileName = '/path/to/file.csv', ) { // [START bigquery_relax_column_load_append] // Import the Google Cloud client libraries diff --git a/samples/relaxColumnQueryAppend.js b/samples/relaxColumnQueryAppend.js index 40aa268e7..09536e8a5 100644 --- a/samples/relaxColumnQueryAppend.js +++ b/samples/relaxColumnQueryAppend.js @@ -17,7 +17,7 @@ function main( projectId = 'my_project', // GCP Project ID datasetId = 'my_dataset', // Existing dataset - tableId = 'my_table' // Existing table + tableId = 'my_table', // Existing table ) { // [START bigquery_relax_column_query_append] const {BigQuery} = require('@google-cloud/bigquery'); @@ -39,7 +39,7 @@ function main( const [metaData] = await table.getMetadata(); const requiredFields = metaData.schema.fields.filter( - ({mode}) => mode === 'REQUIRED' + ({mode}) => mode === 'REQUIRED', ).length; console.log(`${requiredFields} fields in the schema are required.`); @@ -84,11 +84,11 @@ function main( const [updatedMetaData] = await updatedTable.getMetadata(); const updatedRequiredFields = updatedMetaData.schema.fields.filter( - ({mode}) => mode === 'REQUIRED' + ({mode}) => mode === 'REQUIRED', ).length; console.log( - `${updatedRequiredFields} fields in the schema are now required.` + `${updatedRequiredFields} fields 
in the schema are now required.`, ); } // [END bigquery_relax_column_query_append] diff --git a/samples/test/authViewTutorial.test.js b/samples/test/authViewTutorial.test.js index ee0f4827a..c8b0bd3b9 100644 --- a/samples/test/authViewTutorial.test.js +++ b/samples/test/authViewTutorial.test.js @@ -17,7 +17,7 @@ const {assert} = require('chai'); const {describe, it, before, beforeEach, after} = require('mocha'); const cp = require('child_process'); -const uuid = require('uuid'); +const {randomUUID} = require('crypto'); const {BigQuery} = require('@google-cloud/bigquery'); @@ -25,7 +25,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_authView'; const generateUuid = () => - `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + `${GCLOUD_TESTS_PREFIX}_${randomUUID()}`.replace(/-/gi, '_'); let projectId; const datasetId = generateUuid(); const tableId = generateUuid(); @@ -61,11 +61,11 @@ describe('Authorized View Tutorial', () => { it('should create an authorized view', async () => { const output = execSync( - `node authViewTutorial.js ${projectId} ${sourceDatasetId} ${sourceTableId} ${sharedDatasetId} ${sharedViewId}` + `node authViewTutorial.js ${projectId} ${sourceDatasetId} ${sourceTableId} ${sharedDatasetId} ${sharedViewId}`, ); assert.include( output, - `View ${projectId}:${sharedDatasetId}.${sharedViewId} created.` + `View ${projectId}:${sharedDatasetId}.${sharedViewId} created.`, ); const [exists] = await bigquery .dataset(sharedDatasetId) diff --git a/samples/test/datasets.test.js b/samples/test/datasets.test.js index 6a0b5c820..093f73278 100644 --- a/samples/test/datasets.test.js +++ b/samples/test/datasets.test.js @@ -18,13 +18,13 @@ const {BigQuery} = require('@google-cloud/bigquery'); const {assert} = require('chai'); const {describe, it, after, before, beforeEach} = require('mocha'); const cp = require('child_process'); -const uuid = require('uuid'); +const {randomUUID} = require('crypto'); const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests'; -const datasetId = `${GCLOUD_TESTS_PREFIX}_datasets_${uuid.v4()}`.replace( +const datasetId = `${GCLOUD_TESTS_PREFIX}_datasets_${randomUUID()}`.replace( /-/gi, - '_' + '_', ); const bigquery = new BigQuery(); @@ -118,7 +118,7 @@ describe('Datasets', () => { const output = execSync(`node updateDatasetDescription.js ${datasetId}`); assert.include( output, - `${datasetId} description: New dataset description.` + `${datasetId} description: New dataset description.`, ); }); @@ -177,7 +177,7 @@ describe('Datasets', () => { async function deleteDatasets() { let [datasets] = await bigquery.getDatasets(); datasets = datasets.filter(dataset => - dataset.id.includes(GCLOUD_TESTS_PREFIX) + dataset.id.includes(GCLOUD_TESTS_PREFIX), ); for (const dataset of datasets) { diff --git a/samples/test/models.test.js b/samples/test/models.test.js index 48764cdbf..e6e707b72 100644 --- a/samples/test/models.test.js +++ b/samples/test/models.test.js @@ -18,7 +18,7 @@ const {BigQuery} = require('@google-cloud/bigquery'); const {assert} = require('chai'); const {describe, it, before, beforeEach, after} = require('mocha'); const cp = require('child_process'); -const uuid = require('uuid'); +const {randomUUID} = require('crypto'); const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); @@ -29,8 +29,11 @@ const bigquery = new BigQuery(); describe('Models', function () { // Increase timeout to accommodate model creation. 
this.timeout(300000); - const datasetId = `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); - const modelId = `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + const datasetId = `${GCLOUD_TESTS_PREFIX}_${randomUUID()}`.replace( + /-/gi, + '_', + ); + const modelId = `${GCLOUD_TESTS_PREFIX}_${randomUUID()}`.replace(/-/gi, '_'); before(async () => { const query = `CREATE OR REPLACE MODEL \`${datasetId}.${modelId}\` @@ -96,13 +99,13 @@ describe('Models', function () { }); describe('Create/Delete Model', () => { - const datasetId = `${GCLOUD_TESTS_PREFIX}_delete_${uuid.v4()}`.replace( + const datasetId = `${GCLOUD_TESTS_PREFIX}_delete_${randomUUID()}`.replace( /-/gi, - '_' + '_', ); - const modelId = `${GCLOUD_TESTS_PREFIX}_delete_${uuid.v4()}`.replace( + const modelId = `${GCLOUD_TESTS_PREFIX}_delete_${randomUUID()}`.replace( /-/gi, - '_' + '_', ); before(async () => { diff --git a/samples/test/queries.test.js b/samples/test/queries.test.js index 16ca109c6..472dd88fd 100644 --- a/samples/test/queries.test.js +++ b/samples/test/queries.test.js @@ -17,7 +17,7 @@ const {assert} = require('chai'); const {describe, it, before, beforeEach, after} = require('mocha'); const cp = require('child_process'); -const uuid = require('uuid'); +const {randomUUID} = require('crypto'); const {BigQuery} = require('@google-cloud/bigquery'); @@ -26,7 +26,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_queries'; const generateUuid = () => - `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + `${GCLOUD_TESTS_PREFIX}_${randomUUID()}`.replace(/-/gi, '_'); const datasetId = generateUuid(); const tableId = generateUuid(); @@ -134,7 +134,7 @@ describe('Queries', () => { it('should run a query with a destination table', async () => { const output = execSync( - `node queryDestinationTable.js ${datasetId} ${tableId}` + `node queryDestinationTable.js ${datasetId} ${tableId}`, ); assert.include(output, `Query results loaded to table ${tableId}`); }); @@ -148,7 +148,7 @@ describe('Queries', () => { it('should run a query with a clustered destination table', async () => { const clusteredTableId = generateUuid(); const output = execSync( - `node queryClusteredTable.js ${datasetId} ${clusteredTableId}` + `node queryClusteredTable.js ${datasetId} ${clusteredTableId}`, ); assert.match(output, /started/); assert.match(output, /Status/); @@ -157,7 +157,7 @@ describe('Queries', () => { it('should run a query with legacy SQL and large results', async () => { const destTableId = generateUuid(); const output = execSync( - `node queryLegacyLargeResults.js ${datasetId} ${destTableId} ${projectId}` + `node queryLegacyLargeResults.js ${datasetId} ${destTableId} ${projectId}`, ); assert.match(output, /Rows:/); assert.match(output, /word/); @@ -167,7 +167,7 @@ describe('Queries', () => { const destTableId = generateUuid(); execSync(`node createTable.js ${datasetId} ${destTableId} 'name:STRING'`); const output = execSync( - `node addColumnQueryAppend.js ${datasetId} ${destTableId}` + `node addColumnQueryAppend.js ${datasetId} ${destTableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -179,7 +179,7 @@ describe('Queries', () => { it('should relax columns via a query job', async () => { const output = execSync( - `node relaxColumnQueryAppend.js ${projectId} ${datasetId} ${tableId}` + `node relaxColumnQueryAppend.js ${projectId} ${datasetId} ${tableId}`, ); assert.match(output, /1 fields in the schema are required\./); 
@@ -200,7 +200,7 @@ describe('Queries', () => { it('should query an external data source with permanent table', async () => { const permTableId = generateUuid(); const output = execSync( - `node queryExternalGCSPerm.js ${datasetId} ${permTableId}` + `node queryExternalGCSPerm.js ${datasetId} ${permTableId}`, ); assert.match(output, /Rows:/); assert.match(output, /post_abbr/); @@ -214,7 +214,7 @@ describe('Queries', () => { it('should create a routine using DDL', async () => { const output = execSync( - `node createRoutineDDL.js ${projectId} ${datasetId} ${routineId}` + `node createRoutineDDL.js ${projectId} ${datasetId} ${routineId}`, ); assert.include(output, `Routine ${routineId} created.`); }); diff --git a/samples/test/quickstart.test.js b/samples/test/quickstart.test.js index d9cd0a229..d159adb9d 100644 --- a/samples/test/quickstart.test.js +++ b/samples/test/quickstart.test.js @@ -16,7 +16,7 @@ const {assert} = require('chai'); const {describe, it, after, beforeEach} = require('mocha'); -const uuid = require('uuid'); +const {randomUUID} = require('crypto'); const cp = require('child_process'); const {BigQuery} = require('@google-cloud/bigquery'); @@ -25,9 +25,9 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); const bigquery = new BigQuery(); describe('Quickstart', () => { - const datasetName = `nodejs_samples_tests_quickstart_${uuid.v4()}`.replace( + const datasetName = `nodejs_samples_tests_quickstart_${randomUUID()}`.replace( /-/gi, - '_' + '_', ); beforeEach(async function () { this.currentTest.retries(2); diff --git a/samples/test/routines.test.js b/samples/test/routines.test.js index bee3f01db..c10eab055 100644 --- a/samples/test/routines.test.js +++ b/samples/test/routines.test.js @@ -17,7 +17,7 @@ const {assert} = require('chai'); const {describe, it, before, beforeEach, after} = require('mocha'); const cp = require('child_process'); -const uuid = require('uuid'); +const {randomUUID} = require('crypto'); const {BigQuery} = require('@google-cloud/bigquery'); @@ -26,7 +26,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests_routines'; const generateUuid = () => - `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + `${GCLOUD_TESTS_PREFIX}_${randomUUID()}`.replace(/-/gi, '_'); const datasetId = generateUuid(); const routineId = generateUuid(); @@ -67,7 +67,7 @@ describe('Routines', () => { it('should create a routine', async () => { const output = execSync( - `node createRoutine.js ${datasetId} ${newRoutineId}` + `node createRoutine.js ${datasetId} ${newRoutineId}`, ); assert.include(output, `Routine ${newRoutineId} created.`); }); @@ -89,8 +89,8 @@ describe('Routines', () => { }); describe('Delete Routine', () => { - const datasetId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); - const routineId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); + const datasetId = `gcloud_tests_${randomUUID()}`.replace(/-/gi, '_'); + const routineId = `gcloud_tests_${randomUUID()}`.replace(/-/gi, '_'); before(async () => { await bigquery.createDataset(datasetId); @@ -128,7 +128,7 @@ describe('Routines', () => { it('should delete a routine', async () => { const output = execSync( - `node deleteRoutine.js ${datasetId} ${routineId}` + `node deleteRoutine.js ${datasetId} ${routineId}`, ); assert.include(output, `Routine ${routineId} deleted.`); const [exists] = await bigquery diff --git a/samples/test/tables.test.js b/samples/test/tables.test.js index a2ebae8be..8925292f1 100644 --- 
a/samples/test/tables.test.js +++ b/samples/test/tables.test.js @@ -17,7 +17,7 @@ const {assert} = require('chai'); const {describe, it, before, after, beforeEach} = require('mocha'); const path = require('path'); -const uuid = require('uuid'); +const {randomUUID} = require('crypto'); const cp = require('child_process'); const {Storage} = require('@google-cloud/storage'); const {BigQuery} = require('@google-cloud/bigquery'); @@ -33,7 +33,7 @@ const GCLOUD_TESTS_PREFIX = 'nodejs_samples_tests'; const storage = new Storage(); const generateUuid = () => - `${GCLOUD_TESTS_PREFIX}_${uuid.v4()}`.replace(/-/gi, '_'); + `${GCLOUD_TESTS_PREFIX}_${randomUUID()}`.replace(/-/gi, '_'); const datasetId = generateUuid(); const srcDatasetId = datasetId; @@ -57,7 +57,7 @@ let policyTag0; let policyTag1; const partialDataFilePath = path.join( __dirname, - `../resources/${partialDataFileName}` + `../resources/${partialDataFileName}`, ); const bigquery = new BigQuery(); @@ -134,11 +134,11 @@ describe('Tables', () => { it('should create a partitioned table', async () => { const output = execSync( - `node createTablePartitioned.js ${datasetId} ${partitionedTableId}` + `node createTablePartitioned.js ${datasetId} ${partitionedTableId}`, ); assert.include( output, - `Table ${partitionedTableId} created with partitioning:` + `Table ${partitionedTableId} created with partitioning:`, ); assert.include(output, "type: 'DAY'"); assert.include(output, "field: 'date'"); @@ -152,15 +152,15 @@ describe('Tables', () => { it('should create an integer range partitioned table', async () => { const rangePartTableId = generateUuid(); const output = execSync( - `node createTableRangePartitioned.js ${datasetId} ${rangePartTableId}` + `node createTableRangePartitioned.js ${datasetId} ${rangePartTableId}`, ); assert.include( output, - `Table ${rangePartTableId} created with integer range partitioning:` + `Table ${rangePartTableId} created with integer range partitioning:`, ); assert.include( output, - "range: { start: '0', end: '100000', interval: '10' }" + "range: { start: '0', end: '100000', interval: '10' }", ); const [exists] = await bigquery .dataset(datasetId) @@ -172,11 +172,11 @@ describe('Tables', () => { it('should create a clustered table', async () => { const clusteredTableId = generateUuid(); const output = execSync( - `node createTableClustered.js ${datasetId} ${clusteredTableId}` + `node createTableClustered.js ${datasetId} ${clusteredTableId}`, ); assert.include( output, - `Table ${clusteredTableId} created with clustering:` + `Table ${clusteredTableId} created with clustering:`, ); assert.include(output, "{ fields: [ 'city', 'zipcode' ] }"); const [exists] = await bigquery @@ -189,11 +189,11 @@ describe('Tables', () => { it('should update table clustering', async () => { const clusteredTableId = generateUuid(); const output = execSync( - `node removeTableClustering.js ${datasetId} ${clusteredTableId}` + `node removeTableClustering.js ${datasetId} ${clusteredTableId}`, ); assert.include( output, - `Table ${clusteredTableId} created with clustering.` + `Table ${clusteredTableId} created with clustering.`, ); assert.include(output, `Table ${clusteredTableId} updated clustering:`); const [exists] = await bigquery @@ -205,7 +205,7 @@ describe('Tables', () => { it('should create a table with nested schema', async () => { const output = execSync( - `node nestedRepeatedSchema.js ${datasetId} ${nestedTableId}` + `node nestedRepeatedSchema.js ${datasetId} ${nestedTableId}`, ); assert.include(output, `Table ${nestedTableId} 
created.`); const [exists] = await bigquery @@ -217,7 +217,7 @@ describe('Tables', () => { it('should create a table with column-level security', async () => { const output = execSync( - `node createTableColumnACL.js ${datasetId} ${aclTableId} ${policyTag0.name}` + `node createTableColumnACL.js ${datasetId} ${aclTableId} ${policyTag0.name}`, ); assert.include(output, `Created table ${aclTableId} with schema:`); assert.include(output, policyTag0.name); @@ -230,7 +230,7 @@ describe('Tables', () => { it('should update a table with column-level security', async () => { const output = execSync( - `node updateTableColumnACL.js ${datasetId} ${aclTableId} ${policyTag1.name}` + `node updateTableColumnACL.js ${datasetId} ${aclTableId} ${policyTag1.name}`, ); assert.include(output, `Updated table ${aclTableId} with schema:`); assert.include(output, policyTag1.name); @@ -251,7 +251,7 @@ describe('Tables', () => { it('should check whether a table exists', async () => { const nonexistentTableId = 'foobar'; const output = execSync( - `node tableExists.js ${datasetId} ${nonexistentTableId}` + `node tableExists.js ${datasetId} ${nonexistentTableId}`, ); assert.include(output, 'Not found'); assert.include(output, datasetId); @@ -307,7 +307,7 @@ describe('Tables', () => { it("should update table's description", async () => { const output = execSync( - `node updateTableDescription.js ${datasetId} ${tableId}` + `node updateTableDescription.js ${datasetId} ${tableId}`, ); assert.include(output, `${tableId} description: New table description.`); }); @@ -316,7 +316,7 @@ describe('Tables', () => { const currentTime = Date.now(); const expirationTime = currentTime + 1000 * 60 * 60 * 24 * 5; const output = execSync( - `node updateTableExpiration.js ${datasetId} ${tableId} ${expirationTime}` + `node updateTableExpiration.js ${datasetId} ${tableId} ${expirationTime}`, ); assert.include(output, `${tableId}`); assert.include(output, `expiration: ${expirationTime}`); @@ -336,7 +336,7 @@ describe('Tables', () => { it('should load a local CSV file', async () => { const output = execSync( - `node loadLocalFile.js ${datasetId} ${tableId} ${localFilePath}` + `node loadLocalFile.js ${datasetId} ${tableId} ${localFilePath}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -346,7 +346,7 @@ describe('Tables', () => { it('should browse table rows', async () => { const browseDestTable = generateUuid(); const output = execSync( - `node browseTable.js ${datasetId} ${browseDestTable}` + `node browseTable.js ${datasetId} ${browseDestTable}`, ); assert.match(output, /name/); assert.match(output, /total people/); @@ -354,7 +354,7 @@ describe('Tables', () => { it('should extract a table to GCS CSV file', async () => { const output = execSync( - `node extractTableToGCS.js ${datasetId} ${tableId} ${bucketName} ${exportCSVFileName}` + `node extractTableToGCS.js ${datasetId} ${tableId} ${bucketName} ${exportCSVFileName}`, ); assert.match(output, /created\./); @@ -367,7 +367,7 @@ describe('Tables', () => { it('should extract a table to GCS JSON file', async () => { const output = execSync( - `node extractTableJSON.js ${datasetId} ${tableId} ${bucketName} ${exportJSONFileName}` + `node extractTableJSON.js ${datasetId} ${tableId} ${bucketName} ${exportJSONFileName}`, ); assert.match(output, /created\./); @@ -380,7 +380,7 @@ describe('Tables', () => { it('should extract a table to GCS compressed file', async () => { const output = execSync( - `node extractTableCompressed.js 
${datasetId} ${tableId} ${bucketName} ${exportCSVFileName}` + `node extractTableCompressed.js ${datasetId} ${tableId} ${bucketName} ${exportCSVFileName}`, ); assert.match(output, /created\./); @@ -402,7 +402,7 @@ describe('Tables', () => { it('should load a GCS Parquet file', async () => { const tableId = generateUuid(); const output = execSync( - `node loadTableGCSParquet.js ${datasetId} ${tableId}` + `node loadTableGCSParquet.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -420,7 +420,7 @@ describe('Tables', () => { it('should load a GCS Firestore backup file', async () => { const tableId = generateUuid(); const output = execSync( - `node loadTableURIFirestore.js ${datasetId} ${tableId}` + `node loadTableURIFirestore.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -446,7 +446,7 @@ describe('Tables', () => { it('should load a GCS CSV file to partitioned table', async () => { const tableId = generateUuid(); const output = execSync( - `node loadTablePartitioned.js ${datasetId} ${tableId}` + `node loadTablePartitioned.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -456,7 +456,7 @@ describe('Tables', () => { it('should load a GCS CSV file to clustered table', async () => { const tableId = generateUuid(); const output = execSync( - `node loadTableClustered.js ${datasetId} ${tableId}` + `node loadTableClustered.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -466,10 +466,10 @@ describe('Tables', () => { it('should add a new column via a GCS file load job', async () => { const destTableId = generateUuid(); execSync( - `node createTable.js ${datasetId} ${destTableId} 'Name:STRING, Age:INTEGER, Weight:FLOAT'` + `node createTable.js ${datasetId} ${destTableId} 'Name:STRING, Age:INTEGER, Weight:FLOAT'`, ); const output = execSync( - `node addColumnLoadAppend.js ${datasetId} ${destTableId} ${localFilePath}` + `node addColumnLoadAppend.js ${datasetId} ${destTableId} ${localFilePath}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -480,7 +480,7 @@ describe('Tables', () => { const destTableId = generateUuid(); execSync(`node createTable.js ${datasetId} ${destTableId}`); const output = execSync( - `node relaxColumnLoadAppend.js ${datasetId} ${destTableId} ${partialDataFilePath}` + `node relaxColumnLoadAppend.js ${datasetId} ${destTableId} ${partialDataFilePath}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -490,7 +490,7 @@ describe('Tables', () => { it('should load a GCS CSV file with autodetected schema', async () => { const tableId = generateUuid(); const output = execSync( - `node loadCSVFromGCSAutodetect.js ${datasetId} ${tableId}` + `node loadCSVFromGCSAutodetect.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -500,7 +500,7 @@ describe('Tables', () => { it('should load a GCS JSON file with autodetected schema', async () => { const tableId = generateUuid(); const output = execSync( - `node loadJSONFromGCSAutodetect.js ${datasetId} ${tableId}` + `node 
loadJSONFromGCSAutodetect.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery.dataset(datasetId).table(tableId).getRows(); @@ -510,7 +510,7 @@ describe('Tables', () => { it('should load a GCS CSV file truncate table', async () => { const tableId = generateUuid(); const output = execSync( - `node loadCSVFromGCSTruncate.js ${datasetId} ${tableId}` + `node loadCSVFromGCSTruncate.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); @@ -521,7 +521,7 @@ describe('Tables', () => { it('should load a GCS JSON file truncate table', async () => { const tableId = generateUuid(); const output = execSync( - `node loadJSONFromGCSTruncate.js ${datasetId} ${tableId}` + `node loadJSONFromGCSTruncate.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); @@ -532,7 +532,7 @@ describe('Tables', () => { it('should load a GCS parquet file truncate table', async () => { const tableId = generateUuid(); const output = execSync( - `node loadParquetFromGCSTruncate.js ${datasetId} ${tableId}` + `node loadParquetFromGCSTruncate.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); @@ -543,7 +543,7 @@ describe('Tables', () => { it('should load a GCS ORC file truncate table', async () => { const tableId = generateUuid(); const output = execSync( - `node loadOrcFromGCSTruncate.js ${datasetId} ${tableId}` + `node loadOrcFromGCSTruncate.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); @@ -554,7 +554,7 @@ describe('Tables', () => { it('should load a GCS Avro file truncate table', async () => { const tableId = generateUuid(); const output = execSync( - `node loadTableGCSAvroTruncate.js ${datasetId} ${tableId}` + `node loadTableGCSAvroTruncate.js ${datasetId} ${tableId}`, ); assert.match(output, /completed\./); assert.include(output, 'Write disposition used: WRITE_TRUNCATE.'); @@ -564,7 +564,7 @@ describe('Tables', () => { it('should copy a table', async () => { const output = execSync( - `node copyTable.js ${srcDatasetId} ${srcTableId} ${destDatasetId} ${destTableId}` + `node copyTable.js ${srcDatasetId} ${srcTableId} ${destDatasetId} ${destTableId}`, ); assert.match(output, /completed\./); const [rows] = await bigquery @@ -576,7 +576,7 @@ describe('Tables', () => { it('should insert rows', async () => { const output = execSync( - `node insertRowsAsStream.js ${datasetId} ${tableId}` + `node insertRowsAsStream.js ${datasetId} ${tableId}`, ); assert.match(output, /Inserted 2 rows/); }); @@ -584,7 +584,7 @@ describe('Tables', () => { it('should insert rows with supported data types', async () => { const typesTableId = generateUuid(); const output = execSync( - `node insertingDataTypes.js ${datasetId} ${typesTableId}` + `node insertingDataTypes.js ${datasetId} ${typesTableId}`, ); assert.match(output, /Inserted 2 rows/); }); @@ -592,7 +592,7 @@ describe('Tables', () => { it('copy multiple source tables to a given destination', async () => { execSync(`node createTable.js ${datasetId} destinationTable`); const output = execSync( - `node copyTableMultipleSource.js ${datasetId} ${tableId} destinationTable` + `node copyTableMultipleSource.js ${datasetId} ${tableId} destinationTable`, ); assert.include(output, 'sourceTable'); assert.include(output, 
'destinationTable'); @@ -647,8 +647,8 @@ describe('Tables', () => { }); describe('Delete Table', () => { - const datasetId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); - const tableId = `gcloud_tests_${uuid.v4()}`.replace(/-/gi, '_'); + const datasetId = `gcloud_tests_${randomUUID()}`.replace(/-/gi, '_'); + const tableId = `gcloud_tests_${randomUUID()}`.replace(/-/gi, '_'); before(async () => { const datasetOptions = { @@ -690,7 +690,7 @@ describe('Tables', () => { execSync(`node createTable.js ${datasetId} ${tableId}`); const output = execSync( - `node undeleteTable.js ${datasetId} ${tableId} ${recoveredTableId}` + `node undeleteTable.js ${datasetId} ${tableId} ${recoveredTableId}`, ); assert.include(output, `Table ${tableId} deleted.`); @@ -718,7 +718,7 @@ describe('Tables', () => { parent: dataCatalog.locationPath(projectId, location), }; let [taxonomies] = await policyTagManager.listTaxonomies( - listTaxonomiesRequest + listTaxonomiesRequest, ); taxonomies = taxonomies.filter(taxonomy => { diff --git a/samples/undeleteTable.js b/samples/undeleteTable.js index 9b872bea7..bf4ac21f6 100644 --- a/samples/undeleteTable.js +++ b/samples/undeleteTable.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', // Dataset tableId = 'my_table_to_undelete', // Table to recover - recoveredTableId = 'my_recovered_table' // Recovered table + recoveredTableId = 'my_recovered_table', // Recovered table ) { // [START bigquery_undelete_table] // Import the Google Cloud client library @@ -56,7 +56,7 @@ function main( .copy(bigquery.dataset(datasetId).table(recoveredTableId)); console.log( - `Copied data from deleted table ${tableId} to ${recoveredTableId}` + `Copied data from deleted table ${tableId} to ${recoveredTableId}`, ); } // [END bigquery_undelete_table] diff --git a/samples/updateRoutine.js b/samples/updateRoutine.js index 1240a45e1..ae714d161 100644 --- a/samples/updateRoutine.js +++ b/samples/updateRoutine.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset - routineId = 'my_routine' // Existing routine + routineId = 'my_routine', // Existing routine ) { // [START bigquery_update_routine] // Import the Google Cloud client library and create a client diff --git a/samples/updateTableColumnACL.js b/samples/updateTableColumnACL.js index 9bb52b078..8f9f26286 100644 --- a/samples/updateTableColumnACL.js +++ b/samples/updateTableColumnACL.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', // Existing dataset tableId = 'my_table', // Table to be created - policyTagName = 'projects/myProject/location/us/taxonomies/myTaxonomy/policyTags/myPolicyTag' // Existing policy tag + policyTagName = 'projects/myProject/location/us/taxonomies/myTaxonomy/policyTags/myPolicyTag', // Existing policy tag ) { // [START bigquery_update_table_column_ACL] // Import the Google Cloud client library and create a client diff --git a/samples/updateTableExpiration.js b/samples/updateTableExpiration.js index 9adcb5111..a111921f0 100644 --- a/samples/updateTableExpiration.js +++ b/samples/updateTableExpiration.js @@ -17,7 +17,7 @@ function main( datasetId = 'my_dataset', // Existing dataset tableId = 'my_table', // Existing table - expirationTime = Date.now() + 1000 * 60 * 60 * 24 * 5 // 5 days from current time in ms + expirationTime = Date.now() + 1000 * 60 * 60 * 24 * 5, // 5 days from current time in ms ) { // [START bigquery_update_table_expiration] // Import the Google Cloud client library diff --git a/samples/updateViewQuery.js b/samples/updateViewQuery.js index 
ad426e2bd..f982c3b50 100644 --- a/samples/updateViewQuery.js +++ b/samples/updateViewQuery.js @@ -16,7 +16,7 @@ function main( datasetId = 'my_dataset', // Existing dataset ID - tableId = 'my_existing_view' // Existing view ID + tableId = 'my_existing_view', // Existing view ID ) { // [START bigquery_update_view_query] // Import the Google Cloud client library and create a client diff --git a/scripts/gen-types.js b/scripts/gen-types.js index 69b964e58..a33a85b28 100644 --- a/scripts/gen-types.js +++ b/scripts/gen-types.js @@ -51,13 +51,14 @@ function overridedRender() { } const patched = source.replaceAll( 'formatOptions.useInt64Timestamp', - "'formatOptions.useInt64Timestamp'" + "'formatOptions.useInt64Timestamp'", ); const fullSource = header + patched; return prettier.format(fullSource, { parser: 'typescript', singleQuote: true, + bracketSpacing: false, }); } diff --git a/src/bigquery.ts b/src/bigquery.ts index 8720df7de..fb85ac09e 100644 --- a/src/bigquery.ts +++ b/src/bigquery.ts @@ -25,11 +25,11 @@ import * as common from '@google-cloud/common'; import {paginator, ResourceStream} from '@google-cloud/paginator'; import {promisifyAll} from '@google-cloud/promisify'; import {PreciseDate} from '@google-cloud/precise-date'; -import arrify = require('arrify'); +import {toArray} from './util'; import * as Big from 'big.js'; import * as extend from 'extend'; import * as is from 'is'; -import * as uuid from 'uuid'; +import {randomUUID} from 'crypto'; import {Dataset, DatasetOptions} from './dataset'; import {Job, JobOptions, QueryResultsOptions} from './job'; @@ -67,7 +67,7 @@ export interface PagedCallback { err: Error | null, resource?: T[] | null, nextQuery?: Q | null, - response?: R | null + response?: R | null, ): void; } @@ -586,7 +586,7 @@ export class BigQuery extends Service { wrapIntegers: boolean | IntegerTypeCastOptions; selectedFields?: string[]; parseJSON?: boolean; - } + }, ) { // deep copy schema fields to avoid mutation let schemaFields: TableField[] = extend(true, [], schema?.fields); @@ -605,14 +605,14 @@ export class BigQuery extends Service { field => currentFields .map(c => c!.toLowerCase()) - .indexOf(field.name!.toLowerCase()) >= 0 + .indexOf(field.name!.toLowerCase()) >= 0, ); selectedFields = selectedFieldsArray .filter(c => c.length > 0) .map(c => c.join('.')); } - return arrify(rows).map(mergeSchema).map(flattenRows); + return toArray(rows).map(mergeSchema).map(flattenRows); function mergeSchema(row: TableRow) { return row.f!.map((field: TableRowField, index: number) => { @@ -915,7 +915,7 @@ export class BigQuery extends Service { */ static range( value: string | BigQueryRangeOptions, - elementType?: string + elementType?: string, ): BigQueryRange { return new BigQueryRange(value, elementType); } @@ -966,14 +966,14 @@ export class BigQuery extends Service { */ static int( value: string | number | IntegerTypeCastValue, - typeCastOptions?: IntegerTypeCastOptions + typeCastOptions?: IntegerTypeCastOptions, ) { return new BigQueryInt(value, typeCastOptions); } int( value: string | number | IntegerTypeCastValue, - typeCastOptions?: IntegerTypeCastOptions + typeCastOptions?: IntegerTypeCastOptions, ) { return BigQuery.int(value, typeCastOptions); } @@ -1018,7 +1018,7 @@ export class BigQuery extends Service { '{\n' + ' integerTypeCastFunction: provide \n' + ' fields: optionally specify field name(s) to be custom casted\n' + - '}\n' + '}\n', ); } return num; @@ -1037,7 +1037,7 @@ export class BigQuery extends Service { * @returns {string} The valid type provided. 
*/ static getTypeDescriptorFromProvidedType_( - providedType: string | ProvidedTypeStruct | ProvidedTypeArray + providedType: string | ProvidedTypeStruct | ProvidedTypeArray, ): ValueType { // The list of types can be found in src/types.d.ts const VALID_TYPES = [ @@ -1081,7 +1081,7 @@ export class BigQuery extends Service { return { name: prop, type: BigQuery.getTypeDescriptorFromProvidedType_( - (providedType as ProvidedTypeStruct)[prop] + (providedType as ProvidedTypeStruct)[prop], ), }; }), @@ -1113,7 +1113,7 @@ export class BigQuery extends Service { if (value === null) { throw new Error( - "Parameter types must be provided for null values via the 'types' field in query options." + "Parameter types must be provided for null values via the 'types' field in query options.", ); } @@ -1147,7 +1147,7 @@ export class BigQuery extends Service { } else if (Array.isArray(value)) { if (value.length === 0) { throw new Error( - "Parameter types must be provided for empty arrays via the 'types' field in query options." + "Parameter types must be provided for empty arrays via the 'types' field in query options.", ); } return { @@ -1178,7 +1178,7 @@ export class BigQuery extends Service { [ 'This value could not be translated to a BigQuery data type.', value, - ].join('\n') + ].join('\n'), ); } @@ -1202,7 +1202,7 @@ export class BigQuery extends Service { static valueToQueryParameter_( // eslint-disable-next-line @typescript-eslint/no-explicit-any value: any, - providedType?: string | ProvidedTypeStruct | ProvidedTypeArray + providedType?: string | ProvidedTypeStruct | ProvidedTypeArray, ) { if (is.date(value)) { value = BigQuery.timestamp(value as Date); @@ -1230,7 +1230,7 @@ export class BigQuery extends Service { } } return {value} as bigquery.IQueryParameterValue; - } + }, ); } else if (typeName === 'STRUCT') { queryParameter.parameterValue!.structValues = Object.keys(value).reduce( @@ -1239,7 +1239,7 @@ export class BigQuery extends Service { if (providedType) { nestedQueryParameter = BigQuery.valueToQueryParameter_( value[prop], - (providedType as ProvidedTypeStruct)[prop] + (providedType as ProvidedTypeStruct)[prop], ); } else { nestedQueryParameter = BigQuery.valueToQueryParameter_(value[prop]); @@ -1248,7 +1248,7 @@ export class BigQuery extends Service { (structValues as any)[prop] = nestedQueryParameter.parameterValue; return structValues; }, - {} + {}, ); } else if (typeName === 'RANGE') { let rangeValue: BigQueryRange; @@ -1257,7 +1257,7 @@ export class BigQuery extends Service { } else { rangeValue = BigQuery.range( value, - queryParameter.parameterType?.rangeElementType?.type + queryParameter.parameterType?.rangeElementType?.type, ); } queryParameter.parameterValue!.rangeValue = { @@ -1273,7 +1273,7 @@ export class BigQuery extends Service { } else { queryParameter.parameterValue!.value = BigQuery._getValue( value, - parameterType + parameterType, ); } @@ -1337,18 +1337,18 @@ export class BigQuery extends Service { */ createDataset( id: string, - options?: DatasetResource + options?: DatasetResource, ): Promise; createDataset( id: string, options: DatasetResource, - callback: DatasetCallback + callback: DatasetCallback, ): void; createDataset(id: string, callback: DatasetCallback): void; createDataset( id: string, optionsOrCallback?: DatasetResource | DatasetCallback, - cb?: DatasetCallback + cb?: DatasetCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -1368,7 +1368,7 @@ export class BigQuery extends Service { datasetReference: { datasetId: id, }, - } + }, ), }; if (options.projectId) { @@ -1486,7 +1486,7 @@ export class BigQuery extends Service { createQueryJob(options: Query | string, callback: JobCallback): void; createQueryJob( opts: Query | string, - callback?: JobCallback + callback?: JobCallback, ): void | Promise { const options = typeof opts === 'object' ? opts : {query: opts}; this.trace_('[createQueryJob]', options, callback); @@ -1499,7 +1499,7 @@ export class BigQuery extends Service { { useLegacySql: false, }, - options + options, ); this.trace_('[createQueryJob]', query); @@ -1520,7 +1520,7 @@ export class BigQuery extends Service { if (query.params) { const {parameterMode, params} = this.buildQueryParams_( query.params, - query.types + query.types, ); query.parameterMode = parameterMode; query.queryParameters = params; @@ -1567,7 +1567,7 @@ export class BigQuery extends Service { private buildQueryParams_( params: Query['params'], - types: Query['types'] + types: Query['types'], ): { parameterMode: ParameterMode; params: bigquery.IQueryParameter[] | undefined; @@ -1589,7 +1589,7 @@ export class BigQuery extends Service { if (types) { if (!is.object(types)) { throw new Error( - 'Provided types must match the value type passed to `params`' + 'Provided types must match the value type passed to `params`', ); } @@ -1598,7 +1598,7 @@ export class BigQuery extends Service { if (namedTypes[namedParameter]) { queryParameter = BigQuery.valueToQueryParameter_( value, - namedTypes[namedParameter] + namedTypes[namedParameter], ); } else { queryParameter = BigQuery.valueToQueryParameter_(value); @@ -1614,7 +1614,7 @@ export class BigQuery extends Service { if (types) { if (!is.array(types)) { throw new Error( - 'Provided types must match the value type passed to `params`' + 'Provided types must match the value type passed to `params`', ); } @@ -1626,7 +1626,7 @@ export class BigQuery extends Service { params.forEach((value: {}, i: number) => { const queryParameter = BigQuery.valueToQueryParameter_( value, - positionalTypes[i] + positionalTypes[i], ); queryParameters.push(queryParameter); }); @@ -1706,7 +1706,7 @@ export class BigQuery extends Service { createJob(options: JobOptions, callback: JobCallback): void; createJob( options: JobOptions, - callback?: JobCallback + callback?: JobCallback, ): void | Promise { const JOB_ID_PROVIDED = typeof options.jobId !== 'undefined'; const DRY_RUN = options.configuration?.dryRun @@ -1714,7 +1714,7 @@ export class BigQuery extends Service { : false; const reqOpts = Object.assign({}, options); - let jobId = JOB_ID_PROVIDED ? reqOpts.jobId : uuid.v4(); + let jobId = JOB_ID_PROVIDED ? reqOpts.jobId : randomUUID(); if (reqOpts.jobId) { delete reqOpts.jobId; @@ -1779,7 +1779,7 @@ export class BigQuery extends Service { job.location = resp.jobReference.location; job.metadata = resp; callback!(err, job, resp); - } + }, ); } @@ -1865,7 +1865,7 @@ export class BigQuery extends Service { getDatasets(callback: DatasetsCallback): void; getDatasets( optionsOrCallback?: GetDatasetsOptions | DatasetsCallback, - cb?: DatasetsCallback + cb?: DatasetsCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -1906,7 +1906,7 @@ export class BigQuery extends Service { ds.metadata = dataset!; return ds; - } + }, ); callback!(null, datasets, nextQuery, resp); @@ -1986,7 +1986,7 @@ export class BigQuery extends Service { getJobs(callback: GetJobsCallback): void; getJobs( optionsOrCallback?: GetJobsOptions | GetJobsCallback, - cb?: GetJobsCallback + cb?: GetJobsCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -2017,7 +2017,7 @@ export class BigQuery extends Service { return job; }); callback!(null, jobs, nextQuery, resp); - } + }, ); } @@ -2165,12 +2165,12 @@ export class BigQuery extends Service { query( query: string, options: QueryOptions, - callback?: QueryRowsCallback + callback?: QueryRowsCallback, ): void; query( query: Query, options: QueryOptions, - callback?: SimpleQueryRowsCallback + callback?: SimpleQueryRowsCallback, ): void; query(query: string, callback?: QueryRowsCallback): void; query(query: Query, callback?: SimpleQueryRowsCallback): void; @@ -2180,7 +2180,7 @@ export class BigQuery extends Service { | QueryOptions | SimpleQueryRowsCallback | QueryRowsCallback, - cb?: SimpleQueryRowsCallback | QueryRowsCallback + cb?: SimpleQueryRowsCallback | QueryRowsCallback, ): void | Promise | Promise { let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -2215,7 +2215,7 @@ export class BigQuery extends Service { return; } - this.runJobsQuery(queryReq, (err, job, res) => { + void this.runJobsQuery(queryReq, (err, job, res) => { this.trace_('[runJobsQuery callback]: ', query, err, job, res); if (err) { (callback as SimpleQueryRowsCallback)(err, null, job); @@ -2247,7 +2247,7 @@ export class BigQuery extends Service { // If timeout override was provided, return error. if (queryReq.timeoutMs) { const err = new Error( - `The query did not complete before ${queryReq.timeoutMs}ms` + `The query did not complete before ${queryReq.timeoutMs}ms`, ); (callback as SimpleQueryRowsCallback)(err, null, job); return; @@ -2269,7 +2269,7 @@ export class BigQuery extends Service { */ private buildQueryRequest_( query: string | Query, - options: QueryOptions + options: QueryOptions, ): bigquery.IQueryRequest | undefined { if (process.env.FAST_QUERY_PATH === 'DISABLED') { return undefined; @@ -2325,7 +2325,7 @@ export class BigQuery extends Service { maxResults: queryObj.maxResults || options.maxResults, query: queryObj.query, useLegacySql: false, - requestId: uuid.v4(), + requestId: randomUUID(), jobCreationMode: 'JOB_CREATION_OPTIONAL', }; if (!this._enableQueryPreview) { @@ -2333,7 +2333,7 @@ export class BigQuery extends Service { } const {parameterMode, params} = this.buildQueryParams_( queryObj.params, - queryObj.types + queryObj.types, ); if (params) { req.queryParameters = params; @@ -2346,7 +2346,7 @@ export class BigQuery extends Service { private runJobsQuery( req: bigquery.IQueryRequest, - callback?: JobsQueryCallback + callback?: JobsQueryCallback, ): void | Promise { this.trace_('[runJobsQuery]', req, callback); this.request( @@ -2371,7 +2371,7 @@ export class BigQuery extends Service { job = this.job(res.queryId); // stateless query } callback!(null, job, res); - } + }, ); } @@ -2443,7 +2443,7 @@ function convertSchemaFieldValue( wrapIntegers: boolean | IntegerTypeCastOptions; selectedFields?: string[]; parseJSON?: boolean; - } + }, ) { if (is.null(value)) { return value; @@ -2471,7 +2471,7 @@ function convertSchemaFieldValue( ? typeof wrapIntegers === 'object' ? 
BigQuery.int( {integerValue: value, schemaFieldName: schemaField.name}, - wrapIntegers + wrapIntegers, ).valueOf() : BigQuery.int(value) : Number(value); @@ -2519,7 +2519,7 @@ function convertSchemaFieldValue( case 'RANGE': { value = BigQueryRange.fromSchemaValue_( value, - schemaField.rangeElementType!.type! + schemaField.rangeElementType!.type!, ); break; } @@ -2546,7 +2546,7 @@ export class BigQueryRange { if (typeof value === 'string') { if (!elementType) { throw new Error( - 'invalid RANGE. Element type required when using RANGE API string.' + 'invalid RANGE. Element type required when using RANGE API string.', ); } @@ -2559,7 +2559,7 @@ export class BigQueryRange { if (start && end) { if (typeof start !== typeof end) { throw Error( - 'upper and lower bound on a RANGE should be of the same type.' + 'upper and lower bound on a RANGE should be of the same type.', ); } } @@ -2608,7 +2608,7 @@ export class BigQueryRange { const parts = cleanedValue.split(','); if (parts.length !== 2) { throw new Error( - 'invalid RANGE. See RANGE literal format docs for more information.' + 'invalid RANGE. See RANGE literal format docs for more information.', ); } @@ -2631,13 +2631,13 @@ export class BigQueryRange { start: convertRangeSchemaValue(start), end: convertRangeSchemaValue(end), }, - elementType + elementType, ); } private convertElement_( value?: string | BigQueryDate | BigQueryDatetime | BigQueryTimestamp, - elementType?: string + elementType?: string, ) { if (typeof value === 'string') { if (value === 'UNBOUNDED' || value === 'NULL') { @@ -2796,7 +2796,7 @@ export class BigQueryInt extends Number { private _schemaFieldName: string | undefined; constructor( value: string | number | IntegerTypeCastValue, - typeCastOptions?: IntegerTypeCastOptions + typeCastOptions?: IntegerTypeCastOptions, ) { super(typeof value === 'object' ? value.integerValue : value); this._schemaFieldName = @@ -2811,12 +2811,12 @@ export class BigQueryInt extends Number { if (typeCastOptions) { if (typeof typeCastOptions.integerTypeCastFunction !== 'function') { throw new Error( - 'integerTypeCastFunction is not a function or was not provided.' + 'integerTypeCastFunction is not a function or was not provided.', ); } const typeCastFields = typeCastOptions.fields - ? arrify(typeCastOptions.fields) + ? toArray(typeCastOptions.fields) : undefined; let customCast = true; diff --git a/src/dataset.ts b/src/dataset.ts index e23474365..4b87084c7 100644 --- a/src/dataset.ts +++ b/src/dataset.ts @@ -365,7 +365,7 @@ class Dataset extends ServiceObject { createMethod: ( id: string, optionsOrCallback?: CreateDatasetOptions | DatasetCallback, - cb?: DatasetCallback + cb?: DatasetCallback, ) => { let options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -408,7 +408,7 @@ class Dataset extends ServiceObject { // Override projectId if provided reqOpts.uri = reqOpts.uri.replace( `/projects/${this.bigQuery.projectId}/`, - `/projects/${this.projectId}/` + `/projects/${this.projectId}/`, ); } return reqOpts; @@ -531,7 +531,7 @@ class Dataset extends ServiceObject { createQueryJob(options: string | Query, callback: JobCallback): void; createQueryJob( options: string | Query, - callback?: JobCallback + callback?: JobCallback, ): void | Promise { if (typeof options === 'string') { options = { @@ -633,12 +633,12 @@ class Dataset extends ServiceObject { createRoutine( id: string, config: RoutineMetadata, - callback: RoutineCallback + callback: RoutineCallback, ): void; createRoutine( id: string, config: RoutineMetadata, - callback?: RoutineCallback + callback?: RoutineCallback, ): void | Promise { const json = Object.assign({}, config, { routineReference: { @@ -663,7 +663,7 @@ class Dataset extends ServiceObject { const routine = this.routine(resp.routineReference.routineId); routine.metadata = resp; callback!(null, routine, resp); - } + }, ); } @@ -726,13 +726,13 @@ class Dataset extends ServiceObject { createTable( id: string, options: TableMetadata, - callback: TableCallback + callback: TableCallback, ): void; createTable(id: string, callback: TableCallback): void; createTable( id: string, optionsOrCallback?: TableMetadata | TableCallback, - cb?: TableCallback + cb?: TableCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -764,7 +764,7 @@ class Dataset extends ServiceObject { table.metadata = resp; callback!(null, table, resp); - } + }, ); } @@ -818,7 +818,7 @@ class Dataset extends ServiceObject { delete(callback: DeleteCallback): void; delete( optionsOrCallback?: DeleteCallback | DatasetDeleteOptions, - callback?: DeleteCallback + callback?: DeleteCallback, ): void | Promise<[Metadata]> { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -835,7 +835,7 @@ class Dataset extends ServiceObject { uri: '', qs: query, }, - callback! + callback!, ); } @@ -921,7 +921,7 @@ class Dataset extends ServiceObject { getModels(callback: GetModelsCallback): void; getModels( optsOrCb?: GetModelsOptions | GetModelsCallback, - cb?: GetModelsCallback + cb?: GetModelsCallback, ): void | Promise { const options = typeof optsOrCb === 'object' ? optsOrCb : {}; const callback = typeof optsOrCb === 'function' ? optsOrCb : cb; @@ -951,7 +951,7 @@ class Dataset extends ServiceObject { }); callback!(null, models, nextQuery, resp); - } + }, ); } @@ -1035,7 +1035,7 @@ class Dataset extends ServiceObject { getRoutines(callback: GetRoutinesCallback): void; getRoutines( optsOrCb?: GetRoutinesOptions | GetRoutinesCallback, - cb?: GetRoutinesCallback + cb?: GetRoutinesCallback, ): void | Promise { const options = typeof optsOrCb === 'object' ? optsOrCb : {}; const callback = typeof optsOrCb === 'function' ? optsOrCb : cb; @@ -1065,7 +1065,7 @@ class Dataset extends ServiceObject { }); callback!(null, routines, nextQuery, resp); - } + }, ); } @@ -1151,7 +1151,7 @@ class Dataset extends ServiceObject { getTables(callback: GetTablesCallback): void; getTables( optionsOrCallback?: GetTablesOptions | GetTablesCallback, - cb?: GetTablesCallback + cb?: GetTablesCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -1185,7 +1185,7 @@ class Dataset extends ServiceObject { return table; }); callback!(null, tables, nextQuery, resp); - } + }, ); } @@ -1230,7 +1230,7 @@ class Dataset extends ServiceObject { query(options: string, callback: SimpleQueryRowsCallback): void; query( options: Query | string, - callback?: SimpleQueryRowsCallback + callback?: SimpleQueryRowsCallback, ): void | Promise { if (typeof options === 'string') { options = { @@ -1307,7 +1307,7 @@ class Dataset extends ServiceObject { location: this.location, projectId: this.projectId, }, - options + options, ); return new Table(this, id, options); } diff --git a/src/job.ts b/src/job.ts index 04ce0ec2d..d39f950b7 100644 --- a/src/job.ts +++ b/src/job.ts @@ -132,9 +132,8 @@ export type QueryResultsOptions = { class Job extends Operation { bigQuery: BigQuery; location?: string; - projectId?: string; getQueryResultsStream( - options?: QueryResultsOptions + options?: QueryResultsOptions, ): ResourceStream { // placeholder body, overwritten in constructor return new ResourceStream({}, () => {}); @@ -382,7 +381,7 @@ class Job extends Operation { * ``` */ this.getQueryResultsStream = paginator.streamify( - 'getQueryResultsAsStream_' + 'getQueryResultsAsStream_', ); } @@ -449,7 +448,7 @@ class Job extends Operation { uri: '/cancel', qs, }, - callback! + callback!, ); } @@ -530,12 +529,12 @@ class Job extends Operation { getQueryResults(options?: QueryResultsOptions): Promise; getQueryResults( options: QueryResultsOptions, - callback: QueryRowsCallback + callback: QueryRowsCallback, ): void; getQueryResults(callback: QueryRowsCallback): void; getQueryResults( optionsOrCallback?: QueryResultsOptions | QueryRowsCallback, - cb?: QueryRowsCallback + cb?: QueryRowsCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -546,13 +545,13 @@ class Job extends Operation { location: this.location, 'formatOptions.useInt64Timestamp': true, }, - options + options, ); this.trace_( '[getQueryResults]', this.id, options.pageToken, - options.startIndex + options.startIndex, ); const wrapIntegers = qs.wrapIntegers ? qs.wrapIntegers : false; @@ -610,7 +609,7 @@ class Job extends Operation { // If timeout override was provided, return error. 
if (timeoutOverride) { const err = new Error( - `The query did not complete before ${timeoutOverride}ms` + `The query did not complete before ${timeoutOverride}ms`, ); callback!(err, null, nextQuery, resp); return; @@ -625,7 +624,7 @@ class Job extends Operation { } delete resp.rows; callback!(null, rows, nextQuery, resp); - } + }, ); } @@ -637,7 +636,7 @@ class Job extends Operation { */ getQueryResultsAsStream_( options: QueryResultsOptions, - callback: QueryRowsCallback + callback: QueryRowsCallback, ): void { options = extend({autoPaginate: false}, options); this.getQueryResults(options, callback); @@ -655,7 +654,7 @@ class Job extends Operation { * @param {function} callback */ poll_(callback: MetadataCallback): void { - this.getMetadata((err: Error, metadata: Metadata) => { + void this.getMetadata((err: Error, metadata: Metadata) => { if (!err && metadata.status && metadata.status.errorResult) { err = new util.ApiError(metadata.status); } diff --git a/src/logger.ts b/src/logger.ts index d8dfc0177..f59f45ca3 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -30,7 +30,7 @@ export function logger(source: string, msg: string, ...otherArgs: any[]) { const time = new Date().toISOString(); const formattedMsg = util.format( `D ${time} | ${source} | ${msg} |`, - ...otherArgs + ...otherArgs, ); logFunction(formattedMsg); } diff --git a/src/model.ts b/src/model.ts index ea0ea73a5..3c460e437 100644 --- a/src/model.ts +++ b/src/model.ts @@ -16,7 +16,7 @@ import {ServiceObject, util} from '@google-cloud/common'; import {promisifyAll} from '@google-cloud/promisify'; -import arrify = require('arrify'); +import {toArray} from './util'; import * as extend from 'extend'; import { BigQuery, @@ -378,18 +378,18 @@ class Model extends ServiceObject { */ createExtractJob( destination: string | File, - options?: CreateExtractJobOptions + options?: CreateExtractJobOptions, ): Promise; createExtractJob( destination: string | File, options: CreateExtractJobOptions, - callback: JobCallback + callback: JobCallback, ): void; createExtractJob(destination: string | File, callback: JobCallback): void; createExtractJob( destination: string | File, optionsOrCallback?: CreateExtractJobOptions | JobCallback, - cb?: JobCallback + cb?: JobCallback, ): void | Promise { let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -397,7 +397,7 @@ class Model extends ServiceObject { typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; options = extend(true, options, { - destinationUris: (arrify(destination) as Array).map( + destinationUris: (toArray(destination) as Array).map( dest => { if (util.isCustomType(dest, 'storage/file')) { return ( @@ -409,7 +409,7 @@ class Model extends ServiceObject { return dest; } throw new Error('Destination must be a string or a File object.'); - } + }, ), }); @@ -527,18 +527,18 @@ class Model extends ServiceObject { */ extract( destination: string | File, - options?: CreateExtractJobOptions + options?: CreateExtractJobOptions, ): Promise; extract( destination: string | File, options: CreateExtractJobOptions, - callback?: JobMetadataCallback + callback?: JobMetadataCallback, ): void; extract(destination: string | File, callback?: JobMetadataCallback): void; extract( destination: string | File, optionsOrCallback?: CreateExtractJobOptions | JobMetadataCallback, - cb?: JobMetadataCallback + cb?: JobMetadataCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; diff --git a/src/routine.ts b/src/routine.ts index 3d10af1b1..3c8b3dd6a 100644 --- a/src/routine.ts +++ b/src/routine.ts @@ -295,19 +295,21 @@ class Routine extends ServiceObject { setMetadata(metadata: RoutineMetadata, callback: ResponseCallback): void; setMetadata( metadata: RoutineMetadata, - callback?: ResponseCallback + callback?: ResponseCallback, ): void | Promise { // per the python client, it would appear that in order to update a routine // you need to send the routine in its entirety, not just the updated fields - this.getMetadata((err: Error | null, fullMetadata: RoutineMetadata) => { - if (err) { - callback!(err); - return; - } + void this.getMetadata( + (err: Error | null, fullMetadata: RoutineMetadata) => { + if (err) { + callback!(err); + return; + } - const updatedMetadata = extend(true, {}, fullMetadata, metadata); - super.setMetadata(updatedMetadata, callback!); - }); + const updatedMetadata = extend(true, {}, fullMetadata, metadata); + void super.setMetadata(updatedMetadata, callback!); + }, + ); } } diff --git a/src/rowQueue.ts b/src/rowQueue.ts index 534921cea..8f87cf517 100644 --- a/src/rowQueue.ts +++ b/src/rowQueue.ts @@ -16,7 +16,7 @@ import * as common from '@google-cloud/common'; import * as extend from 'extend'; -import * as uuid from 'uuid'; +import {randomUUID} from 'crypto'; import {RequestCallback, Table, InsertStreamOptions} from '.'; import {GoogleErrorBody} from '@google-cloud/common/build/src/util'; import bigquery from './types'; @@ -108,7 +108,7 @@ export class RowQueue { }; if (this.insertRowsOptions.createInsertId !== false) { - row.insertId = uuid.v4(); + row.insertId = randomUUID(); } } @@ -153,7 +153,7 @@ export class RowQueue { _insert( rows: RowMetadata | RowMetadata[], callbacks: InsertRowsCallback[], - cb?: InsertRowsCallback + cb?: InsertRowsCallback, ): void { const json = extend(true, {}, this.insertRowsOptions, {rows}); @@ -180,7 +180,7 @@ export class RowQueue { // eslint-disable-next-line @typescript-eslint/no-explicit-any row: rows[(insertError as any).index], }; - } + }, ); if (partialFailures.length > 0) { @@ -197,7 +197,7 @@ export class RowQueue { cb?.(err, resp); } cb?.(err, resp); - } + }, ); } @@ -213,7 +213,7 @@ export class RowQueue { const {maxBytes, maxRows, maxMilliseconds} = extend( true, defaults, - options + options, ); this.batchOptions = { diff --git a/src/table.ts b/src/table.ts index 5af4f7ba4..c3154b1ec 100644 --- a/src/table.ts +++ b/src/table.ts @@ -25,7 +25,7 @@ import { } from '@google-cloud/common'; import {paginator, ResourceStream} from '@google-cloud/paginator'; import {promisifyAll} from '@google-cloud/promisify'; -import arrify = require('arrify'); +import {toArray} from './util'; import * as Big from 'big.js'; import * as extend from 'extend'; import {once} from 'events'; @@ -33,7 +33,9 @@ import * as fs from 'fs'; import * as is from 'is'; import * as path from 'path'; import * as streamEvents from 'stream-events'; -import * as uuid from 'uuid'; +import {randomUUID} from 'crypto'; +import * as duplexify from 'duplexify'; + import { BigQuery, Job, @@ -52,12 +54,9 @@ import {GoogleErrorBody} from '@google-cloud/common/build/src/util'; import {Duplex, Writable} from 'stream'; import {JobMetadata} from './job'; import bigquery from './types'; -import {BigQueryRange, IntegerTypeCastOptions} from './bigquery'; +import {IntegerTypeCastOptions} from './bigquery'; import {RowQueue} from './rowQueue'; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const duplexify = 
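rowQueue.ts above drops the uuid dependency in favour of Node's built-in crypto.randomUUID; a small sketch of the swap, mirroring how an insertId is stamped onto a queued row (the row shape here is illustrative):

import {randomUUID} from 'crypto';

interface QueuedRow {
  json: Record<string, unknown>;
  insertId?: string;
}

// randomUUID() returns an RFC 4122 version-4 UUID string, the same shape
// uuid.v4() produced, so best-effort de-duplication keeps working.
const row: QueuedRow = {json: {name: 'kitten'}};
row.insertId = randomUUID();
console.log(row.insertId);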
require('duplexify'); - // This is supposed to be a @google-cloud/storage `File` type. The storage npm // module includes these types, but is current installed as a devDependency. // Unless it's included as a production dependency, the types would not be @@ -539,7 +538,7 @@ class Table extends ServiceObject { * @returns {object} Table schema in the format the API expects. */ static createSchemaFromString_(str: string): TableSchema { - return str.split(/\s*,\s*/).reduce( + return str.split(',').reduce( (acc: {fields: Array<{name: string; type: string}>}, pair) => { acc.fields.push({ name: pair.split(':')[0].trim(), @@ -549,7 +548,7 @@ class Table extends ServiceObject { }, { fields: [], - } + }, ); } @@ -605,11 +604,11 @@ class Table extends ServiceObject { return Object.keys(value).reduce( (acc: {[index: string]: {} | null}, key) => { acc[key] = Table.encodeValue_( - (value as {[index: string]: {} | null})[key] + (value as {[index: string]: {} | null})[key], ); return acc; }, - {} + {}, ); } return value; @@ -722,18 +721,18 @@ class Table extends ServiceObject { */ copy( destination: Table, - metadata?: CopyTableMetadata + metadata?: CopyTableMetadata, ): Promise; copy( destination: Table, metadata: CopyTableMetadata, - callback: JobMetadataCallback + callback: JobMetadataCallback, ): void; copy(destination: Table, callback: JobMetadataCallback): void; copy( destination: Table, metadataOrCallback?: CopyTableMetadata | JobMetadataCallback, - cb?: JobMetadataCallback + cb?: JobMetadataCallback, ): void | Promise { const metadata = typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; @@ -751,7 +750,7 @@ class Table extends ServiceObject { job!.on('error', callback!).on('complete', (metadata: JobMetadata) => { callback!(null, metadata); }); - } + }, ); } @@ -818,18 +817,18 @@ class Table extends ServiceObject { */ copyFrom( sourceTables: Table | Table[], - metadata?: CopyTableMetadata + metadata?: CopyTableMetadata, ): Promise; copyFrom( sourceTables: Table | Table[], metadata: CopyTableMetadata, - callback: JobMetadataCallback + callback: JobMetadataCallback, ): void; copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void; copyFrom( sourceTables: Table | Table[], metadataOrCallback?: CopyTableMetadata | JobMetadataCallback, - cb?: JobMetadataCallback + cb?: JobMetadataCallback, ): void | Promise { const metadata = typeof metadataOrCallback === 'object' ? 
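createSchemaFromString_ above now splits only on ',' and leans on the later .trim() to clean up field names; a quick sketch of what that split sees for a typical string schema:

// ' created:timestamp' keeps its leading space after split(','); the name
// half is trimmed, so the parsed field name is still 'created'.
const pairs = 'id:integer, created:timestamp'.split(',');
const names = pairs.map(pair => pair.split(':')[0].trim());
console.log(names); // ['id', 'created']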
metadataOrCallback : {}; @@ -901,18 +900,18 @@ class Table extends ServiceObject { */ createCopyJob( destination: Table, - metadata?: CreateCopyJobMetadata + metadata?: CreateCopyJobMetadata, ): Promise; createCopyJob( destination: Table, metadata: CreateCopyJobMetadata, - callback: JobCallback + callback: JobCallback, ): void; createCopyJob(destination: Table, callback: JobCallback): void; createCopyJob( destination: Table, metadataOrCallback?: CreateCopyJobMetadata | JobCallback, - cb?: JobCallback + cb?: JobCallback, ): void | Promise { if (!(destination instanceof Table)) { throw new Error('Destination must be a Table object.'); @@ -1021,20 +1020,20 @@ class Table extends ServiceObject { */ createCopyFromJob( source: Table | Table[], - metadata?: CopyTableMetadata + metadata?: CopyTableMetadata, ): Promise; createCopyFromJob( source: Table | Table[], metadata: CopyTableMetadata, - callback: JobCallback + callback: JobCallback, ): void; createCopyFromJob(source: Table | Table[], callback: JobCallback): void; createCopyFromJob( source: Table | Table[], metadataOrCallback?: CopyTableMetadata | JobCallback, - cb?: JobCallback + cb?: JobCallback, ): void | Promise { - const sourceTables = arrify(source) as Table[]; + const sourceTables = toArray(source) as Table[]; sourceTables.forEach(sourceTable => { if (!(sourceTable instanceof Table)) { throw new Error('Source must be a Table object.'); @@ -1167,18 +1166,18 @@ class Table extends ServiceObject { */ createExtractJob( destination: File, - options?: CreateExtractJobOptions + options?: CreateExtractJobOptions, ): Promise; createExtractJob( destination: File, options: CreateExtractJobOptions, - callback: JobCallback + callback: JobCallback, ): void; createExtractJob(destination: File, callback: JobCallback): void; createExtractJob( destination: File, optionsOrCallback?: CreateExtractJobOptions | JobCallback, - cb?: JobCallback + cb?: JobCallback, ): void | Promise { let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1186,7 +1185,7 @@ class Table extends ServiceObject { typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; options = extend(true, options, { - destinationUris: arrify(destination).map(dest => { + destinationUris: toArray(destination).map(dest => { if (!util.isCustomType(dest, 'storage/file')) { throw new Error('Destination must be a File object.'); } @@ -1340,18 +1339,18 @@ class Table extends ServiceObject { */ createLoadJob( source: string | File | File[], - metadata?: JobLoadMetadata + metadata?: JobLoadMetadata, ): Promise; createLoadJob( source: string | File | File[], metadata: JobLoadMetadata, - callback: JobCallback + callback: JobCallback, ): void; createLoadJob(source: string | File | File[], callback: JobCallback): void; createLoadJob( source: string | File | File[], metadataOrCallback?: JobLoadMetadata | JobCallback, - cb?: JobCallback + cb?: JobCallback, ): void | Promise { const metadata = typeof metadataOrCallback === 'object' ? 
metadataOrCallback : {}; @@ -1360,7 +1359,7 @@ class Table extends ServiceObject { this._createLoadJob(source, metadata).then( ([resp]) => callback!(null, resp, resp.metadata), - err => callback!(err) + err => callback!(err), ); } @@ -1372,7 +1371,7 @@ class Table extends ServiceObject { */ async _createLoadJob( source: string | File | File[], - metadata: JobLoadMetadata + metadata: JobLoadMetadata, ): Promise { if (metadata.format) { metadata.sourceFormat = FORMATS[metadata.format.toLowerCase()]; @@ -1429,7 +1428,7 @@ class Table extends ServiceObject { } extend(true, body.configuration.load, metadata, { - sourceUris: arrify(source).map(src => { + sourceUris: toArray(source).map(src => { if (!util.isCustomType(src, 'storage/file')) { throw new Error('Source must be a File object.'); } @@ -1460,7 +1459,7 @@ class Table extends ServiceObject { createQueryJob(options: Query, callback: JobCallback): void; createQueryJob( options: Query, - callback?: JobCallback + callback?: JobCallback, ): void | Promise { return this.dataset.createQueryJob(options, callback!); } @@ -1516,10 +1515,10 @@ class Table extends ServiceObject { tableId: this.id, }, }, - metadata + metadata, ); - let jobId = metadata.jobId || uuid.v4(); + let jobId = metadata.jobId || randomUUID(); if (metadata.jobId) { delete metadata.jobId; @@ -1531,35 +1530,38 @@ class Table extends ServiceObject { } const dup = streamEvents(duplexify()); - + const jobMetadata = { + configuration: { + load: metadata, + }, + jobReference: { + jobId, + projectId: this.dataset.projectId, + location: this.location, + }, + }; dup.once('writing', () => { util.makeWritableStream( dup, { makeAuthenticatedRequest: this.bigQuery.makeAuthenticatedRequest, - metadata: { - configuration: { - load: metadata, - }, - jobReference: { - jobId, - projectId: this.dataset.projectId, - location: this.location, - }, - } as {}, + metadata: jobMetadata as {}, request: { uri: `${this.bigQuery.apiEndpoint}/upload/bigquery/v2/projects/${this.dataset.projectId}/jobs`, }, }, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (data: any) => { - const job = this.bigQuery.job(data.jobReference.jobId, { - location: data.jobReference.location, - projectId: data.jobReference.projectId, - }); - job.metadata = data; + (data: bigquery.IJob) => { + let job: Job | null = null; + const jobRef = data.jobReference; + if (jobRef && jobRef.jobId) { + job = this.bigQuery.job(jobRef.jobId, { + location: jobRef.location, + projectId: jobRef.projectId, + }); + job.metadata = data; + } dup.emit('job', job); - } + }, ); }); return dup; @@ -1718,18 +1720,18 @@ class Table extends ServiceObject { */ extract( destination: File, - options?: CreateExtractJobOptions + options?: CreateExtractJobOptions, ): Promise; extract( destination: File, options: CreateExtractJobOptions, - callback?: JobMetadataCallback + callback?: JobMetadataCallback, ): void; extract(destination: File, callback?: JobMetadataCallback): void; extract( destination: File, optionsOrCallback?: CreateExtractJobOptions | JobMetadataCallback, - cb?: JobMetadataCallback + cb?: JobMetadataCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -1820,7 +1822,7 @@ class Table extends ServiceObject { */ getRows( optionsOrCallback?: GetRowsOptions | RowsCallback, - cb?: RowsCallback + cb?: RowsCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? 
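The write-stream hunk above builds the load-job metadata up front and now only constructs a Job when the response actually carries a jobReference.jobId, emitting null otherwise; a sketch of consuming that through the public createWriteStream wrapper, assuming the wrapper forwards the 'job' event (file name and dataset/table ids are placeholders):

import * as fs from 'fs';
import {BigQuery} from '@google-cloud/bigquery';

const table = new BigQuery().dataset('my_dataset').table('my_table');

fs.createReadStream('rows.ndjson')
  .pipe(table.createWriteStream({sourceFormat: 'NEWLINE_DELIMITED_JSON'}))
  .on('job', job => {
    // With the change above, job may be null if the upload response had no
    // job reference, so guard before dereferencing it.
    if (job) {
      console.log('load job started:', job.id);
    }
  })
  .on('error', err => console.error(err));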
optionsOrCallback : {}; @@ -1837,7 +1839,7 @@ class Table extends ServiceObject { err: Error | null, rows: TableRow[] | null, nextQuery: GetRowsOptions | null, - resp: bigquery.ITableList + resp: bigquery.ITableList, ) => { if (err) { callback!(err, null, null, resp); @@ -1855,7 +1857,7 @@ class Table extends ServiceObject { { 'formatOptions.useInt64Timestamp': true, }, - options + options, ); this.request( @@ -1877,20 +1879,20 @@ class Table extends ServiceObject { if (resp.rows && resp.rows.length > 0 && !this.metadata.schema) { // We don't know the schema for this table yet. Do a quick stat. - this.getMetadata( + void this.getMetadata( (err: Error, metadata: Metadata, apiResponse: bigquery.ITable) => { if (err) { onComplete(err, null, null, apiResponse!); return; } onComplete(null, resp.rows, nextQuery, resp); - } + }, ); return; } onComplete(null, resp.rows, nextQuery, resp); - } + }, ); } @@ -2046,18 +2048,18 @@ class Table extends ServiceObject { */ insert( rows: RowMetadata | RowMetadata[], - options?: InsertRowsOptions + options?: InsertRowsOptions, ): Promise; insert( rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, - callback: InsertRowsCallback + callback: InsertRowsCallback, ): void; insert(rows: RowMetadata | RowMetadata[], callback: InsertRowsCallback): void; insert( rows: RowMetadata | RowMetadata[], optionsOrCallback?: InsertRowsOptions | InsertRowsCallback, - cb?: InsertRowsCallback + cb?: InsertRowsCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' @@ -2070,7 +2072,7 @@ class Table extends ServiceObject { if (callback) { promise.then( resp => callback(null, resp), - err => callback(err, null) + err => callback(err, null), ); } else { return promise.then(r => [r]); @@ -2087,7 +2089,7 @@ class Table extends ServiceObject { */ private async _insertAndCreateTable( rows: RowMetadata | RowMetadata[], - options: InsertRowsOptions + options: InsertRowsOptions, ): Promise { const {schema} = options; const delay = 60000; @@ -2128,7 +2130,7 @@ class Table extends ServiceObject { */ private async _insertWithRetry( rows: RowMetadata | RowMetadata[], - options: InsertRowsOptions + options: InsertRowsOptions, ): Promise { const {partialRetries = 3} = options; let error: GoogleErrorBody; @@ -2165,9 +2167,9 @@ class Table extends ServiceObject { */ private async _insert( rows: RowMetadata | RowMetadata[], - options: InsertRowsOptions + options: InsertRowsOptions, ): Promise { - rows = arrify(rows) as RowMetadata[]; + rows = toArray(rows) as RowMetadata[]; if (!rows.length) { throw new Error('You must provide at least 1 row to be inserted.'); @@ -2182,7 +2184,7 @@ class Table extends ServiceObject { }; if (options.createInsertId !== false) { - encoded.insertId = uuid.v4(); + encoded.insertId = randomUUID(); } return encoded; @@ -2212,7 +2214,7 @@ class Table extends ServiceObject { // eslint-disable-next-line @typescript-eslint/no-explicit-any row: rows[(insertError as any).index], }; - } + }, ); if (partialFailures.length > 0) { @@ -2231,7 +2233,7 @@ class Table extends ServiceObject { dup._write = ( chunk: RowMetadata, encoding: BufferEncoding, - cb: Function + cb: Function, ) => { this.rowQueue!.add(chunk, () => {}); cb!(); @@ -2243,12 +2245,12 @@ class Table extends ServiceObject { load( source: string | File | File[], - metadata?: JobLoadMetadata + metadata?: JobLoadMetadata, ): Promise; load( source: string | File | File[], metadata: JobLoadMetadata, - callback: JobMetadataCallback + callback: JobMetadataCallback, ): void; load(source: 
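Several hunks above (poll_, Routine#setMetadata, getRows, Table#setMetadata) prefix callback-driven calls with the void operator; a tiny illustrative sketch of what that buys under @typescript-eslint/no-floating-promises:

async function fetchMetadata(): Promise<string> {
  return 'metadata';
}

function poll(callback: (value: string) => void): void {
  // The result is delivered via the callback, so the returned promise is
  // intentionally ignored; 'void' marks that intent for the lint rule.
  void fetchMetadata().then(callback);
}

poll(value => console.log(value));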
string | File | File[], callback: JobMetadataCallback): void; /** @@ -2333,18 +2335,18 @@ class Table extends ServiceObject { */ load( source: string | File | File[], - metadata?: JobLoadMetadata + metadata?: JobLoadMetadata, ): Promise; load( source: string | File | File[], metadata: JobLoadMetadata, - callback: JobMetadataCallback + callback: JobMetadataCallback, ): void; load(source: string | File | File[], callback: JobMetadataCallback): void; load( source: string | File | File[], metadataOrCallback?: JobLoadMetadata | JobMetadataCallback, - cb?: JobMetadataCallback + cb?: JobMetadataCallback, ): void | Promise { const metadata = typeof metadataOrCallback === 'object' ? metadataOrCallback : {}; @@ -2376,7 +2378,7 @@ class Table extends ServiceObject { query(query: Query, callback: SimpleQueryRowsCallback): void; query( query: Query | string, - callback?: SimpleQueryRowsCallback + callback?: SimpleQueryRowsCallback, ): void | Promise { if (typeof query === 'string') { query = { @@ -2434,14 +2436,14 @@ class Table extends ServiceObject { setMetadata(metadata: SetTableMetadataOptions): Promise; setMetadata( metadata: SetTableMetadataOptions, - callback: ResponseCallback + callback: ResponseCallback, ): void; setMetadata( metadata: SetTableMetadataOptions, - callback?: ResponseCallback + callback?: ResponseCallback, ): void | Promise { const body = Table.formatMetadata_(metadata as TableMetadata); - super.setMetadata(body, callback!); + void super.setMetadata(body, callback!); } /** @@ -2449,12 +2451,12 @@ class Table extends ServiceObject { * @returns {Promise} */ getIamPolicy( - optionsOrCallback?: GetPolicyOptions | PolicyCallback + optionsOrCallback?: GetPolicyOptions | PolicyCallback, ): Promise; getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void; getIamPolicy( optionsOrCallback?: GetPolicyOptions, - cb?: PolicyCallback + cb?: PolicyCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; @@ -2482,7 +2484,7 @@ class Table extends ServiceObject { return; } callback!(null, resp); - } + }, ); } @@ -2492,18 +2494,18 @@ class Table extends ServiceObject { */ setIamPolicy( policy: Policy, - options?: SetPolicyOptions + options?: SetPolicyOptions, ): Promise; setIamPolicy( policy: Policy, options: SetPolicyOptions, - callback: PolicyCallback + callback: PolicyCallback, ): void; setIamPolicy(policy: Policy, callback: PolicyCallback): void; setIamPolicy( policy: Policy, optionsOrCallback?: SetPolicyOptions | PolicyCallback, - cb?: PolicyCallback + cb?: PolicyCallback, ): void | Promise { const options = typeof optionsOrCallback === 'object' ? 
optionsOrCallback : {}; @@ -2528,7 +2530,7 @@ class Table extends ServiceObject { return; } callback!(null, resp); - } + }, ); } @@ -2537,17 +2539,17 @@ class Table extends ServiceObject { * @returns {Promise} */ testIamPermissions( - permissions: string | string[] + permissions: string | string[], ): Promise; testIamPermissions( permissions: string | string[], - callback: PermissionsCallback + callback: PermissionsCallback, ): void; testIamPermissions( permissions: string | string[], - callback?: PermissionsCallback + callback?: PermissionsCallback, ): void | Promise { - permissions = arrify(permissions); + permissions = toArray(permissions); const json = extend(true, {}, {permissions}); @@ -2563,7 +2565,7 @@ class Table extends ServiceObject { return; } callback!(null, resp); - } + }, ); } } diff --git a/src/util.ts b/src/util.ts new file mode 100644 index 000000000..c563f4c16 --- /dev/null +++ b/src/util.ts @@ -0,0 +1,37 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * Convert a value to an array. Replacement to arrify + * @internal + */ +export function toArray(value: any) { + if (value === null || value === undefined) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + if (typeof value === 'string') { + return [value]; + } + + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } + + return [value]; +} diff --git a/system-test/bigquery.ts b/system-test/bigquery.ts index 340a29dc2..69c779fdf 100644 --- a/system-test/bigquery.ts +++ b/system-test/bigquery.ts @@ -22,7 +22,7 @@ import * as assert from 'assert'; import {describe, it, before, after} from 'mocha'; import * as Big from 'big.js'; import * as fs from 'fs'; -import * as uuid from 'uuid'; +import {randomUUID} from 'crypto'; import {Readable} from 'stream'; import { @@ -282,7 +282,7 @@ describe('BigQuery', () => { }); it('should honor the job id option', done => { - const jobId = `hi-im-a-job-id-${uuid.v4()}`; + const jobId = `hi-im-a-job-id-${randomUUID()}`; const options = {query, jobId}; bigquery.createQueryJob(options, (err, job) => { @@ -390,7 +390,7 @@ describe('BigQuery', () => { assert.notEqual(foundError, null); assert.equal( foundError?.message, - 'The query did not complete before 1000ms' + 'The query did not complete before 1000ms', ); }); @@ -409,7 +409,7 @@ describe('BigQuery', () => { assert.notEqual(foundError, null); assert.equal( foundError?.message, - 'The query did not complete before 1000ms' + 'The query did not complete before 1000ms', ); }); }); @@ -425,7 +425,7 @@ describe('BigQuery', () => { assert.strictEqual(rows!.length, 10); assert.strictEqual(typeof nextQuery!.pageToken, 'string'); done(); - } + }, ); }); @@ -441,7 +441,7 @@ describe('BigQuery', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any assert((resp as any).statistics.query); done(); - } + }, ); }); @@ -512,7 +512,7 @@ describe('BigQuery', () => { etag: 'a-fake-etag', description: 'oh no!', }), - /precondition/i + 
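The new src/util.ts above replaces arrify with an in-repo helper; based on the implementation shown, toArray behaves like this (the import path is the module's location inside src/):

import {toArray} from './util';

console.log(toArray(undefined));          // []
console.log(toArray(null));               // []
console.log(toArray('gs://bucket/file')); // ['gs://bucket/file']  strings stay whole
console.log(toArray(['a', 'b']));         // ['a', 'b']            arrays pass through
console.log(toArray(new Set([1, 2])));    // [1, 2]                other iterables are spread
console.log(toArray({name: 'row'}));      // [{name: 'row'}]       everything else is wrapped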
/precondition/i, ); }); @@ -576,7 +576,7 @@ describe('BigQuery', () => { // eslint-disable-next-line @typescript-eslint/no-var-requires const SCHEMA = require('../../system-test/data/schema.json'); const TEST_DATA_FILE = require.resolve( - '../../system-test/data/location-test-data.json' + '../../system-test/data/location-test-data.json', ); before(async () => { @@ -647,7 +647,7 @@ describe('BigQuery', () => { assert.strictEqual(err.errors![0].reason, 'notFound'); assert.strictEqual(job!.location, 'US'); done(); - } + }, ); }); @@ -881,7 +881,7 @@ describe('BigQuery', () => { describe('BigQuery/Table', () => { const TEST_DATA_JSON_PATH = require.resolve( - '../../system-test/data/kitten-test-data.json' + '../../system-test/data/kitten-test-data.json', ); it('should have created the correct schema', () => { @@ -986,14 +986,14 @@ describe('BigQuery', () => { }; const [basicMetadata] = await table.get(options); const basicMetadataProps = Object.values( - Object.keys(basicMetadata.metadata) + Object.keys(basicMetadata.metadata), ); assert.strictEqual(basicMetadataProps.includes('numBytes'), false); assert.strictEqual(basicMetadata.metadata.numBytes, undefined); assert.strictEqual( basicMetadataProps.includes('lastModifiedTime'), - false + false, ); assert.strictEqual(basicMetadata.metadata.lastModifiedTime, undefined); }); @@ -1013,7 +1013,7 @@ describe('BigQuery', () => { TABLES.map(tableItem => { const tableInstance = tableItem.table; return tableInstance!.create({schema: SCHEMA}); - }) + }), ); const table1Instance = TABLES[0].table; await table1Instance.insert(TABLES[0].data); @@ -1210,7 +1210,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1229,7 +1229,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1248,7 +1248,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1266,7 +1266,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1285,7 +1285,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1299,7 +1299,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 5); done(); - } + }, ); }); @@ -1343,7 +1343,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1357,7 +1357,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1371,7 +1371,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1385,7 +1385,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1399,7 +1399,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1413,7 +1413,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1424,7 +1424,7 @@ describe('BigQuery', () => { params: [ bigquery.range( '[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', - 'TIMESTAMP' + 'TIMESTAMP', ), ], }, @@ -1432,7 +1432,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1452,7 +1452,7 @@ describe('BigQuery', () => { 
assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); }); @@ -1477,7 +1477,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1498,7 +1498,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1519,7 +1519,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1539,7 +1539,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1560,7 +1560,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1576,7 +1576,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 5); done(); - } + }, ); }); @@ -1622,7 +1622,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1638,7 +1638,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1654,7 +1654,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1670,7 +1670,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1686,7 +1686,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1702,7 +1702,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1713,7 +1713,7 @@ describe('BigQuery', () => { params: { r: bigquery.range( '[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', - 'TIMESTAMP' + 'TIMESTAMP', ), }, }, @@ -1721,7 +1721,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); @@ -1746,7 +1746,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(rows!.length, 1); done(); - } + }, ); }); }); @@ -1784,7 +1784,7 @@ describe('BigQuery', () => { const GEOGRAPHY = bigquery.geography('POINT(1 2)'); const RANGE = bigquery.range( '[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', - 'TIMESTAMP' + 'TIMESTAMP', ); before(() => { @@ -1930,9 +1930,9 @@ describe('BigQuery', () => { }); function generateName(resourceType: string) { - return `${GCLOUD_TESTS_PREFIX}_${resourceType}_${uuid.v1()}`.replace( + return `${GCLOUD_TESTS_PREFIX}_${resourceType}_${randomUUID()}`.replace( /-/g, - '_' + '_', ); } diff --git a/system-test/install.ts b/system-test/install.ts index 454fe9e25..f2b8454d2 100644 --- a/system-test/install.ts +++ b/system-test/install.ts @@ -24,7 +24,7 @@ describe('📦 pack-n-play test', () => { sample: { description: 'TypeScript user can use the type definitions', ts: readFileSync( - './system-test/fixtures/sample/src/index.ts' + './system-test/fixtures/sample/src/index.ts', ).toString(), dependencies: ['@types/node'], }, diff --git a/test/bigquery.ts b/test/bigquery.ts index e1539897a..d63cdbb4b 100644 --- a/test/bigquery.ts +++ b/test/bigquery.ts @@ -20,15 +20,15 @@ import { util, } from '@google-cloud/common'; import * as pfy from '@google-cloud/promisify'; -import arrify = require('arrify'); import * as assert from 'assert'; import {describe, it, after, afterEach, before, beforeEach} from 'mocha'; import * as Big from 'big.js'; import * as extend from 'extend'; import * as proxyquire from 
'proxyquire'; import * as sinon from 'sinon'; -import * as uuid from 'uuid'; +import * as crypto from 'crypto'; +import {toArray} from '../src/util'; import { BigQueryInt, BigQueryDate, @@ -46,7 +46,7 @@ import { import {SinonStub} from 'sinon'; import {PreciseDate} from '@google-cloud/precise-date'; -const fakeUuid = extend(true, {}, uuid); +const fakeCrypto = extend(true, {}, crypto); class FakeApiError { calledWith_: Array<{}>; @@ -125,7 +125,7 @@ const fakePaginator = { if (c.name !== 'BigQuery') { return; } - methods = arrify(methods); + methods = toArray(methods); assert.strictEqual(c.name, 'BigQuery'); assert.deepStrictEqual(methods, ['getDatasets', 'getJobs']); extended = true; @@ -166,7 +166,7 @@ describe('BigQuery', () => { before(() => { delete process.env.BIGQUERY_EMULATOR_HOST; BigQuery = proxyquire('../src/bigquery', { - uuid: fakeUuid, + crypto: fakeCrypto, './dataset': { Dataset: FakeDataset, }, @@ -227,7 +227,7 @@ describe('BigQuery', () => { assert.deepStrictEqual( calledWith.packageJson, // eslint-disable-next-line @typescript-eslint/no-var-requires - require('../../package.json') + require('../../package.json'), ); }); @@ -249,11 +249,11 @@ describe('BigQuery', () => { const calledWith = bq.calledWith_[0]; assert.strictEqual( calledWith.baseUrl, - `https://${protocollessApiEndpoint}/bigquery/v2` + `https://${protocollessApiEndpoint}/bigquery/v2`, ); assert.strictEqual( calledWith.apiEndpoint, - `https://${protocollessApiEndpoint}` + `https://${protocollessApiEndpoint}`, ); }); @@ -275,11 +275,11 @@ describe('BigQuery', () => { const calledWith = bq.calledWith_[0]; assert.strictEqual( calledWith.baseUrl, - 'https://bigquery.fake-tpc-env.example.com/bigquery/v2' + 'https://bigquery.fake-tpc-env.example.com/bigquery/v2', ); assert.strictEqual( calledWith.apiEndpoint, - 'https://bigquery.fake-tpc-env.example.com' + 'https://bigquery.fake-tpc-env.example.com', ); }); @@ -372,7 +372,7 @@ describe('BigQuery', () => { assert.strictEqual(calledWith.baseUrl, EMULATOR_HOST); assert.strictEqual( calledWith.apiEndpoint, - 'https://internal.benchmark.com/path' + 'https://internal.benchmark.com/path', ); }); @@ -399,7 +399,7 @@ describe('BigQuery', () => { assert.strictEqual(calledWith.baseUrl, EMULATOR_HOST); assert.strictEqual( calledWith.apiEndpoint, - 'https://internal.benchmark.com/path' + 'https://internal.benchmark.com/path', ); }); }); @@ -709,7 +709,7 @@ describe('BigQuery', () => { const mergedRows = BigQuery.mergeSchemaWithRows_( SCHEMA_OBJECT, rows.raw, - {} + {}, ); mergedRows.forEach((mergedRow: {}, i: number) => { assert.deepStrictEqual(mergedRow, rows.expectedParsed[i]); @@ -1003,7 +1003,7 @@ describe('BigQuery', () => { assert.strictEqual(dateRange.apiValue, '[2020-01-01, 2020-12-31)'); assert.strictEqual( dateRange.literalValue, - 'RANGE [2020-01-01, 2020-12-31)' + 'RANGE [2020-01-01, 2020-12-31)', ); assert.deepStrictEqual(dateRange.value, { start: '2020-01-01', @@ -1013,11 +1013,11 @@ describe('BigQuery', () => { const datetimeRange = bq.range(INPUT_DATETIME_RANGE, 'DATETIME'); assert.strictEqual( datetimeRange.apiValue, - '[2020-01-01 12:00:00, 2020-12-31 12:00:00)' + '[2020-01-01 12:00:00, 2020-12-31 12:00:00)', ); assert.strictEqual( datetimeRange.literalValue, - 'RANGE [2020-01-01 12:00:00, 2020-12-31 12:00:00)' + 'RANGE [2020-01-01 12:00:00, 2020-12-31 12:00:00)', ); assert.deepStrictEqual(datetimeRange.value, { start: '2020-01-01 12:00:00', @@ -1027,11 +1027,11 @@ describe('BigQuery', () => { const timestampRange = bq.range(INPUT_TIMESTAMP_RANGE, 
'TIMESTAMP'); assert.strictEqual( timestampRange.apiValue, - '[2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)' + '[2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)', ); assert.strictEqual( timestampRange.literalValue, - 'RANGE [2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)' + 'RANGE [2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)', ); assert.deepStrictEqual(timestampRange.value, { start: '2020-10-01T04:00:00.000Z', @@ -1047,7 +1047,7 @@ describe('BigQuery', () => { assert.strictEqual(dateRange.apiValue, INPUT_DATE_RANGE); assert.strictEqual( dateRange.literalValue, - `RANGE ${INPUT_DATE_RANGE}` + `RANGE ${INPUT_DATE_RANGE}`, ); assert.strictEqual(dateRange.elementType, 'DATE'); assert.deepStrictEqual(dateRange.value, { @@ -1062,7 +1062,7 @@ describe('BigQuery', () => { assert.strictEqual(datetimeRange.apiValue, INPUT_DATETIME_RANGE); assert.strictEqual( datetimeRange.literalValue, - `RANGE ${INPUT_DATETIME_RANGE}` + `RANGE ${INPUT_DATETIME_RANGE}`, ); assert.strictEqual(datetimeRange.elementType, 'DATETIME'); assert.deepStrictEqual(datetimeRange.value, { @@ -1076,11 +1076,11 @@ describe('BigQuery', () => { }); assert.strictEqual( timestampRange.apiValue, - '[2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)' + '[2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)', ); assert.strictEqual( timestampRange.literalValue, - 'RANGE [2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)' + 'RANGE [2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)', ); assert.strictEqual(timestampRange.elementType, 'TIMESTAMP'); assert.deepStrictEqual(timestampRange.value, { @@ -1095,12 +1095,12 @@ describe('BigQuery', () => { start: '2020-01-01', end: '2020-12-31', }, - 'DATE' + 'DATE', ); assert.strictEqual(dateRange.apiValue, INPUT_DATE_RANGE); assert.strictEqual( dateRange.literalValue, - `RANGE ${INPUT_DATE_RANGE}` + `RANGE ${INPUT_DATE_RANGE}`, ); assert.strictEqual(dateRange.elementType, 'DATE'); @@ -1109,12 +1109,12 @@ describe('BigQuery', () => { start: '2020-01-01 12:00:00', end: '2020-12-31 12:00:00', }, - 'DATETIME' + 'DATETIME', ); assert.strictEqual(datetimeRange.apiValue, INPUT_DATETIME_RANGE); assert.strictEqual( datetimeRange.literalValue, - `RANGE ${INPUT_DATETIME_RANGE}` + `RANGE ${INPUT_DATETIME_RANGE}`, ); assert.strictEqual(datetimeRange.elementType, 'DATETIME'); @@ -1123,15 +1123,15 @@ describe('BigQuery', () => { start: '2020-10-01 12:00:00+08', end: '2020-12-31 12:00:00+08', }, - 'TIMESTAMP' + 'TIMESTAMP', ); assert.strictEqual( timestampRange.apiValue, - '[2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)' + '[2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)', ); assert.strictEqual( timestampRange.literalValue, - 'RANGE [2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)' + 'RANGE [2020-10-01T04:00:00.000Z, 2020-12-31T04:00:00.000Z)', ); assert.strictEqual(timestampRange.elementType, 'TIMESTAMP'); }); @@ -1141,28 +1141,28 @@ describe('BigQuery', () => { { start: '2020-01-01', }, - 'DATE' + 'DATE', ); assert.strictEqual( dateRange.literalValue, - 'RANGE [2020-01-01, UNBOUNDED)' + 'RANGE [2020-01-01, UNBOUNDED)', ); const datetimeRange = bq.range( { end: '2020-12-31 12:00:00', }, - 'DATETIME' + 'DATETIME', ); assert.strictEqual( datetimeRange.literalValue, - 'RANGE [UNBOUNDED, 2020-12-31 12:00:00)' + 'RANGE [UNBOUNDED, 2020-12-31 12:00:00)', ); const timestampRange = bq.range({}, 'TIMESTAMP'); assert.strictEqual( timestampRange.literalValue, - 'RANGE [UNBOUNDED, UNBOUNDED)' + 'RANGE [UNBOUNDED, UNBOUNDED)', ); }); }); @@ -1242,7 +1242,7 @@ describe('BigQuery', () 
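The range tests above cover the ways a RANGE value can be built; condensed, the same calls look roughly like this (client construction and the parameterized query are assumptions, the range inputs come from the tests):

import {BigQuery} from '@google-cloud/bigquery';

const bq = new BigQuery();

// From a range literal plus an element type:
const dateRange = bq.range('[2020-01-01, 2020-12-31)', 'DATE');
console.log(dateRange.value); // {start: '2020-01-01', end: '2020-12-31'}

// From explicit start/end values:
const dtRange = bq.range(
  {start: '2020-01-01 12:00:00', end: '2020-12-31 12:00:00'},
  'DATETIME',
);

// Missing bounds are allowed and serialize as UNBOUNDED:
const openRange = bq.range({start: '2020-01-01'}, 'DATE');
console.log(openRange.literalValue);

// Ranges are also accepted as query parameters, as in the system tests:
void bq.query({
  query: 'SELECT @r AS r',
  params: {r: dtRange},
});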
=> { '{\n' + ' integerTypeCastFunction: provide \n' + ' fields: optionally specify field name(s) to be custom casted\n' + - '}\n' + '}\n', ); }; @@ -1251,9 +1251,9 @@ describe('BigQuery', () => { () => new BigQueryInt( valueObject, - {} as IntegerTypeCastOptions + {} as IntegerTypeCastOptions, ).valueOf(), - /integerTypeCastFunction is not a function or was not provided\./ + /integerTypeCastFunction is not a function or was not provided\./, ); }); @@ -1289,7 +1289,7 @@ describe('BigQuery', () => { () => { new BigQueryInt(largeIntegerValue).valueOf(); }, - expectedError({integerValue: largeIntegerValue}) + expectedError({integerValue: largeIntegerValue}), ); // should throw when string is passed @@ -1297,7 +1297,7 @@ describe('BigQuery', () => { () => { new BigQueryInt(smallIntegerValue.toString()).valueOf(); }, - expectedError({integerValue: smallIntegerValue}) + expectedError({integerValue: smallIntegerValue}), ); }); @@ -1312,7 +1312,7 @@ describe('BigQuery', () => { () => { new BigQueryInt(valueObject); }, - new RegExp(`Integer value ${largeIntegerValue} is out of bounds.`) + new RegExp(`Integer value ${largeIntegerValue} is out of bounds.`), ); }); @@ -1323,7 +1323,7 @@ describe('BigQuery', () => { new BigQueryInt(valueObject, { integerTypeCastFunction: {} as Function, }).valueOf(), - /integerTypeCastFunction is not a function or was not provided\./ + /integerTypeCastFunction is not a function or was not provided\./, ); }); @@ -1372,7 +1372,7 @@ describe('BigQuery', () => { new BigQueryInt(valueObject, { integerTypeCastFunction: stub, }).valueOf(), - /integerTypeCastFunction threw an error:/ + /integerTypeCastFunction threw an error:/, ); }); }); @@ -1392,63 +1392,63 @@ describe('BigQuery', () => { it('should return correct types', () => { assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(bq.date()).type, - 'DATE' + 'DATE', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(bq.datetime('')).type, - 'DATETIME' + 'DATETIME', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(bq.time()).type, - 'TIME' + 'TIME', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(bq.timestamp(0)).type, - 'TIMESTAMP' + 'TIMESTAMP', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(Buffer.alloc(2)).type, - 'BYTES' + 'BYTES', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(true).type, - 'BOOL' + 'BOOL', ); assert.strictEqual(BigQuery.getTypeDescriptorFromValue_(8).type, 'INT64'); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(8.1).type, - 'FLOAT64' + 'FLOAT64', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_('hi').type, - 'STRING' + 'STRING', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(new Big('1.1')).type, - 'NUMERIC' + 'NUMERIC', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_( - new Big('1999.9876543210123456789') + new Big('1999.9876543210123456789'), ).type, - 'BIGNUMERIC' + 'BIGNUMERIC', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(bq.int('100')).type, - 'INT64' + 'INT64', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_(bq.geography('POINT (1 1')).type, - 'GEOGRAPHY' + 'GEOGRAPHY', ); assert.strictEqual( BigQuery.getTypeDescriptorFromValue_( bq.range( '[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', - 'TIMESTAMP' - ) + 'TIMESTAMP', + ), ).type, - 'RANGE' + 'RANGE', ); }); @@ -1484,7 +1484,7 @@ describe('BigQuery', () => { [ 'This value could not be translated to a BigQuery data type.', undefined, - ].join('\n') + ].join('\n'), ); assert.throws(() => { @@ -1500,7 +1500,7 
@@ describe('BigQuery', () => { it('should throw with a null value', () => { const expectedError = new RegExp( - "Parameter types must be provided for null values via the 'types' field in query options." + "Parameter types must be provided for null values via the 'types' field in query options.", ); assert.throws(() => { @@ -1580,7 +1580,7 @@ describe('BigQuery', () => { const queryParameter = BigQuery.valueToQueryParameter_( value, - providedType + providedType, ); assert.strictEqual(queryParameter.parameterValue.value, value); @@ -1718,7 +1718,7 @@ describe('BigQuery', () => { const queryParameter = BigQuery.valueToQueryParameter_( array, - providedType + providedType, ); const arrayValues = queryParameter.parameterValue.arrayValues; assert.deepStrictEqual(arrayValues, [ @@ -1764,7 +1764,7 @@ describe('BigQuery', () => { const getTypeStub = sandbox.stub( BigQuery, - 'getTypeDescriptorFromProvidedType_' + 'getTypeDescriptorFromProvidedType_', ); getTypeStub.onFirstCall().returns({ type: 'STRUCT', @@ -1781,7 +1781,7 @@ describe('BigQuery', () => { const queryParameter = BigQuery.valueToQueryParameter_( struct, - providedType + providedType, ); const structValues = queryParameter.parameterValue.structValues; assert.deepStrictEqual(structValues, { @@ -1839,7 +1839,7 @@ describe('BigQuery', () => { parameterValue: { value: strValue, }, - } + }, ); }); @@ -1873,7 +1873,7 @@ describe('BigQuery', () => { sandbox.stub(BigQuery, '_isCustomType').returns(true); assert.strictEqual( BigQuery._getValue(geography, geography.type), - geography.value + geography.value, ); }); @@ -1923,7 +1923,7 @@ describe('BigQuery', () => { assert.strictEqual(reqOpts.projectId, ANOTHER_PROJECT_ID); assert.strictEqual( reqOpts.uri, - `https://bigquery.googleapis.com/bigquery/v2/projects/${ANOTHER_PROJECT_ID}/datasets` + `https://bigquery.googleapis.com/bigquery/v2/projects/${ANOTHER_PROJECT_ID}/datasets`, ); assert.deepStrictEqual(reqOpts.json.datasetReference, { datasetId: DATASET_ID, @@ -1937,7 +1937,7 @@ describe('BigQuery', () => { { projectId: ANOTHER_PROJECT_ID, }, - assert.ifError + assert.ifError, ); }); @@ -2010,7 +2010,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.deepStrictEqual(apiResponse, resp); done(); - } + }, ); }); @@ -2042,11 +2042,10 @@ describe('BigQuery', () => { let fakeJobId: string; beforeEach(() => { - fakeJobId = uuid.v4(); + fakeJobId = crypto.randomUUID(); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (fakeUuid as any).v4 = () => { - return fakeJobId; + fakeCrypto.randomUUID = _ => { + return fakeJobId as crypto.UUID; }; }); @@ -2303,7 +2302,7 @@ describe('BigQuery', () => { datasetId: dataset.id, projectId: dataset.projectId, tableId: TABLE_ID, - } + }, ); done(); @@ -2352,7 +2351,7 @@ describe('BigQuery', () => { query: QUERY_STRING, params: NAMED_PARAMS, }, - assert.ifError + assert.ifError, ); }); @@ -2362,7 +2361,7 @@ describe('BigQuery', () => { assert.strictEqual((reqOpts as any).params, undefined); assert.deepStrictEqual( reqOpts.configuration?.query?.queryParameters, - NAMED_PARAMS + NAMED_PARAMS, ); done(); }; @@ -2372,7 +2371,7 @@ describe('BigQuery', () => { query: QUERY_STRING, queryParameters: NAMED_PARAMS, }, - assert.ifError + assert.ifError, ); }); @@ -2389,7 +2388,7 @@ describe('BigQuery', () => { query: QUERY_STRING, params: NAMED_PARAMS, }, - assert.ifError + assert.ifError, ); }); @@ -2413,7 +2412,7 @@ describe('BigQuery', () => { query: QUERY_STRING, params: NAMED_PARAMS, }, - assert.ifError + assert.ifError, ); }); @@ -2427,7 
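The job-id tests above pin randomUUID to a fixed value by handing proxyquire a patched copy of the crypto module; the pattern, reduced to its core (module path as in the test file):

import * as crypto from 'crypto';
import * as extend from 'extend';
import * as proxyquire from 'proxyquire';

// Deep-copy the real module, then override just randomUUID so code under
// test that calls crypto.randomUUID() sees a predictable job id.
const fakeCrypto = extend(true, {}, crypto);
const fixedJobId = crypto.randomUUID();
fakeCrypto.randomUUID = () => fixedJobId;

const bigqueryModule = proxyquire('../src/bigquery', {crypto: fakeCrypto});
console.log(typeof bigqueryModule.BigQuery); // 'function'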
+2426,7 @@ describe('BigQuery', () => { assert.strictEqual(value, NAMED_PARAMS.key); assert.strictEqual(providedType, NAMED_TYPES.key); return queryParameter; - } + }, ); bq.createJob = (reqOpts: JobOptions) => { // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -2440,7 +2439,7 @@ describe('BigQuery', () => { params: NAMED_PARAMS, types: NAMED_TYPES, }, - assert.ifError + assert.ifError, ); }); @@ -2463,7 +2462,7 @@ describe('BigQuery', () => { params: NAMED_PARAMS, types: {}, }, - assert.ifError + assert.ifError, ); }); @@ -2497,7 +2496,7 @@ describe('BigQuery', () => { query: QUERY_STRING, params: POSITIONAL_PARAMS, }, - assert.ifError + assert.ifError, ); }); @@ -2520,7 +2519,7 @@ describe('BigQuery', () => { query: QUERY_STRING, params: POSITIONAL_PARAMS, }, - assert.ifError + assert.ifError, ); }); @@ -2561,7 +2560,7 @@ describe('BigQuery', () => { params: POSITIONAL_PARAMS, types: POSITIONAL_TYPES, }, - assert.ifError + assert.ifError, ); }); @@ -2598,7 +2597,7 @@ describe('BigQuery', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.query as any).dryRun, - undefined + undefined, ); assert.strictEqual(reqOpts.configuration!.dryRun, options.dryRun); done(); @@ -2617,7 +2616,7 @@ describe('BigQuery', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.query as any).labels, - undefined + undefined, ); assert.deepStrictEqual(reqOpts.configuration!.labels, options.labels); done(); @@ -2636,7 +2635,7 @@ describe('BigQuery', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.query as any).jobPrefix, - undefined + undefined, ); assert.strictEqual(reqOpts.jobPrefix, options.jobPrefix); done(); @@ -2655,7 +2654,7 @@ describe('BigQuery', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.query as any).location, - undefined + undefined, ); assert.strictEqual(reqOpts.location, LOCATION); done(); @@ -2674,7 +2673,7 @@ describe('BigQuery', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.query as any).jobId, - undefined + undefined, ); assert.strictEqual(reqOpts.jobId, options.jobId); done(); @@ -2693,11 +2692,11 @@ describe('BigQuery', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.query as any).jobTimeoutMs, - undefined + undefined, ); assert.strictEqual( reqOpts.configuration!.jobTimeoutMs, - `${options.jobTimeoutMs}` + `${options.jobTimeoutMs}`, ); done(); }; @@ -2844,7 +2843,7 @@ describe('BigQuery', () => { assert.ifError(err); assert.strictEqual(apiResponse, resp); done(); - } + }, ); }); @@ -2891,7 +2890,7 @@ describe('BigQuery', () => { bq.makeAuthenticatedRequest = (reqOpts: DecorateRequestOptions) => { assert.strictEqual( reqOpts.uri, - `https://bigquery.googleapis.com/bigquery/v2/projects/${ANOTHER_PROJECT_ID}/datasets` + `https://bigquery.googleapis.com/bigquery/v2/projects/${ANOTHER_PROJECT_ID}/datasets`, ); done(); }; @@ -3124,7 +3123,7 @@ describe('BigQuery', () => { bq.runJobsQuery = (query: {}, callback: Function) => { callback(null, fakeJob, { - queryId: uuid.v1(), + queryId: crypto.randomUUID(), jobComplete: false, }); }; @@ -3135,12 +3134,12 @@ describe('BigQuery', () => { (err: Error, rows: {}, resp: {}) => { assert.strictEqual( err.message, - 'The query did not complete before 
1000ms' + 'The query did not complete before 1000ms', ); assert.strictEqual(rows, null); assert.strictEqual(resp, fakeJob); done(); - } + }, ); }); @@ -3459,7 +3458,7 @@ describe('BigQuery', () => { const query = 'SELECT'; bq.queryAsStream_(query, done); assert( - queryStub.calledOnceWithExactly(query, defaultOpts, sinon.match.func) + queryStub.calledOnceWithExactly(query, defaultOpts, sinon.match.func), ); }); @@ -3530,18 +3529,18 @@ describe('BigQuery', () => { it('should default protocol to https', () => { const endpoint = BigQuery.sanitizeEndpoint( - USER_DEFINED_SHORT_API_ENDPOINT + USER_DEFINED_SHORT_API_ENDPOINT, ); assert.strictEqual(endpoint.match(PROTOCOL_REGEX)![1], 'https'); }); it('should not override protocol', () => { const endpoint = BigQuery.sanitizeEndpoint( - USER_DEFINED_FULL_API_ENDPOINT + USER_DEFINED_FULL_API_ENDPOINT, ); assert.strictEqual( endpoint.match(PROTOCOL_REGEX)![1], - USER_DEFINED_PROTOCOL + USER_DEFINED_PROTOCOL, ); }); diff --git a/test/dataset.ts b/test/dataset.ts index 5d77e33d9..321ff85b7 100644 --- a/test/dataset.ts +++ b/test/dataset.ts @@ -19,13 +19,13 @@ import { util, } from '@google-cloud/common'; import * as pfy from '@google-cloud/promisify'; -import arrify = require('arrify'); import * as assert from 'assert'; import {describe, it, before, beforeEach} from 'mocha'; import * as extend from 'extend'; import * as proxyquire from 'proxyquire'; import * as _root from '../src'; +import {toArray} from '../src/util'; import {DatasetOptions} from '../src/dataset'; import {FormattedMetadata, TableOptions} from '../src/table'; @@ -56,7 +56,7 @@ const fakePaginator = { if (c.name !== 'Dataset') { return; } - methods = arrify(methods); + methods = toArray(methods); assert.strictEqual(c.name, 'Dataset'); assert.deepStrictEqual(methods, [ 'getModels', @@ -203,7 +203,7 @@ describe('BigQuery/Dataset', () => { bq.createDataset = ( id: string, options: DatasetOptions, - callback: Function + callback: Function, ) => { assert.strictEqual(options.location, LOCATION); callback(); // the done fn @@ -217,7 +217,7 @@ describe('BigQuery/Dataset', () => { bq.createDataset = ( id: string, options: DatasetOptions, - callback: Function + callback: Function, ) => { assert.strictEqual(options.projectId, 'project-id'); callback(); // the done fn @@ -323,7 +323,7 @@ describe('BigQuery/Dataset', () => { defaultDataset: { datasetId: ds.id, }, - } + }, ); ds.bigQuery.createQueryJob = (options: {}, callback: Function) => { @@ -339,7 +339,7 @@ describe('BigQuery/Dataset', () => { it('should accept a query string', done => { ds.bigQuery.createQueryJob = ( options: _root.Query, - callback: Function + callback: Function, ) => { assert.strictEqual(options.query, FAKE_QUERY); callback(); // the done fn @@ -531,7 +531,7 @@ describe('BigQuery/Dataset', () => { ds.request = (reqOpts: DecorateRequestOptions) => { assert.deepStrictEqual( reqOpts.json.schema.fields, - SCHEMA_OBJECT.fields + SCHEMA_OBJECT.fields, ); done(); }; @@ -541,7 +541,7 @@ describe('BigQuery/Dataset', () => { { schema: SCHEMA_OBJECT.fields, }, - assert.ifError + assert.ifError, ); }); @@ -563,7 +563,7 @@ describe('BigQuery/Dataset', () => { fields: [{id: 'name', type: 'STRING'}, nestedField], }, }, - assert.ifError + assert.ifError, ); }); @@ -592,7 +592,7 @@ describe('BigQuery/Dataset', () => { assert.ifError(err); assert(table instanceof Table); done(); - } + }, ); }); @@ -642,7 +642,7 @@ describe('BigQuery/Dataset', () => { assert.ifError(err); assert.strictEqual(apiResponse, API_RESPONSE); done(); - } + }, ); }); @@ 
-652,7 +652,7 @@ describe('BigQuery/Dataset', () => { a: 'b', c: 'd', }, - API_RESPONSE + API_RESPONSE, ); ds.request = (reqOpts: DecorateRequestOptions, callback: Function) => { @@ -666,7 +666,7 @@ describe('BigQuery/Dataset', () => { assert.ifError(err); assert.strictEqual(table.metadata, apiResponse); done(); - } + }, ); }); }); @@ -798,7 +798,7 @@ describe('BigQuery/Dataset', () => { err: Error, models: _root.Model[], nextQuery: {}, - apiResponse_: {} + apiResponse_: {}, ) => { assert.ifError(err); @@ -808,7 +808,7 @@ describe('BigQuery/Dataset', () => { assert.strictEqual(model.id, modelId); assert.strictEqual(apiResponse_, apiResponse); done(); - } + }, ); }); @@ -842,7 +842,7 @@ describe('BigQuery/Dataset', () => { assert.ifError(err); assert.deepStrictEqual(nextQuery, expectedNextQuery); done(); - } + }, ); }); }); @@ -920,7 +920,7 @@ describe('BigQuery/Dataset', () => { err: Error, tables: _root.Table[], nextQuery: {}, - apiResponse_: {} + apiResponse_: {}, ) => { assert.ifError(err); @@ -931,7 +931,7 @@ describe('BigQuery/Dataset', () => { assert.strictEqual(table.location, LOCATION); assert.strictEqual(apiResponse_, apiResponse); done(); - } + }, ); }); @@ -965,7 +965,7 @@ describe('BigQuery/Dataset', () => { assert.ifError(err); assert.deepStrictEqual(nextQuery, expectedNextQuery); done(); - } + }, ); }); }); diff --git a/test/job.ts b/test/job.ts index cb777b62b..ced932d4b 100644 --- a/test/job.ts +++ b/test/job.ts @@ -14,13 +14,13 @@ import {DecorateRequestOptions, util} from '@google-cloud/common'; import * as pfy from '@google-cloud/promisify'; -import arrify = require('arrify'); import * as assert from 'assert'; import {describe, it, beforeEach, afterEach, before} from 'mocha'; import * as proxyquire from 'proxyquire'; import * as sinon from 'sinon'; import {BigQuery} from '../src/bigquery'; +import {toArray} from '../src/util'; import {QueryResultsOptions} from '../src/job'; class FakeOperation { @@ -61,7 +61,7 @@ const fakePaginator = { return; } - methods = arrify(methods); + methods = toArray(methods); assert.deepStrictEqual(methods, ['getQueryResults']); extended = true; }, @@ -217,7 +217,7 @@ describe('BigQuery/Job', () => { beforeEach(() => { BIGQUERY.request = ( reqOpts: DecorateRequestOptions, - callback: Function + callback: Function, ) => { callback(null, RESPONSE); }; @@ -240,7 +240,7 @@ describe('BigQuery/Job', () => { const options = {a: 'b'}; const expectedOptions = Object.assign( {location: undefined, 'formatOptions.useInt64Timestamp': true}, - options + options, ); BIGQUERY.request = (reqOpts: DecorateRequestOptions) => { @@ -287,7 +287,7 @@ describe('BigQuery/Job', () => { BIGQUERY.request = ( reqOpts: DecorateRequestOptions, - callback: Function + callback: Function, ) => { callback(error, response); }; @@ -320,7 +320,7 @@ describe('BigQuery/Job', () => { BIGQUERY.request = ( reqOpts: DecorateRequestOptions, - callback: Function + callback: Function, ) => { callback(null, response); }; @@ -406,7 +406,7 @@ describe('BigQuery/Job', () => { it('should return the query when the job is not complete', done => { BIGQUERY.request = ( reqOpts: DecorateRequestOptions, - callback: Function + callback: Function, ) => { callback(null, { jobComplete: false, @@ -430,7 +430,7 @@ describe('BigQuery/Job', () => { BIGQUERY.request = ( reqOpts: DecorateRequestOptions, - callback: Function + callback: Function, ) => { callback(null, response); }; @@ -443,7 +443,7 @@ describe('BigQuery/Job', () => { assert.deepStrictEqual(nextQuery, options); assert.strictEqual(resp, 
response); done(); - } + }, ); }); @@ -454,7 +454,7 @@ describe('BigQuery/Job', () => { assert.ifError(err); assert.strictEqual(nextQuery.pageToken, pageToken); done(); - } + }, ); }); }); @@ -471,7 +471,7 @@ describe('BigQuery/Job', () => { job.getQueryResults = ( options_: QueryResultsOptions, - callback: Function + callback: Function, ) => { assert.deepStrictEqual(options_, { a: 'b', diff --git a/test/model.ts b/test/model.ts index f9dabeb86..5408e11d5 100644 --- a/test/model.ts +++ b/test/model.ts @@ -166,7 +166,7 @@ describe('BigQuery/Model', () => { model.createExtractJob( URI, {format: 'ml_tf_saved_model'}, - assert.ifError + assert.ifError, ); }); @@ -180,7 +180,7 @@ describe('BigQuery/Model', () => { model.createExtractJob( URI, {format: 'ml_xgboost_booster'}, - assert.ifError + assert.ifError, ); }); @@ -192,7 +192,7 @@ describe('BigQuery/Model', () => { model.bigQuery.createJob = (reqOpts: JobOptions) => { assert.deepStrictEqual( reqOpts.configuration!.extract!.destinationUris, - ['gs://' + FILE.bucket.name + '/' + FILE.name] + ['gs://' + FILE.bucket.name + '/' + FILE.name], ); done(); }; @@ -230,7 +230,7 @@ describe('BigQuery/Model', () => { model.createExtractJob( URI, {format: 'interpretive_dance'}, - util.noop + util.noop, ); }, /Destination format not recognized/); }); @@ -243,13 +243,13 @@ describe('BigQuery/Model', () => { model.bigQuery.createJob = ( reqOpts: JobOptions, - callback: Function + callback: Function, ) => { assert.strictEqual(reqOpts.jobPrefix, fakeJobPrefix); assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.extract as any).jobPrefix, - undefined + undefined, ); callback(); // the done fn }; @@ -263,13 +263,13 @@ describe('BigQuery/Model', () => { model.bigQuery.createJob = ( reqOpts: JobOptions, - callback: Function + callback: Function, ) => { assert.strictEqual(reqOpts.jobId, jobId); assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.extract as any).jobId, - undefined + undefined, ); callback(); // the done fn }; @@ -280,7 +280,7 @@ describe('BigQuery/Model', () => { it('should pass the callback to createJob', done => { model.bigQuery.createJob = ( reqOpts: JobOptions, - callback: Function + callback: Function, ) => { assert.strictEqual(done, callback); callback(); // the done fn @@ -292,7 +292,7 @@ describe('BigQuery/Model', () => { it('should optionally accept options', done => { model.bigQuery.createJob = ( reqOpts: JobOptions, - callback: Function + callback: Function, ) => { assert.strictEqual(done, callback); callback(); // the done fn @@ -312,7 +312,7 @@ describe('BigQuery/Model', () => { model.createExtractJob = ( destination: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(null, fakeJob); }; @@ -347,7 +347,7 @@ describe('BigQuery/Model', () => { model.createExtractJob = ( destination: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(error, null, response); }; diff --git a/test/routine.ts b/test/routine.ts index 094e21fee..d1cdb0966 100644 --- a/test/routine.ts +++ b/test/routine.ts @@ -134,7 +134,7 @@ describe('BigQuery/Routine', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (FakeServiceObject.prototype as any).setMetadata = function ( metadata: {}, - callback: Function + callback: Function, ) { assert.strictEqual(this, routine); assert.deepStrictEqual(metadata, expectedMetadata); diff --git a/test/table.ts b/test/table.ts index 5394f006d..710d4cf1a 
100644 --- a/test/table.ts +++ b/test/table.ts @@ -21,7 +21,6 @@ import { import {GoogleErrorBody} from '@google-cloud/common/build/src/util'; import * as pfy from '@google-cloud/promisify'; import {File} from '@google-cloud/storage'; -import arrify = require('arrify'); import * as assert from 'assert'; import {describe, it, afterEach, beforeEach, before, after} from 'mocha'; import * as Big from 'big.js'; @@ -30,9 +29,10 @@ import * as extend from 'extend'; import * as proxyquire from 'proxyquire'; import * as sinon from 'sinon'; import * as stream from 'stream'; -import * as uuid from 'uuid'; +import * as crypto from 'crypto'; import {BigQuery, Query} from '../src/bigquery'; +import {toArray} from '../src/util'; import {Job, JobOptions} from '../src/job'; import { CopyTableMetadata, @@ -100,7 +100,7 @@ const fakePaginator = { return; } - methods = arrify(methods); + methods = toArray(methods); assert.strictEqual(c.name, 'Table'); assert.deepStrictEqual(methods, ['getRows']); extended = true; @@ -111,7 +111,7 @@ const fakePaginator = { }, }; -let fakeUuid = extend(true, {}, uuid); +let fakeCrypto = extend(true, {}, crypto); class FakeServiceObject extends ServiceObject { calledWith_: IArguments; @@ -132,7 +132,7 @@ const sandbox = sinon.createSandbox(); describe('BigQuery/Table', () => { before(() => { Table = proxyquire('../src/table.js', { - uuid: fakeUuid, + crypto: fakeCrypto, '@google-cloud/common': { ServiceObject: FakeServiceObject, util: fakeUtil, @@ -159,7 +159,7 @@ describe('BigQuery/Table', () => { }); beforeEach(() => { - fakeUuid = Object.assign(fakeUuid, uuid); + fakeCrypto = Object.assign(fakeCrypto, crypto); isCustomTypeOverride = null; makeWritableStreamOverride = null; tableOverrides = {}; @@ -319,7 +319,7 @@ describe('BigQuery/Table', () => { it('should create a schema object from a string', () => { assert.deepStrictEqual( Table.createSchemaFromString_(SCHEMA_STRING), - SCHEMA_OBJECT + SCHEMA_OBJECT, ); }); @@ -349,7 +349,7 @@ describe('BigQuery/Table', () => { const range = BigQuery.range( '[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', - 'TIMESTAMP' + 'TIMESTAMP', ); assert.deepEqual(Table.encodeValue_(range), { start: '2020-10-01T04:00:00.000Z', @@ -436,11 +436,11 @@ describe('BigQuery/Table', () => { assert.strictEqual(Table.encodeValue_(new Big('-123.456')), '-123.456'); assert.strictEqual( Table.encodeValue_(new Big('99999999999999999999999999999.999999999')), - '99999999999999999999999999999.999999999' + '99999999999999999999999999999.999999999', ); assert.strictEqual( Table.encodeValue_(new Big('-99999999999999999999999999999.999999999')), - '-99999999999999999999999999999.999999999' + '-99999999999999999999999999999.999999999', ); }); @@ -543,7 +543,7 @@ describe('BigQuery/Table', () => { table.createCopyJob = ( destination: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(null, fakeJob); }; @@ -581,7 +581,7 @@ describe('BigQuery/Table', () => { table.createCopyJob = ( destination: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(error, null, response); }; @@ -626,7 +626,7 @@ describe('BigQuery/Table', () => { table.createCopyFromJob = ( sourceTables: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(null, fakeJob); }; @@ -661,7 +661,7 @@ describe('BigQuery/Table', () => { table.createCopyFromJob = ( sourceTables: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(error, null, response); }; @@ -707,17 +707,17 @@ describe('BigQuery/Table', () => { 
it('should throw if a destination is not a Table', async () => { await assert.rejects( async () => table.createCopyJob(), - /Destination must be a Table/ + /Destination must be a Table/, ); await assert.rejects( async () => table.createCopyJob({}), - /Destination must be a Table/ + /Destination must be a Table/, ); await assert.rejects( async () => table.createCopyJob(() => {}), - /Destination must be a Table/ + /Destination must be a Table/, ); }); @@ -759,7 +759,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.copy as any).jobPrefix, - undefined + undefined, ); callback(); // the done fn }; @@ -786,7 +786,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.copy as any).jobId, - undefined + undefined, ); callback(); // the done fn }; @@ -823,22 +823,22 @@ describe('BigQuery/Table', () => { it('should throw if a source is not a Table', async () => { await assert.rejects( async () => table.createCopyFromJob(['table']), - /Source must be a Table/ + /Source must be a Table/, ); await assert.rejects( async () => table.createCopyFromJob([SOURCE_TABLE, 'table']), - /Source must be a Table/ + /Source must be a Table/, ); await assert.rejects( async () => table.createCopyFromJob({}), - /Source must be a Table/ + /Source must be a Table/, ); await assert.rejects( async () => table.createCopyFromJob(() => {}), - /Source must be a Table/ + /Source must be a Table/, ); }); @@ -903,7 +903,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.copy as any).jobPrefix, - undefined + undefined, ); callback(); // the done fn }; @@ -930,7 +930,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.copy as any).jobId, - undefined + undefined, ); callback(); // the done fn }; @@ -1045,7 +1045,7 @@ describe('BigQuery/Table', () => { const extract = reqOpts.configuration!.extract!; assert.strictEqual( extract.destinationFormat, - 'NEWLINE_DELIMITED_JSON' + 'NEWLINE_DELIMITED_JSON', ); done(); }; @@ -1093,7 +1093,7 @@ describe('BigQuery/Table', () => { table.createExtractJob( FILE, {format: 'export_metadata'}, - assert.ifError + assert.ifError, ); }); }); @@ -1101,7 +1101,7 @@ describe('BigQuery/Table', () => { table.bigQuery.createJob = (reqOpts: JobOptions) => { assert.deepStrictEqual( reqOpts.configuration!.extract!.destinationUris, - ['gs://' + FILE.bucket.name + '/' + FILE.name] + ['gs://' + FILE.bucket.name + '/' + FILE.name], ); done(); }; @@ -1168,7 +1168,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.extract as any).gzip, - undefined + undefined, ); done(); }; @@ -1187,7 +1187,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.extract as any).jobPrefix, - undefined + undefined, ); callback(); // the done fn }; @@ -1215,7 +1215,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( // eslint-disable-next-line @typescript-eslint/no-explicit-any (reqOpts.configuration!.extract as any).jobId, - undefined + undefined, ); callback(); // the done fn }; @@ -1353,7 +1353,7 @@ describe('BigQuery/Table', () => { }; await assert.rejects( async () => 
table.createLoadJob({}), - /Source must be a File object/ + /Source must be a File object/, ); }); @@ -1367,7 +1367,7 @@ describe('BigQuery/Table', () => { sourceUris: ['gs://' + FILE.bucket.name + '/' + FILE.name], }, }, - }) + }), ); }); @@ -1381,7 +1381,7 @@ describe('BigQuery/Table', () => { sourceFormat: 'NEWLINE_DELIMITED_JSON', }, }, - }) + }), ); }); @@ -1395,7 +1395,7 @@ describe('BigQuery/Table', () => { sourceFormat: 'AVRO', }, }, - }) + }), ); }); @@ -1421,7 +1421,7 @@ describe('BigQuery/Table', () => { jobPrefix: undefined, }, }, - }) + }), ); }); @@ -1443,7 +1443,7 @@ describe('BigQuery/Table', () => { jobId: undefined, }, }, - }) + }), ); }); @@ -1458,7 +1458,7 @@ describe('BigQuery/Table', () => { sourceFormat: 'CSV', }, }, - }) + }), ); }); @@ -1472,7 +1472,7 @@ describe('BigQuery/Table', () => { sourceFormat: 'NEWLINE_DELIMITED_JSON', }, }, - }) + }), ); }); @@ -1486,7 +1486,7 @@ describe('BigQuery/Table', () => { sourceFormat: 'AVRO', }, }, - }) + }), ); }); @@ -1500,7 +1500,7 @@ describe('BigQuery/Table', () => { sourceFormat: 'DATASTORE_BACKUP', }, }, - }) + }), ); }); }); @@ -1548,7 +1548,7 @@ describe('BigQuery/Table', () => { it('should accept export_metadata', done => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const load = options.metadata.configuration!.load!; assert.strictEqual(load.sourceFormat, 'DATASTORE_BACKUP'); @@ -1561,7 +1561,7 @@ describe('BigQuery/Table', () => { it('should accept csv', done => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const load = options.metadata.configuration!.load!; assert.strictEqual(load.sourceFormat, 'CSV'); @@ -1574,7 +1574,7 @@ describe('BigQuery/Table', () => { it('should accept json', done => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const load = options.metadata.configuration!.load!; assert.strictEqual(load.sourceFormat, 'NEWLINE_DELIMITED_JSON'); @@ -1587,7 +1587,7 @@ describe('BigQuery/Table', () => { it('should accept avro', done => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const load = options.metadata.configuration!.load!; assert.strictEqual(load.sourceFormat, 'AVRO'); @@ -1600,7 +1600,7 @@ describe('BigQuery/Table', () => { it('should accept export_metadata', done => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const load = options.metadata.configuration!.load!; assert.strictEqual(load.sourceFormat, 'DATASTORE_BACKUP'); @@ -1620,7 +1620,7 @@ describe('BigQuery/Table', () => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const load = options.metadata.configuration!.load!; assert.deepStrictEqual(load.schema, expectedSchema); @@ -1640,11 +1640,11 @@ describe('BigQuery/Table', () => { }; makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { assert.deepStrictEqual( options.metadata.configuration?.load?.destinationTable, - expectedMetadata.destinationTable + expectedMetadata.destinationTable, ); done(); }; @@ -1670,8 +1670,10 @@ describe('BigQuery/Table', () => { beforeEach(() => { 
fakeJob = new EventEmitter(); - fakeJobId = uuid.v4(); - sandbox.stub(fakeUuid, 'v4').returns(fakeJobId); + fakeJobId = crypto.randomUUID(); + sandbox + .stub(fakeCrypto, 'randomUUID') + .returns(fakeJobId as crypto.UUID); }); it('should make a writable stream when written to', done => { @@ -1686,7 +1688,7 @@ describe('BigQuery/Table', () => { it('should pass extended metadata', done => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { assert.deepStrictEqual(options.metadata, { configuration: { @@ -1715,7 +1717,7 @@ describe('BigQuery/Table', () => { it('should pass the correct request uri', done => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const uri = table.bigQuery.apiEndpoint + @@ -1735,7 +1737,7 @@ describe('BigQuery/Table', () => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const jobId = options.metadata.jobReference!.jobId; assert.strictEqual(jobId, expectedJobId); @@ -1755,7 +1757,7 @@ describe('BigQuery/Table', () => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const location = options.metadata.jobReference!.location; assert.strictEqual(location, LOCATION); @@ -1772,7 +1774,7 @@ describe('BigQuery/Table', () => { makeWritableStreamOverride = ( stream: stream.Stream, - options: MakeWritableStreamOptions + options: MakeWritableStreamOptions, ) => { const jobReference = options.metadata.jobReference!; assert.strictEqual(jobReference.jobId, jobId); @@ -1810,7 +1812,7 @@ describe('BigQuery/Table', () => { makeWritableStreamOverride = ( stream: {}, options: {}, - callback: Function + callback: Function, ) => { callback(metadata); }; @@ -1906,7 +1908,7 @@ describe('BigQuery/Table', () => { table.createExtractJob = ( destination: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(null, fakeJob); }; @@ -1941,7 +1943,7 @@ describe('BigQuery/Table', () => { table.createExtractJob = ( destination: {}, metadata: {}, - callback: Function + callback: Function, ) => { callback(error, null, response); }; @@ -2079,7 +2081,7 @@ describe('BigQuery/Table', () => { err: Error, rows: {}, nextQuery: {}, - apiResponse_: {} + apiResponse_: {}, ) { assert.strictEqual(err, error); assert.strictEqual(rows, null); @@ -2267,7 +2269,7 @@ describe('BigQuery/Table', () => { for (const [i, call] of callSequence.entries()) { table.request = ( reqOpts: DecorateRequestOptions, - callback: Function + callback: Function, ) => { callback(null, {rows: call.rows}); }; @@ -2279,7 +2281,7 @@ describe('BigQuery/Table', () => { if (i === callSequence.length - 1) { done(); } - } + }, ); } }); @@ -2455,7 +2457,9 @@ describe('BigQuery/Table', () => { beforeEach(() => { insertSpy = sinon.spy(table, '_insert'); requestStub = sinon.stub(table, 'request').resolves([{}]); - sandbox.stub(fakeUuid, 'v4').returns(fakeInsertId); + sandbox + .stub(fakeCrypto, 'randomUUID') + .returns(fakeInsertId as crypto.UUID); }); afterEach(() => { @@ -2491,7 +2495,7 @@ describe('BigQuery/Table', () => { it('should throw an error if rows is empty', async () => { await assert.rejects( async () => table.insert([]), - /You must provide at least 1 row to be inserted/ + /You must provide at least 1 row to be inserted/, ); }); @@ -2502,7 +2506,7 @@ 
describe('BigQuery/Table', () => { method: 'POST', uri: '/insertAll', json: dataApiFormat, - }) + }), ); }); @@ -2520,8 +2524,8 @@ describe('BigQuery/Table', () => { await table.insert([data[0]]); assert( requestStub.calledOnceWith( - sinon.match.hasNested('json.rows[0].insertId', fakeInsertId) - ) + sinon.match.hasNested('json.rows[0].insertId', fakeInsertId), + ), ); }); @@ -2532,8 +2536,8 @@ describe('BigQuery/Table', () => { requestStub.calledWithMatch( ({json}: DecorateRequestOptions) => json.rows[0].insertId === undefined && - json.createInsertId === undefined - ) + json.createInsertId === undefined, + ), ); }); @@ -2607,7 +2611,7 @@ describe('BigQuery/Table', () => { ]); const reflection = await reflectAfterTimer(() => - table.insert(data, OPTIONS) + table.insert(data, OPTIONS), ); assert(reflection.isRejected); assert.strictEqual(insertSpy.callCount, 4); @@ -2629,7 +2633,7 @@ describe('BigQuery/Table', () => { ]); const reflection = await reflectAfterTimer(() => - table.insert(data, {...OPTIONS, partialRetries}) + table.insert(data, {...OPTIONS, partialRetries}), ); assert(reflection.isRejected); assert.strictEqual(insertSpy.callCount, partialRetries + 1); @@ -2650,7 +2654,7 @@ describe('BigQuery/Table', () => { ]); const reflection = await reflectAfterTimer(() => - table.insert(data, {...OPTIONS, partialRetries: 0}) + table.insert(data, {...OPTIONS, partialRetries: 0}), ); assert(reflection.isRejected); assert.strictEqual(insertSpy.callCount, 1); @@ -2671,7 +2675,7 @@ describe('BigQuery/Table', () => { ]); const reflection = await reflectAfterTimer(() => - table.insert(data, {...OPTIONS, partialRetries: -1}) + table.insert(data, {...OPTIONS, partialRetries: -1}), ); assert(reflection.isRejected); assert.strictEqual(insertSpy.callCount, 1); @@ -2714,29 +2718,29 @@ describe('BigQuery/Table', () => { requestStub.onCall(3).resolves(goodResponse); const reflection = await reflectAfterTimer(() => - table.insert(data, OPTIONS) + table.insert(data, OPTIONS), ); assert(reflection.isFulfilled); assert.deepStrictEqual( requestStub.getCall(0).args[0].json, dataApiFormat, - 'first call: try all 5' + 'first call: try all 5', ); assert.deepStrictEqual( requestStub.getCall(1).args[0].json, {rows: dataApiFormat.rows.slice(0, 4)}, - 'second call: previous failures were 4/5' + 'second call: previous failures were 4/5', ); assert.deepStrictEqual( requestStub.getCall(2).args[0].json, {rows: dataApiFormat.rows.slice(0, 3)}, - 'third call: previous failures were 3/5' + 'third call: previous failures were 3/5', ); assert.deepStrictEqual( requestStub.getCall(3).args[0].json, {rows: dataApiFormat.rows.slice(1, 3)}, - 'fourth call: previous failures were 2/5' + 'fourth call: previous failures were 2/5', ); assert(!requestStub.getCall(4), 'fifth call: should not have happened'); assert.ok(reflection.value); @@ -2769,7 +2773,7 @@ describe('BigQuery/Table', () => { assert.strictEqual( reqOpts.json.ignoreUnknownValues, - opts.ignoreUnknownValues + opts.ignoreUnknownValues, ); assert.strictEqual(reqOpts.json.skipInvalidRows, opts.skipInvalidRows); assert.strictEqual(reqOpts.json.templateSuffix, opts.templateSuffix); @@ -2800,13 +2804,13 @@ describe('BigQuery/Table', () => { assert(requestStub.calledOnce); assert.strictEqual( requestStub.firstCall.lastArg.json.schema, - undefined + undefined, ); }); it('should attempt to create table if not created', async () => { const reflection = await reflectAfterTimer(() => - table.insert(data, OPTIONS) + table.insert(data, OPTIONS), ); assert(reflection.isFulfilled); 
assert(createStub.calledOnce); @@ -2820,7 +2824,7 @@ describe('BigQuery/Table', () => { const firstCheckDelay = 50000; const remainingCheckDelay = expectedDelay - firstCheckDelay; - pReflect(table.insert(data, OPTIONS)); // gracefully handle async errors + void pReflect(table.insert(data, OPTIONS)); // gracefully handle async errors assert(insertCreateSpy.calledOnce); // just called `insert`, that's 1 so far await clock.tickAsync(firstCheckDelay); // first 50s @@ -2835,7 +2839,7 @@ describe('BigQuery/Table', () => { await clock.runAllAsync(); // for good measure assert( insertCreateSpy.calledTwice, - 'should not have called insert again' + 'should not have called insert again', ); }); @@ -2844,7 +2848,7 @@ describe('BigQuery/Table', () => { createStub.rejects(error); const reflection = await reflectAfterTimer(() => - table.insert(data, OPTIONS) + table.insert(data, OPTIONS), ); assert(reflection.isRejected); assert.strictEqual(reflection.reason, error); @@ -2854,7 +2858,7 @@ describe('BigQuery/Table', () => { createStub.rejects({code: 409}); const reflection = await reflectAfterTimer(() => - table.insert(data, OPTIONS) + table.insert(data, OPTIONS), ); assert(reflection.isFulfilled); assert(createStub.calledOnce); @@ -2872,7 +2876,7 @@ describe('BigQuery/Table', () => { requestStub.onThirdCall().resolves(goodResponse); const reflection = await reflectAfterTimer(() => - table.insert(data, OPTIONS) + table.insert(data, OPTIONS), ); assert(reflection.isFulfilled); assert(requestStub.calledThrice); @@ -2881,7 +2885,7 @@ describe('BigQuery/Table', () => { method: 'POST', uri: '/insertAll', json: dataApiFormat, - }) + }), ); assert.deepStrictEqual(reflection.value, goodResponse); }); @@ -2984,7 +2988,7 @@ describe('BigQuery/Table', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any (FakeServiceObject.prototype as any).setMetadata = function ( metadata: {}, - callback: Function + callback: Function, ) { assert.strictEqual(this, table); assert.strictEqual(metadata, formattedMetadata); diff --git a/tsconfig.json b/tsconfig.json index c78f1c884..a53f330d2 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -5,7 +5,7 @@ "outDir": "build", "resolveJsonModule": true, "lib": [ - "es2018", + "es2023", "dom" ] }, @@ -14,6 +14,7 @@ "src/**/*.ts", "test/*.ts", "test/**/*.ts", - "system-test/*.ts" + "system-test/*.ts", + "benchmark/*.ts" ] }
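
Note on the uuid removal in test/table.ts: the patch swaps the `uuid` package for Node's built-in `crypto.randomUUID()`, and the sinon stubs move from `fakeUuid.v4` to `fakeCrypto.randomUUID`. A minimal sketch of the resulting pattern, assuming a Node version that ships `crypto.randomUUID` (v14.17+) and the same copied-module trick the test file uses so the real `crypto` namespace is never mutated:

```ts
import * as crypto from 'crypto';
import * as extend from 'extend';
import * as sinon from 'sinon';

// Deep-copy the module object, mirroring the fakeCrypto pattern in the diff,
// so stubbing does not touch the real 'crypto' namespace.
const fakeCrypto = extend(true, {}, crypto);

const sandbox = sinon.createSandbox();
const fakeJobId: string = crypto.randomUUID(); // direct replacement for uuid.v4()

// Tests that exercise code using fakeCrypto.randomUUID() now see a fixed id.
sandbox
  .stub(fakeCrypto, 'randomUUID')
  .returns(fakeJobId as crypto.UUID);

// ... run the code under test ...
sandbox.restore();
```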
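
Note on the arrify removal in test/job.ts and test/table.ts: the external `arrify` dependency is replaced by a local `toArray` helper imported from `../src/util`. That helper's implementation is not part of this diff; a hypothetical sketch of an equivalent, assuming it only needs to normalize a nullish value, a single value, or an array (the `methods` argument passed to the fake paginator), might look like this. The real export in src/util.ts may cover more cases, such as iterables.

```ts
// Hypothetical stand-in for the toArray helper exported from src/util.ts.
export function toArray<T>(value: T | T[] | null | undefined): T[] {
  if (value === null || value === undefined) {
    return []; // mirror arrify: nullish input becomes an empty array
  }
  return Array.isArray(value) ? value : [value];
}

// Usage as seen in the updated tests:
// toArray('getQueryResults') -> ['getQueryResults']
// toArray(['getRows'])       -> ['getRows']
```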
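
Note on the tsconfig change: bumping `"lib"` from `es2018` to `es2023` (and adding `benchmark/*.ts` to the compilation) only affects type-checking; the runtime still has to provide the newer APIs. Purely for illustration, two ES2023 array helpers that now type-check under this configuration (Node 20+ supplies both at runtime):

```ts
// Illustrative only: these methods are typed once "lib" includes "es2023".
const durations = [31, 7, 19];
const ordered = durations.toSorted((a, b) => a - b); // [7, 19, 31], non-mutating
const lastSmall = durations.findLast(d => d < 20);   // 19
```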