这是indexloc提供的服务,不要输入任何密码
Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions google/cloud/bigquery/_job_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -526,6 +526,8 @@ def do_query():
query_id=query_results.query_id,
project=query_results.project,
num_dml_affected_rows=query_results.num_dml_affected_rows,
query=query,
total_bytes_processed=query_results.total_bytes_processed,
)

if job_retry is not None:
Expand Down
8 changes: 8 additions & 0 deletions google/cloud/bigquery/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -4081,6 +4081,8 @@ def _list_rows_from_query_results(
query_id: Optional[str] = None,
first_page_response: Optional[Dict[str, Any]] = None,
num_dml_affected_rows: Optional[int] = None,
query: Optional[str] = None,
total_bytes_processed: Optional[int] = None,
) -> RowIterator:
"""List the rows of a completed query.
See
Expand Down Expand Up @@ -4128,6 +4130,10 @@ def _list_rows_from_query_results(
num_dml_affected_rows (Optional[int]):
If this RowIterator is the result of a DML query, the number of
rows that were affected.
query (Optional[str]):
The query text used.
total_bytes_processed (Optional[int]):
Total bytes processed from job statistics, if present.

Returns:
google.cloud.bigquery.table.RowIterator:
Expand Down Expand Up @@ -4165,6 +4171,8 @@ def _list_rows_from_query_results(
query_id=query_id,
first_page_response=first_page_response,
num_dml_affected_rows=num_dml_affected_rows,
query=query,
total_bytes_processed=total_bytes_processed,
)
return row_iterator

Expand Down
2 changes: 2 additions & 0 deletions google/cloud/bigquery/job/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -1741,6 +1741,8 @@ def is_job_done():
query_id=self.query_id,
first_page_response=first_page_response,
num_dml_affected_rows=self._query_results.num_dml_affected_rows,
query=self.query,
total_bytes_processed=self.total_bytes_processed,
**list_rows_kwargs,
)
rows._preserve_order = _contains_order_by(self.query)
Expand Down
18 changes: 18 additions & 0 deletions google/cloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -1760,6 +1760,10 @@ class RowIterator(HTTPIterator):
first_page_response (Optional[dict]):
API response for the first page of results. These are returned when
the first page is requested.
query (Optional[str]):
The query text used.
total_bytes_processed (Optional[int]):
Total bytes processed from job statistics, if present.
"""

def __init__(
Expand All @@ -1781,6 +1785,8 @@ def __init__(
query_id: Optional[str] = None,
project: Optional[str] = None,
num_dml_affected_rows: Optional[int] = None,
query: Optional[str] = None,
total_bytes_processed: Optional[int] = None,
):
super(RowIterator, self).__init__(
client,
Expand Down Expand Up @@ -1808,6 +1814,8 @@ def __init__(
self._query_id = query_id
self._project = project
self._num_dml_affected_rows = num_dml_affected_rows
self._query = query
self._total_bytes_processed = total_bytes_processed

@property
def _billing_project(self) -> Optional[str]:
Expand Down Expand Up @@ -1855,6 +1863,16 @@ def query_id(self) -> Optional[str]:
"""
return self._query_id

@property
def query(self) -> Optional[str]:
"""The query text used."""
return self._query

@property
def total_bytes_processed(self) -> Optional[int]:
"""Total bytes processed from job statistics, if present."""
return self._total_bytes_processed

def _is_almost_completely_cached(self):
"""Check if all results are completely cached.

Expand Down
3 changes: 3 additions & 0 deletions tests/unit/job/test_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -887,6 +887,7 @@ def test_result_reloads_job_state_until_done(self):
}
job_resource = self._make_resource(started=True, location="EU")
job_resource_done = self._make_resource(started=True, ended=True, location="EU")
job_resource_done["statistics"]["query"]["totalBytesProcessed"] = str(1234)
job_resource_done["configuration"]["query"]["destinationTable"] = {
"projectId": "dest-project",
"datasetId": "dest_dataset",
Expand Down Expand Up @@ -966,6 +967,8 @@ def test_result_reloads_job_state_until_done(self):
# Test that the total_rows property has changed during iteration, based
# on the response from tabledata.list.
self.assertEqual(result.total_rows, 1)
self.assertEqual(result.query, job.query)
self.assertEqual(result.total_bytes_processed, 1234)

query_results_path = f"/projects/{self.PROJECT}/queries/{self.JOB_ID}"
query_results_call = mock.call(
Expand Down
3 changes: 3 additions & 0 deletions tests/unit/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -5517,6 +5517,7 @@ def test_query_and_wait_defaults(self):
"totalRows": "1",
"rows": [{"f": [{"v": "5552452"}]}],
"queryId": "job_abcDEF_",
"totalBytesProcessed": 1234,
}
creds = _make_credentials()
http = object()
Expand All @@ -5532,6 +5533,8 @@ def test_query_and_wait_defaults(self):
self.assertIsNone(rows.job_id)
self.assertIsNone(rows.project)
self.assertIsNone(rows.location)
self.assertEqual(rows.query, query)
self.assertEqual(rows.total_bytes_processed, 1234)

# Verify the request we send is to jobs.query.
conn.api_request.assert_called_once()
Expand Down