Skip to content
This repository was archived by the owner on Apr 1, 2026. It is now read-only.

Commit 211370d

Browse files
committed
feat: only show customer-facing query
1 parent 902c546 commit 211370d

File tree

4 files changed

+177
-58
lines changed

4 files changed

+177
-58
lines changed

bigframes/session/_io/bigquery/read_gbq_table.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -256,7 +256,6 @@ def check_if_index_columns_are_unique(
256256
index_cols: Sequence[str],
257257
*,
258258
publisher: bigframes.core.events.Publisher,
259-
metrics: Optional[bigframes.session.metrics.ExecutionMetrics] = None,
260259
) -> Tuple[str, ...]:
261260
import bigframes.core.sql
262261
import bigframes.session._io.bigquery
@@ -274,7 +273,7 @@ def check_if_index_columns_are_unique(
274273
timeout=None,
275274
location=None,
276275
project=None,
277-
metrics=metrics,
276+
metrics=None,
278277
query_with_job=False,
279278
publisher=publisher,
280279
)

bigframes/session/loader.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -926,7 +926,6 @@ def read_gbq_table(
926926
table=table,
927927
index_cols=index_cols,
928928
publisher=self._publisher,
929-
metrics=self._metrics,
930929
)
931930
if publish_execution:
932931
self._publisher.publish(

bigframes/session/metrics.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -200,7 +200,6 @@ def count_job_stats(
200200
self.jobs.append(metadata)
201201

202202
else:
203-
# Handle other job types (e.g. LoadJob)
204203
self.execution_count += 1
205204
duration = (
206205
(query_job.ended - query_job.created).total_seconds()

tests/unit/session/test_execution_history.py

Lines changed: 176 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020

2121
import bigframes
2222
import bigframes.session
23-
import bigframes.session.metrics as metrics
2423

2524
NOW = datetime.datetime.now(datetime.timezone.utc)
2625

@@ -29,6 +28,7 @@ def test_execution_history_returns_dataframe_with_correct_job_id(monkeypatch):
2928
query_job = unittest.mock.create_autospec(bigquery.QueryJob, instance=True)
3029
query_job.job_id = "job_pandas"
3130
query_job.location = "US"
31+
query_job.project = "test-project"
3232
query_job.created = NOW
3333
query_job.started = NOW
3434
query_job.ended = NOW + datetime.timedelta(seconds=1)
@@ -40,25 +40,43 @@ def test_execution_history_returns_dataframe_with_correct_job_id(monkeypatch):
4040
query_job.cache_hit = False
4141
query_job.query = "SELECT 1"
4242
query_job.configuration.dry_run = False
43+
query_job.statement_type = "SELECT"
4344

44-
# Mock the clients provider to avoid actual BQ client creation
45-
clients_provider = unittest.mock.create_autospec(
46-
bigframes.session.clients.ClientsProvider
47-
)
4845
bq_client = unittest.mock.create_autospec(bigquery.Client)
4946
bq_client.project = "test-project"
5047
bq_client.default_query_job_config = bigquery.QueryJobConfig()
5148
bq_client.query_and_wait.return_value = iter([[NOW]])
49+
bq_client.get_job.return_value = query_job
50+
51+
clients_provider = unittest.mock.create_autospec(
52+
bigframes.session.clients.ClientsProvider
53+
)
5254
clients_provider.bqclient = bq_client
5355

56+
row_iterator = unittest.mock.create_autospec(
57+
bigquery.table.RowIterator, instance=True
58+
)
59+
row_iterator.job_id = "job_pandas"
60+
row_iterator.location = "US"
61+
row_iterator.project = "test-project"
62+
63+
import bigframes.session._io.bigquery as bf_io_bigquery
64+
65+
monkeypatch.setattr(
66+
bf_io_bigquery,
67+
"start_query_with_client",
68+
lambda *args, **kwargs: (row_iterator, None),
69+
)
70+
5471
session = bigframes.session.Session(clients_provider=clients_provider)
72+
session._loader.read_gbq_table = unittest.mock.MagicMock() # type: ignore[method-assign]
5573

56-
# Mock get_global_session to return our session
5774
monkeypatch.setattr(
5875
bigframes.core.global_session, "get_global_session", lambda: session
5976
)
6077

61-
session._metrics.count_job_stats(query_job=query_job)
78+
# Act
79+
session.read_gbq_query("SELECT 1")
6280

6381
df = bigframes.execution_history()
6482

@@ -67,10 +85,11 @@ def test_execution_history_returns_dataframe_with_correct_job_id(monkeypatch):
6785
assert df.iloc[0]["job_id"] == "job_pandas"
6886

6987

70-
def test_execution_history_includes_query_job_metadata():
88+
def test_execution_history_includes_query_job_metadata(monkeypatch):
7189
query_job = unittest.mock.create_autospec(bigquery.QueryJob, instance=True)
7290
query_job.job_id = "job1"
7391
query_job.location = "US"
92+
query_job.project = "test-project"
7493
query_job.created = NOW
7594
query_job.started = NOW
7695
query_job.ended = NOW + datetime.timedelta(seconds=1)
@@ -82,21 +101,39 @@ def test_execution_history_includes_query_job_metadata():
82101
query_job.cache_hit = False
83102
query_job.query = "SELECT 1"
84103
query_job.configuration.dry_run = False
104+
query_job.statement_type = "SELECT"
85105

86-
# Mock the clients provider to avoid actual BQ client creation
87-
clients_provider = unittest.mock.create_autospec(
88-
bigframes.session.clients.ClientsProvider
89-
)
90-
# We need to mock bqclient specifically as it's accessed during Session init
91106
bq_client = unittest.mock.create_autospec(bigquery.Client)
92107
bq_client.project = "test-project"
93108
bq_client.default_query_job_config = bigquery.QueryJobConfig()
94-
# Mock clock sync query
95109
bq_client.query_and_wait.return_value = iter([[NOW]])
110+
bq_client.get_job.return_value = query_job
111+
112+
clients_provider = unittest.mock.create_autospec(
113+
bigframes.session.clients.ClientsProvider
114+
)
96115
clients_provider.bqclient = bq_client
97116

117+
row_iterator = unittest.mock.create_autospec(
118+
bigquery.table.RowIterator, instance=True
119+
)
120+
row_iterator.job_id = "job1"
121+
row_iterator.location = "US"
122+
row_iterator.project = "test-project"
123+
124+
import bigframes.session._io.bigquery as bf_io_bigquery
125+
126+
monkeypatch.setattr(
127+
bf_io_bigquery,
128+
"start_query_with_client",
129+
lambda *args, **kwargs: (row_iterator, None),
130+
)
131+
98132
session = bigframes.session.Session(clients_provider=clients_provider)
99-
session._metrics.count_job_stats(query_job=query_job)
133+
session._loader.read_gbq_table = unittest.mock.MagicMock() # type: ignore[method-assign]
134+
135+
# Act
136+
session.read_gbq_query("SELECT 1")
100137

101138
df = session.execution_history()
102139

@@ -107,40 +144,78 @@ def test_execution_history_includes_query_job_metadata():
107144
assert "creation_time" in df.columns
108145

109146

110-
def test_execution_history_tracks_query_job_metrics():
147+
def test_execution_history_tracks_query_job_metrics(monkeypatch):
111148
query_job = unittest.mock.create_autospec(bigquery.QueryJob, instance=True)
112149
query_job.job_id = "job1"
113150
query_job.location = "US"
151+
query_job.project = "test-project"
114152
query_job.created = NOW
115-
query_job.started = NOW + datetime.timedelta(seconds=1)
116-
query_job.ended = NOW + datetime.timedelta(seconds=3)
153+
query_job.started = NOW
154+
query_job.ended = NOW + datetime.timedelta(seconds=1)
117155
query_job.state = "DONE"
118-
query_job.total_bytes_processed = 1024
119-
query_job.slot_millis = 100
120156
query_job.job_type = "query"
121157
query_job.error_result = None
122158
query_job.cache_hit = False
123159
query_job.query = "SELECT 1"
124160
query_job.configuration.dry_run = False
161+
query_job.statement_type = "SELECT"
162+
163+
bq_client = unittest.mock.create_autospec(bigquery.Client)
164+
bq_client.project = "test-project"
165+
bq_client.default_query_job_config = bigquery.QueryJobConfig()
166+
bq_client.query_and_wait.return_value = iter([[NOW]])
167+
bq_client.get_job.return_value = query_job
168+
169+
clients_provider = unittest.mock.create_autospec(
170+
bigframes.session.clients.ClientsProvider
171+
)
172+
clients_provider.bqclient = bq_client
173+
174+
row_iterator = unittest.mock.create_autospec(
175+
bigquery.table.RowIterator, instance=True
176+
)
177+
row_iterator.job_id = "job1"
178+
row_iterator.location = "US"
179+
row_iterator.project = "test-project"
180+
181+
import bigframes.session._io.bigquery as bf_io_bigquery
182+
183+
monkeypatch.setattr(
184+
bf_io_bigquery,
185+
"start_query_with_client",
186+
lambda *args, **kwargs: (row_iterator, None),
187+
)
188+
189+
import bigframes.session.metrics as bfm_metrics
190+
191+
monkeypatch.setattr(
192+
bfm_metrics,
193+
"get_performance_stats",
194+
lambda job: (10, 1024, 100, 3.0),
195+
)
196+
197+
session = bigframes.session.Session(clients_provider=clients_provider)
198+
session._loader.read_gbq_table = unittest.mock.MagicMock() # type: ignore[method-assign]
125199

126-
execution_metrics = metrics.ExecutionMetrics()
127-
execution_metrics.count_job_stats(query_job=query_job)
200+
# Act
201+
session.read_gbq_query("SELECT 1")
128202

129-
assert len(execution_metrics.jobs) == 1
130-
job = execution_metrics.jobs[0]
131-
assert job.job_id == "job1"
132-
assert job.status == "DONE"
133-
assert job.total_bytes_processed == 1024
134-
assert job.duration_seconds == 3.0
203+
df = session.execution_history()
135204

205+
assert len(df) == 1
206+
assert df.iloc[0]["job_id"] == "job1"
207+
assert df.iloc[0]["total_bytes_processed"] == 1024
208+
assert df.iloc[0]["duration_seconds"] == 3.0
136209

137-
def test_execution_history_tracks_row_iterator_metrics():
210+
211+
def test_execution_history_tracks_row_iterator_metrics(monkeypatch):
138212
row_iterator = unittest.mock.create_autospec(
139213
bigquery.table.RowIterator, instance=True
140214
)
141-
row_iterator.job_id = "job2"
215+
row_iterator.job_id = None # Force fallback to RowIterator metrics in loader.py
142216
row_iterator.query_id = "query2"
143217
row_iterator.location = "US"
218+
row_iterator.project = "test-project"
144219
row_iterator.created = NOW
145220
row_iterator.started = NOW + datetime.timedelta(seconds=1)
146221
row_iterator.ended = NOW + datetime.timedelta(seconds=2)
@@ -149,22 +224,43 @@ def test_execution_history_tracks_row_iterator_metrics():
149224
row_iterator.cache_hit = True
150225
row_iterator.query = "SELECT 2"
151226

152-
execution_metrics = metrics.ExecutionMetrics()
153-
execution_metrics.count_job_stats(row_iterator=row_iterator)
227+
bq_client = unittest.mock.create_autospec(bigquery.Client)
228+
bq_client.project = "test-project"
229+
bq_client.default_query_job_config = bigquery.QueryJobConfig()
230+
bq_client.query_and_wait.return_value = iter([[NOW]])
231+
232+
clients_provider = unittest.mock.create_autospec(
233+
bigframes.session.clients.ClientsProvider
234+
)
235+
clients_provider.bqclient = bq_client
236+
237+
import bigframes.session._io.bigquery as bf_io_bigquery
238+
239+
monkeypatch.setattr(
240+
bf_io_bigquery,
241+
"start_query_with_client",
242+
lambda *args, **kwargs: (row_iterator, None),
243+
)
244+
245+
session = bigframes.session.Session(clients_provider=clients_provider)
246+
247+
# Act
248+
session.read_gbq_query("SELECT 2")
249+
250+
df = session.execution_history()
154251

155-
assert len(execution_metrics.jobs) == 1
156-
job = execution_metrics.jobs[0]
157-
assert job.job_id == "job2"
158-
assert job.query_id == "query2"
159-
assert job.status == "DONE"
160-
assert job.cached is True
161-
assert job.duration_seconds == 2.0
252+
assert len(df) == 1
253+
assert df.iloc[0]["query_id"] == "query2"
254+
assert df.iloc[0]["total_bytes_processed"] == 512
255+
# duration is 2 - 0 = 2 seconds
256+
assert df.iloc[0]["duration_seconds"] == 2.0
162257

163258

164-
def test_execution_history_tracks_load_job_metrics():
259+
def test_execution_history_tracks_load_job_metrics(monkeypatch):
165260
load_job = unittest.mock.create_autospec(bigquery.LoadJob, instance=True)
166261
load_job.job_id = "job3"
167262
load_job.location = "US"
263+
load_job.project = "test-project"
168264
load_job.created = NOW
169265
load_job.started = NOW
170266
load_job.ended = NOW + datetime.timedelta(seconds=5)
@@ -179,17 +275,43 @@ def test_execution_history_tracks_load_job_metrics():
179275
load_job.source_uris = ["gs://bucket/file.csv"]
180276
load_job.configuration.source_format = "CSV"
181277

182-
execution_metrics = metrics.ExecutionMetrics()
183-
execution_metrics.count_job_stats(query_job=load_job)
184-
185-
assert len(execution_metrics.jobs) == 1
186-
job = execution_metrics.jobs[0]
187-
assert job.job_id == "job3"
188-
assert job.job_type == "load"
189-
assert job.duration_seconds == 5.0
190-
assert job.output_rows == 100
191-
assert job.input_files == 1
192-
assert job.input_bytes == 1024
193-
assert job.destination_table == "project.dataset.table"
194-
assert job.source_uris == ["gs://bucket/file.csv"]
195-
assert job.source_format == "CSV"
278+
bq_client = unittest.mock.create_autospec(bigquery.Client)
279+
bq_client.project = "test-project"
280+
bq_client.default_query_job_config = bigquery.QueryJobConfig()
281+
bq_client.query_and_wait.return_value = iter([[NOW]])
282+
bq_client.load_table_from_file.return_value = load_job
283+
284+
storage_manager = unittest.mock.MagicMock()
285+
storage_manager.create_temp_table.return_value = (
286+
bigquery.TableReference.from_string("project.dataset.table")
287+
)
288+
289+
clients_provider = unittest.mock.create_autospec(
290+
bigframes.session.clients.ClientsProvider
291+
)
292+
clients_provider.bqclient = bq_client
293+
294+
import bigframes.core.bq_data as bq_data
295+
296+
monkeypatch.setattr(
297+
bq_data.GbqNativeTable,
298+
"from_table",
299+
lambda *args, **kwargs: unittest.mock.MagicMock(),
300+
)
301+
monkeypatch.setattr(bq_data.BigqueryDataSource, "__post_init__", lambda self: None)
302+
303+
session = bigframes.session.Session(clients_provider=clients_provider)
304+
session._storage_manager = storage_manager
305+
306+
import io
307+
308+
# Act
309+
buffer = io.BytesIO(b"col1,col2\n1,2\n3,4")
310+
session.read_csv(buffer, write_engine="bigquery_load")
311+
312+
df = session.execution_history()
313+
314+
assert len(df) == 1
315+
assert df.iloc[0]["job_id"] == "job3"
316+
assert df.iloc[0]["job_type"] == "load"
317+
assert df.iloc[0]["duration_seconds"] == 5.0

0 commit comments

Comments (0)