 import pytest

 import bigframes
+import bigframes.core.nodes as nodes
 import bigframes.dataframe
 import bigframes.dtypes
 import bigframes.ml.linear_model
@@ -640,6 +641,37 @@ def test_read_gbq_with_configuration(
     assert df.shape == (9, 3)


+def test_read_gbq_query_w_allow_large_results(session: bigframes.Session):
+    if not hasattr(session.bqclient, "default_job_creation_mode"):
+        pytest.skip("Jobless query only available on newer google-cloud-bigquery.")
+
+    query = "SELECT 1"
+
+    # Make sure we don't get a cached table.
+    configuration = {"query": {"useQueryCache": False}}
+
+    # Very small results should wrap a local node.
+    df_false = session.read_gbq(
+        query,
+        configuration=configuration,
+        allow_large_results=False,
+    )
+    assert df_false.shape == (1, 1)
+    roots_false = df_false._get_block().expr.node.roots
+    assert any(isinstance(node, nodes.ReadLocalNode) for node in roots_false)
+    assert not any(isinstance(node, nodes.ReadTableNode) for node in roots_false)
+
+    # Large results allowed should wrap a table.
+    df_true = session.read_gbq(
+        query,
+        configuration=configuration,
+        allow_large_results=True,
+    )
+    assert df_true.shape == (1, 1)
+    roots_true = df_true._get_block().expr.node.roots
+    assert any(isinstance(node, nodes.ReadTableNode) for node in roots_true)
+
+
 def test_read_gbq_with_custom_global_labels(
     session: bigframes.Session, scalars_table_id: str
 ):
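For context, a rough usage sketch of the behavior the new test pins down, assuming a session created with bigframes.connect() and default application credentials (that setup is an assumption for illustration, not part of this change): with allow_large_results=False a tiny query result is materialized locally, while allow_large_results=True leaves the result backed by a destination table.

import bigframes

# Assumed setup for illustration: application-default credentials and a
# billing project already configured; not part of this change.
session = bigframes.connect()

# Small result, fetched without a destination table: the DataFrame is
# backed by local data (a ReadLocalNode in its expression tree).
df_local = session.read_gbq("SELECT 1", allow_large_results=False)

# Large results allowed: the query output is written to a results table
# and the DataFrame reads from that table (a ReadTableNode at the root).
df_table = session.read_gbq("SELECT 1", allow_large_results=True)

print(df_local.shape, df_table.shape)  # both (1, 1)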