def test_fetchall_w_bqstorage_client_v1beta1_fetch_success(self):
        from google.cloud.bigquery import dbapi
        from google.cloud.bigquery import table

        # Deliberately shuffled key order to exercise any non-deterministic
        # dict key ordering in the result handling.
        row_data = [
            table.Row([1.4, 1.1, 1.3, 1.2], {"bar": 3, "baz": 2, "foo": 1, "quux": 0}),
            table.Row([2.4, 2.1, 2.3, 2.2], {"bar": 3, "baz": 2, "foo": 1, "quux": 0}),
        ]
        bqstorage_streamed_rows = [
            {name: _to_pyarrow(value) for name, value in zip(("bar", "foo", "quux", "baz"), group)}
            for group in ((1.2, 1.1, 1.4, 1.3), (2.2, 2.1, 2.4, 2.3))
        ]

        mock_client = self._mock_client(rows=row_data)
        mock_bqstorage_client = self._mock_bqstorage_client(
            stream_count=1, rows=bqstorage_streamed_rows, v1beta1=True
        )

        cursor = dbapi.connect(
            client=mock_client, bqstorage_client=mock_bqstorage_client
        ).cursor()
        cursor.execute("SELECT foo, bar FROM some_table")

        with warnings.catch_warnings(record=True) as warned:
            rows = cursor.fetchall()

        # Exactly one v1beta1 deprecation warning is expected.
        matching_warnings = [
            w
            for w in warned
            if issubclass(w.category, DeprecationWarning) and "v1beta1" in str(w)
        ]
        self.assertEqual(len(matching_warnings), 1, "Deprecation warning not raised.")

        # The REST (tabledata.list) client must not have been used.
        mock_client.list_rows.assert_not_called()

        # Compare fields sorted by value so key order is irrelevant.
        by_value = op.itemgetter(1)
        actual_row_data = [sorted(row.items(), key=by_value) for row in rows]
        self.assertEqual(
            actual_row_data,
            [
                [("foo", 1.1), ("bar", 1.2), ("baz", 1.3), ("quux", 1.4)],
                [("foo", 2.1), ("bar", 2.2), ("baz", 2.3), ("quux", 2.4)],
            ],
        )
# Example #2
    def test_fetchall_w_bqstorage_client_fetch_success(self):
        from google.cloud.bigquery import dbapi
        from google.cloud.bigquery import table

        # Deliberately shuffled key order to exercise any non-deterministic
        # dict key ordering in the result handling.
        row_data = [
            table.Row([1.4, 1.1, 1.3, 1.2], {"bar": 3, "baz": 2, "foo": 1, "quux": 0}),
            table.Row([2.4, 2.1, 2.3, 2.2], {"bar": 3, "baz": 2, "foo": 1, "quux": 0}),
        ]
        bqstorage_streamed_rows = [
            {name: _to_pyarrow(value) for name, value in zip(("bar", "foo", "quux", "baz"), group)}
            for group in ((1.2, 1.1, 1.4, 1.3), (2.2, 2.1, 2.4, 2.3))
        ]

        mock_client = self._mock_client(rows=row_data)
        mock_bqstorage_client = self._mock_bqstorage_client(
            stream_count=1, rows=bqstorage_streamed_rows
        )
        mock_client._ensure_bqstorage_client.return_value = mock_bqstorage_client

        connection = dbapi.connect(
            client=mock_client, bqstorage_client=mock_bqstorage_client
        )
        cursor = connection.cursor()
        cursor.execute("SELECT foo, bar FROM some_table")

        rows = cursor.fetchall()

        # The REST (tabledata.list) client must not have been used.
        mock_client.list_rows.assert_not_called()

        # Compare fields sorted by value so key order is irrelevant.
        by_value = op.itemgetter(1)
        actual_row_data = [sorted(row.items(), key=by_value) for row in rows]
        self.assertEqual(
            actual_row_data,
            [
                [("foo", 1.1), ("bar", 1.2), ("baz", 1.3), ("quux", 1.4)],
                [("foo", 2.1), ("bar", 2.2), ("baz", 2.3), ("quux", 2.4)],
            ],
        )
def test__run_query():
    magics.context._credentials = None

    job_id = "job_1234"
    sql = "SELECT 17"
    # Two timeouts followed by the final rows => three polling updates total.
    responses = [
        futures.TimeoutError,
        futures.TimeoutError,
        [table.Row((17,), {"num": 0})],
    ]

    client_patch = mock.patch(
        "google.cloud.bigquery.magics.bigquery.Client", autospec=True
    )
    with client_patch as client_mock, io.capture_output() as captured:
        client_mock().query(sql).result.side_effect = responses
        client_mock().query(sql).job_id = job_id
        query_job = magics._run_query(client_mock(), sql)

    # Drop blank lines and the terminal "clear line" escape sequence emitted
    # when the display is refreshed.
    updates = [
        line
        for line in re.split("\n|\r", captured.stdout)
        if line and line != "\x1b[2K"
    ]

    assert query_job.job_id == job_id
    assert updates[0] == "Executing query with job ID: {}".format(job_id)
    execution_updates = updates[1:-1]
    assert len(execution_updates) == 3  # one update per API response
    assert all(re.match("Query executing: .*s", line) for line in execution_updates)
    assert re.match("Query complete after .*s", updates[-1])
# Example #4
    def test_fetchall_w_bqstorage_client_fetch_error_no_fallback(self):
        from google.cloud.bigquery import dbapi
        from google.cloud.bigquery import table

        row_data = [table.Row([1.1, 1.2], {"foo": 0, "bar": 1})]

        mock_client = self._mock_client(rows=row_data)
        # Pass any explicitly supplied BQ Storage client straight through.
        mock_client._ensure_bqstorage_client.side_effect = (
            lambda bqstorage_client=None, **kwargs: bqstorage_client
        )
        mock_bqstorage_client = self._mock_bqstorage_client(
            stream_count=1, rows=row_data
        )
        mock_bqstorage_client.create_read_session.side_effect = exceptions.Forbidden(
            "invalid credentials"
        )

        connection = dbapi.connect(
            client=mock_client, bqstorage_client=mock_bqstorage_client
        )
        cursor = connection.cursor()
        cursor.execute("SELECT foo, bar FROM some_table")

        # The Forbidden error must surface to the caller unchanged.
        with self.assertRaisesRegex(exceptions.Forbidden, "invalid credentials"):
            cursor.fetchall()

        # No fallback: the REST (tabledata.list) client was never used.
        mock_client.list_rows.assert_not_called()
    def test_fetchall_w_bqstorage_client_no_arrow_compression(self):
        from google.cloud.bigquery import dbapi
        from google.cloud.bigquery import table

        # Deliberately shuffled key order to exercise any non-deterministic
        # dict key ordering in the result handling.
        row_data = [table.Row([1.2, 1.1], {"bar": 1, "foo": 0})]
        bqstorage_streamed_rows = [
            {"bar": _to_pyarrow(1.2), "foo": _to_pyarrow(1.1)}
        ]

        mock_client = self._mock_client(rows=row_data)
        mock_bqstorage_client = self._mock_bqstorage_client(
            stream_count=1, rows=bqstorage_streamed_rows
        )

        cursor = dbapi.connect(
            client=mock_client, bqstorage_client=mock_bqstorage_client
        ).cursor()
        cursor.execute("SELECT foo, bar FROM some_table")

        # Simulate a BQ Storage dependency without Arrow compression support.
        compression_patch = mock.patch(
            "google.cloud.bigquery.dbapi.cursor._ARROW_COMPRESSION_SUPPORT", new=False
        )
        with compression_patch:
            rows = cursor.fetchall()

        # The REST (tabledata.list) client must not have been used.
        mock_client.list_rows.assert_not_called()

        # The read session must request plain ARROW format (no compression).
        mock_bqstorage_client.create_read_session.assert_called_once_with(
            parent="projects/P",
            read_session=bigquery_storage.ReadSession(
                table="projects/P/datasets/DS/tables/T",
                data_format=bigquery_storage.DataFormat.ARROW,
            ),
            max_stream_count=1,
        )

        # Compare fields sorted by value so key order is irrelevant.
        by_value = op.itemgetter(1)
        actual_row_data = [sorted(row.items(), key=by_value) for row in rows]
        self.assertEqual(actual_row_data, [[("foo", 1.1), ("bar", 1.2)]])
# Example #6
 def to_table_row(row):
     """Convert one streamed record of ARROW values into a ``table.Row``."""
     # The fetched values are ARROW scalars; as_py() materializes each one
     # as a plain Python object.
     index_by_key = {key: position for position, key in enumerate(row.keys())}
     return table.Row(tuple(cell.as_py() for cell in row.values()), index_by_key)
# Example #7
    def test_fetchall_w_bqstorage_client_fetch_error_fallback_on_client(self):
        from google.cloud.bigquery import dbapi
        from google.cloud.bigquery import table

        # Deliberately shuffled key order to exercise any non-deterministic
        # dict key ordering in the result handling.
        row_data = [
            table.Row([1.4, 1.1, 1.3, 1.2], {"bar": 3, "baz": 2, "foo": 1, "quux": 0}),
            table.Row([2.4, 2.1, 2.3, 2.2], {"bar": 3, "baz": 2, "foo": 1, "quux": 0}),
        ]
        bqstorage_streamed_rows = [
            {"bar": 1.2, "foo": 1.1, "quux": 1.4, "baz": 1.3},
            {"bar": 2.2, "foo": 2.1, "quux": 2.4, "baz": 2.3},
        ]

        mock_client = self._mock_client(rows=row_data)
        mock_bqstorage_client = self._mock_bqstorage_client(
            stream_count=1, rows=bqstorage_streamed_rows
        )
        # The storage client fails, so fetchall should fall back to REST.
        mock_bqstorage_client.create_read_session.side_effect = exceptions.BadRequest(
            "BQ storage what??"
        )

        connection = dbapi.connect(
            client=mock_client, bqstorage_client=mock_bqstorage_client
        )
        cursor = connection.cursor()
        cursor.execute("SELECT foo, bar FROM some_table")

        with mock.patch("google.cloud.bigquery.dbapi.cursor._LOGGER") as mock_logger:
            rows = cursor.fetchall()

        # Both transports were exercised: storage first, then the fallback.
        mock_bqstorage_client.create_read_session.assert_called()
        mock_client.list_rows.assert_called()

        # The fallback to the tabledata.list API must have been logged.
        self.assertTrue(
            [
                call
                for call in mock_logger.debug.call_args_list
                if call.args and "tabledata.list API" in call.args[0]
            ]
        )

        # Compare fields sorted by value so key order is irrelevant.
        by_value = op.itemgetter(1)
        actual_row_data = [sorted(row.items(), key=by_value) for row in rows]
        self.assertEqual(
            actual_row_data,
            [
                [("foo", 1.1), ("bar", 1.2), ("baz", 1.3), ("quux", 1.4)],
                [("foo", 2.1), ("bar", 2.2), ("baz", 2.3), ("quux", 2.4)],
            ],
        )