def _try_fetch(self, size=None): """Try to start fetching data, if not yet started. Mutates self to indicate that iteration has started. """ if self._query_job is None: raise exceptions.InterfaceError( "No query results: execute() must be called before fetch.") if self._query_job.dry_run: self._query_data = iter([]) return is_dml = (self._query_job.statement_type and self._query_job.statement_type.upper() != "SELECT") if is_dml: self._query_data = iter([]) return if self._query_data is None: client = self.connection._client bqstorage_client = self.connection._bqstorage_client if bqstorage_client is not None: rows_iterable = self._bqstorage_fetch(bqstorage_client) self._query_data = _helpers.to_bq_table_rows(rows_iterable) return rows_iter = client.list_rows( self._query_job.destination, selected_fields=self._query_job._query_results.schema, page_size=self.arraysize, ) self._query_data = iter(rows_iter)
def fetchmany(self, size=None):
    """Fetch multiple results from the last ``execute*()`` call.

    .. note::
        The size parameter is not used for the request/response size.
        Set the ``arraysize`` attribute before calling ``execute()`` to
        set the batch size.

    :type size: int
    :param size:
        (Optional) Maximum number of rows to return. Defaults to the
        ``arraysize`` property value. A ``size`` of zero (or negative)
        returns an empty list.

    :rtype: List[tuple]
    :returns: A list of rows.
    :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
        if called before ``execute()``.
    """
    if self._query_data is None:
        raise exceptions.InterfaceError(
            'No query results: execute() must be called before fetch.')
    if size is None:
        size = self.arraysize

    rows = []
    # Check the limit BEFORE pulling a row: the previous append-then-check
    # loop incorrectly consumed and returned one row when size <= 0.
    while len(rows) < size:
        try:
            rows.append(next(self._query_data))
        except StopIteration:
            # Result iterator exhausted; return what we have.
            break
    return rows
def fetchall(self):
    """Fetch all remaining results from the last ``execute*()`` call.

    :rtype: List[tuple]
    :returns: A list of all the rows in the results.
    :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
        if called before ``execute()``.
    """
    if self._query_data is None:
        raise exceptions.InterfaceError(
            'No query results: execute() must be called before fetch.')
    # Drain the remaining rows from the result iterator.
    return list(self._query_data)
def _try_fetch(self, size=None): """Try to start fetching data, if not yet started. Mutates self to indicate that iteration has started. """ if self._query_results is None: raise exceptions.InterfaceError( 'No query results: execute() must be called before fetch.') if size is None: size = self.arraysize if self._query_data is None: self._query_data = iter( self._query_results.fetch_data(max_results=size))
def _try_fetch(self, size=None): """Try to start fetching data, if not yet started. Mutates self to indicate that iteration has started. """ if self._query_job is None: raise exceptions.InterfaceError( "No query results: execute() must be called before fetch.") is_dml = (self._query_job.statement_type and self._query_job.statement_type.upper() != "SELECT") if is_dml: self._query_data = iter([]) return if self._query_data is None: client = self.connection._client bqstorage_client = self.connection._bqstorage_client if bqstorage_client is not None: try: rows_iterable = self._bqstorage_fetch(bqstorage_client) self._query_data = _helpers.to_bq_table_rows(rows_iterable) return except google.api_core.exceptions.GoogleAPICallError as exc: # NOTE: Forbidden is a subclass of GoogleAPICallError if isinstance(exc, google.api_core.exceptions.Forbidden): # Don't hide errors such as insufficient permissions to create # a read session, or the API is not enabled. Both of those are # clearly problems if the developer has explicitly asked for # BigQuery Storage API support. raise # There is an issue with reading from small anonymous # query results tables. If such an error occurs, we silence # it in order to try again with the tabledata.list API. _LOGGER.debug( "Error fetching data with BigQuery Storage API, " "falling back to tabledata.list API.") rows_iter = client.list_rows( self._query_job.destination, selected_fields=self._query_job._query_results.schema, page_size=self.arraysize, ) self._query_data = iter(rows_iter)
def fetchone(self):
    """Fetch a single row from the results of the last ``execute*()`` call.

    :rtype: tuple
    :returns:
        A tuple representing a row or ``None`` if no more data is
        available.
    :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
        if called before ``execute()``.
    """
    if self._query_data is None:
        raise exceptions.InterfaceError(
            'No query results: execute() must be called before fetch.')
    try:
        # Builtin next() (available since Python 2.6) behaves identically
        # to six.next, so the compatibility shim is unnecessary here.
        return next(self._query_data)
    except StopIteration:
        return None
def _try_fetch(self, size=None): """Try to start fetching data, if not yet started. Mutates self to indicate that iteration has started. """ if self._query_job is None: raise exceptions.InterfaceError( 'No query results: execute() must be called before fetch.') is_dml = (self._query_job.statement_type and self._query_job.statement_type.upper() != 'SELECT') if is_dml: self._query_data = iter([]) return if self._query_data is None: client = self.connection._client rows_iter = client.list_rows( self._query_job.destination, selected_fields=self._query_job._query_results.schema, page_size=self.arraysize) self._query_data = iter(rows_iter)
def _try_fetch(self, size=None): """Try to start fetching data, if not yet started. Mutates self to indicate that iteration has started. """ if self._query_job is None: raise exceptions.InterfaceError( "No query results: execute() must be called before fetch.") if self._query_job.dry_run: self._query_data = iter([]) return if self._query_data is None: bqstorage_client = self.connection._bqstorage_client if bqstorage_client is not None: rows_iterable = self._bqstorage_fetch(bqstorage_client) self._query_data = _helpers.to_bq_table_rows(rows_iterable) return rows_iter = self._query_job.result(page_size=self.arraysize) self._query_data = iter(rows_iter)