def to_pb(self):
    """Converts the :class:`TimestampRange` to a protobuf.

    Only bounds that are set on this object are copied into the
    protobuf; an unset bound is simply omitted.

    :rtype: :class:`.data_v2_pb2.TimestampRange`
    :returns: The converted current object.
    """
    kwargs = {}
    if self.start is not None:
        kwargs['start_timestamp_micros'] = _microseconds_from_datetime(
            self.start)
    if self.end is not None:
        kwargs['end_timestamp_micros'] = _microseconds_from_datetime(
            self.end)
    return data_v2_pb2.TimestampRange(**kwargs)
def to_pb(self):
    """Converts the :class:`TimestampRange` to a protobuf.

    :rtype: :class:`.data_v2_pb2.TimestampRange`
    :returns: The converted current object.
    """
    # Map each protobuf field to its (possibly unset) datetime bound,
    # keeping only the bounds that were actually provided.
    bounds = (
        ('start_timestamp_micros', self.start),
        ('end_timestamp_micros', self.end),
    )
    pb_kwargs = {
        field: _microseconds_from_datetime(stamp)
        for field, stamp in bounds
        if stamp is not None
    }
    return data_v2_pb2.TimestampRange(**pb_kwargs)
def _cells_to_pairs(cells, include_timestamp=False):
    """Converts list of cells to HappyBase format.

    For example::

      >>> import datetime
      >>> from gcloud.bigtable.row_data import Cell
      >>> cell1 = Cell(b'val1', datetime.datetime.utcnow())
      >>> cell2 = Cell(b'val2', datetime.datetime.utcnow())
      >>> _cells_to_pairs([cell1, cell2])
      [b'val1', b'val2']
      >>> _cells_to_pairs([cell1, cell2], include_timestamp=True)
      [(b'val1', 1456361486255), (b'val2', 1456361491927)]

    :type cells: list
    :param cells: List of :class:`.Cell` returned from a read request.

    :type include_timestamp: bool
    :param include_timestamp: Flag to indicate if cell timestamps should be
                              included with the output.

    :rtype: list
    :returns: List of values in the cell. If ``include_timestamp=True``, each
              value will be a pair, with the first part the bytes value in
              the cell and the second part the number of milliseconds in the
              timestamp on the cell.
    """
    if include_timestamp:
        # HappyBase reports timestamps in milliseconds, hence the // 1000.
        return [
            (cell.value,
             _microseconds_from_datetime(cell.timestamp) // 1000)
            for cell in cells
        ]
    return [cell.value for cell in cells]
def _write_to_row(self, row1=None, row2=None, row3=None, row4=None):
    """Write one cell to each non-``None`` row and return the Cells.

    Cells are stamped one millisecond apart, starting from "now"
    truncated to millisecond granularity, and the expected
    :class:`Cell` objects are returned for all four slots regardless
    of which rows were actually written.
    """
    base_stamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    base_micros = _microseconds_from_datetime(base_stamp)
    # Truncate to millisecond granularity, since that is all Bigtable
    # stores.
    base_micros -= (base_micros % 1000)
    base_stamp = _datetime_from_microseconds(base_micros)
    # 1000 microseconds is a millisecond; space the four stamps one
    # millisecond apart.
    stamps = [base_stamp + datetime.timedelta(microseconds=1000 * offset)
              for offset in range(4)]
    specs = [
        (row1, COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL1),
        (row2, COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL2),
        (row3, COLUMN_FAMILY_ID1, COL_NAME2, CELL_VAL3),
        (row4, COLUMN_FAMILY_ID2, COL_NAME3, CELL_VAL4),
    ]
    expected_cells = []
    for (row, family_id, col_name, cell_val), stamp in zip(specs, stamps):
        if row is not None:
            row.set_cell(family_id, col_name, cell_val, timestamp=stamp)
        expected_cells.append(Cell(cell_val, stamp))
    return tuple(expected_cells)
def _get_expiration_seconds(expiration):
    """Convert 'expiration' to a number of seconds in the future.

    :type expiration: int, long, datetime.datetime, datetime.timedelta
    :param expiration: When the signed URL should expire.

    :raises TypeError: When expiration is not an integer.

    :rtype: int
    :returns: a timestamp as an absolute number of seconds.
    """
    # A timedelta is interpreted relative to the current UTC time.
    if isinstance(expiration, datetime.timedelta):
        expiration = _NOW().replace(tzinfo=UTC) + expiration

    # A datetime collapses to integer seconds since the epoch.
    if isinstance(expiration, datetime.datetime):
        expiration = _microseconds_from_datetime(expiration) // 10**6

    if isinstance(expiration, six.integer_types):
        return expiration
    raise TypeError('Expected an integer timestamp, datetime, or '
                    'timedelta. Got %s' % type(expiration))
def _get_expiration_seconds(expiration):
    """Convert 'expiration' to a number of seconds in the future.

    :type expiration: int, long, datetime.datetime, datetime.timedelta
    :param expiration: When the signed URL should expire.

    :raises TypeError: When expiration is not an integer.

    :rtype: int
    :returns: a timestamp as an absolute number of seconds.
    """
    # Relative expiration: anchor the timedelta at the current UTC time.
    if isinstance(expiration, datetime.timedelta):
        utc_now = _NOW().replace(tzinfo=UTC)
        expiration = utc_now + expiration

    # Absolute expiration: reduce the datetime to whole epoch seconds.
    if isinstance(expiration, datetime.datetime):
        as_micros = _microseconds_from_datetime(expiration)
        expiration = as_micros // 1000000

    if not isinstance(expiration, six.integer_types):
        raise TypeError('Expected an integer timestamp, datetime, or '
                        'timedelta. Got %s' % type(expiration))
    return expiration
def _cells_to_pairs(cells, include_timestamp=False):
    """Converts list of cells to HappyBase format.

    For example::

      >>> import datetime
      >>> from gcloud.bigtable.row_data import Cell
      >>> cell1 = Cell(b'val1', datetime.datetime.utcnow())
      >>> cell2 = Cell(b'val2', datetime.datetime.utcnow())
      >>> _cells_to_pairs([cell1, cell2])
      [b'val1', b'val2']
      >>> _cells_to_pairs([cell1, cell2], include_timestamp=True)
      [(b'val1', 1456361486255), (b'val2', 1456361491927)]

    :type cells: list
    :param cells: List of :class:`.Cell` returned from a read request.

    :type include_timestamp: bool
    :param include_timestamp: Flag to indicate if cell timestamps should be
                              included with the output.

    :rtype: list
    :returns: List of values in the cell. If ``include_timestamp=True``, each
              value will be a pair, with the first part the bytes value in
              the cell and the second part the number of milliseconds in the
              timestamp on the cell.
    """
    pairs = []
    for current in cells:
        entry = current.value
        if include_timestamp:
            # HappyBase uses millisecond timestamps.
            millis = _microseconds_from_datetime(current.timestamp) // 1000
            entry = (entry, millis)
        pairs.append(entry)
    return pairs
def test_it(self):
    """Round-tripping through microseconds reproduces the datetime."""
    import datetime
    from gcloud._helpers import UTC
    from gcloud._helpers import _microseconds_from_datetime

    when = datetime.datetime(2015, 7, 29, 17, 45, 21, 123456, tzinfo=UTC)
    when_micros = _microseconds_from_datetime(when)
    self.assertEqual(self._callFUT(when_micros), when)
def set_cell(self, column_family_id, column, value, timestamp=None,
             state=None):
    """Sets a value in this row.

    The target cell is addressed by this row's ``row_key`` together
    with ``column``, which must live in an existing
    :class:`.column_family.ColumnFamily` (as determined by
    ``column_family_id``).

    .. note::

        This method adds a mutation to the accumulated mutations on
        this :class:`Row`, but does not make an API request. To
        actually send an API request (with the mutations) to the
        Google Cloud Bigtable API, call :meth:`commit`.

    :type column_family_id: str
    :param column_family_id: The column family that contains the column.
                             Must be of the form
                             ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

    :type column: bytes
    :param column: The column within the column family where the cell
                   is located.

    :type value: bytes or :class:`int`
    :param value: The value to set in the cell. If an integer is used,
                  will be interpreted as a 64-bit big-endian signed
                  integer (8 bytes).

    :type timestamp: :class:`datetime.datetime`
    :param timestamp: (Optional) The timestamp of the operation.

    :type state: bool
    :param state: (Optional) The state that the mutation should be
                  applied in. Unset if the mutation is not conditional,
                  otherwise :data:`True` or :data:`False`.
    """
    qualifier = _to_bytes(column)
    if isinstance(value, six.integer_types):
        value = _PACK_I64(value)
    payload = _to_bytes(value)

    if timestamp is None:
        # -1 asks the Bigtable server to stamp the cell with its own time.
        ts_micros = -1
    else:
        ts_micros = _microseconds_from_datetime(timestamp)
        # Bigtable only stores timestamps with millisecond granularity.
        ts_micros -= (ts_micros % 1000)

    set_cell_pb = data_pb2.Mutation.SetCell(
        family_name=column_family_id,
        column_qualifier=qualifier,
        timestamp_micros=ts_micros,
        value=payload,
    )
    self._get_mutations(state).append(
        data_pb2.Mutation(set_cell=set_cell_pb))
def test_it(self):
    """Converting the microsecond count back must yield the original."""
    import datetime
    from gcloud._helpers import UTC
    from gcloud._helpers import _microseconds_from_datetime

    expected = datetime.datetime(2015, 7, 29, 17, 45, 21, 123456,
                                 tzinfo=UTC)
    micros = _microseconds_from_datetime(expected)
    result = self._callFUT(micros)
    self.assertEqual(result, expected)
def _pb_attr_value(val):
    """Given a value, return the protobuf attribute name and proper value.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type.  This function simply determines the
    proper attribute name based on the type of the value provided and
    returns the attribute name as well as a properly formatted value.

    Certain value types need to be coerced into a different type (such
    as a `datetime.datetime` into an integer timestamp, or a
    `gcloud.datastore.key.Key` into a Protobuf representation.  This
    function handles that for you.

    .. note::
       Values which are "text" ('unicode' in Python2, 'str' in Python3) map
       to 'string_value' in the datastore;  values which are "bytes"
       ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

    For example:

    >>> _pb_attr_value(1234)
    ('integer_value', 1234)
    >>> _pb_attr_value('my_string')
    ('string_value', 'my_string')

    :type val: `datetime.datetime`, :class:`gcloud.datastore.key.Key`,
               bool, float, integer, string
    :param val: The value to be scrutinized.

    :returns: A tuple of the attribute name and proper value type.
    """
    # NOTE: the isinstance order below is load-bearing -- bool must be
    # tested before the integer types (bool subclasses int), and text
    # before bytes/str.
    if isinstance(val, datetime.datetime):
        return 'timestamp_microseconds_value', _microseconds_from_datetime(val)
    if isinstance(val, Key):
        return 'key_value', val.to_protobuf()
    if isinstance(val, bool):
        return 'boolean_value', val
    if isinstance(val, float):
        return 'double_value', val
    if isinstance(val, six.integer_types):
        INT_VALUE_CHECKER.CheckValue(val)   # Raise an exception if invalid.
        return 'integer_value', int(val)    # Always cast to an integer.
    if isinstance(val, six.text_type):
        return 'string_value', val
    if isinstance(val, (bytes, str)):
        return 'blob_value', val
    if isinstance(val, Entity):
        return 'entity_value', val
    if isinstance(val, list):
        return 'list_value', val
    raise ValueError("Unknown protobuf attr type %s" % type(val))
def _pb_attr_value(val):
    """Given a value, return the protobuf attribute name and proper value.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type.  This function simply determines the
    proper attribute name based on the type of the value provided and
    returns the attribute name as well as a properly formatted value.

    Certain value types need to be coerced into a different type (such
    as a `datetime.datetime` into an integer timestamp, or a
    `gcloud.datastore.key.Key` into a Protobuf representation.  This
    function handles that for you.

    .. note::
       Values which are "text" ('unicode' in Python2, 'str' in Python3) map
       to 'string_value' in the datastore;  values which are "bytes"
       ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

    For example:

    >>> _pb_attr_value(1234)
    ('integer_value', 1234)
    >>> _pb_attr_value('my_string')
    ('string_value', 'my_string')

    :type val: `datetime.datetime`, :class:`gcloud.datastore.key.Key`,
               bool, float, integer, string
    :param val: The value to be scrutinized.

    :returns: A tuple of the attribute name and proper value type.
    """
    # Branch order matters: bool is a subclass of int, so it must be
    # tested before the integer branch, and text before bytes.
    if isinstance(val, datetime.datetime):
        attr = 'timestamp_microseconds'
        pb_val = _microseconds_from_datetime(val)
    elif isinstance(val, Key):
        attr, pb_val = 'key', val.to_protobuf()
    elif isinstance(val, bool):
        attr, pb_val = 'boolean', val
    elif isinstance(val, float):
        attr, pb_val = 'double', val
    elif isinstance(val, six.integer_types):
        INT_VALUE_CHECKER.CheckValue(val)   # Raise an exception if invalid.
        attr, pb_val = 'integer', int(val)  # Always cast to an integer.
    elif isinstance(val, six.text_type):
        attr, pb_val = 'string', val
    elif isinstance(val, (bytes, str)):
        attr, pb_val = 'blob', val
    elif isinstance(val, Entity):
        attr, pb_val = 'entity', val
    elif isinstance(val, list):
        attr, pb_val = 'list', val
    else:
        raise ValueError("Unknown protobuf attr type %s" % type(val))

    return attr + '_value', pb_val
def test_w_utc_datetime(self):
    """A UTC-aware datetime converts to integer milliseconds."""
    import datetime
    from gcloud._helpers import UTC
    from gcloud._helpers import _microseconds_from_datetime

    now = datetime.datetime.utcnow().replace(tzinfo=UTC)
    expected_millis = _microseconds_from_datetime(now) // 1000
    result = self._callFUT(now)
    self.assertTrue(isinstance(result, int))
    self.assertEqual(result, expected_millis)
def test_w_utc_datetime(self):
    """Conversion of an aware UTC datetime yields millisecond ints."""
    import datetime
    from gcloud._helpers import UTC
    from gcloud._helpers import _microseconds_from_datetime

    stamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
    stamp_micros = _microseconds_from_datetime(stamp)
    converted = self._callFUT(stamp)
    self.assertTrue(isinstance(converted, int))
    self.assertEqual(converted, stamp_micros // 1000)
def _set_cell(self, column_family_id, column, value, timestamp=None,
              state=None):
    """Helper for :meth:`set_cell`

    Adds a mutation to set the value in a specific cell.

    ``state`` is unused by :class:`DirectRow` but is used by
    subclasses.

    :type column_family_id: str
    :param column_family_id: The column family that contains the column.
                             Must be of the form
                             ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

    :type column: bytes
    :param column: The column within the column family where the cell
                   is located.

    :type value: bytes or :class:`int`
    :param value: The value to set in the cell. If an integer is used,
                  will be interpreted as a 64-bit big-endian signed
                  integer (8 bytes).

    :type timestamp: :class:`datetime.datetime`
    :param timestamp: (Optional) The timestamp of the operation.

    :type state: bool
    :param state: (Optional) The state that is passed along to
                  :meth:`_get_mutations`.
    """
    qualifier = _to_bytes(column)
    if isinstance(value, six.integer_types):
        value = _PACK_I64(value)
    payload = _to_bytes(value)

    if timestamp is None:
        # -1 asks the Bigtable server to use its own current time.
        ts_micros = -1
    else:
        ts_micros = _microseconds_from_datetime(timestamp)
        # Bigtable stores timestamps with millisecond granularity only.
        ts_micros -= (ts_micros % 1000)

    set_cell_pb = data_v2_pb2.Mutation.SetCell(
        family_name=column_family_id,
        column_qualifier=qualifier,
        timestamp_micros=ts_micros,
        value=payload,
    )
    self._get_mutations(state).append(
        data_v2_pb2.Mutation(set_cell=set_cell_pb))
def test_w_naive_datetime(self):
    """A naive datetime is treated as UTC before conversion."""
    import datetime
    import six
    from gcloud._helpers import UTC
    from gcloud._helpers import _microseconds_from_datetime

    naive = datetime.datetime.utcnow()
    aware = naive.replace(tzinfo=UTC)
    expected_millis = _microseconds_from_datetime(aware) // 1000
    result = self._callFUT(naive)
    self.assertTrue(isinstance(result, six.integer_types))
    self.assertEqual(result, expected_millis)
def test_w_naive_datetime(self):
    """Naive datetimes convert as if they carried UTC tzinfo."""
    import datetime
    import six
    from gcloud._helpers import UTC
    from gcloud._helpers import _microseconds_from_datetime

    stamp = datetime.datetime.utcnow()
    stamp_utc = stamp.replace(tzinfo=UTC)
    micros = _microseconds_from_datetime(stamp_utc)
    converted = self._callFUT(stamp)
    self.assertTrue(isinstance(converted, six.integer_types))
    self.assertEqual(converted, micros // 1000)
def _set_cell(self, column_family_id, column, value, timestamp=None,
              state=None):
    """Helper for :meth:`set_cell`

    Adds a mutation to set the value in a specific cell.

    ``state`` is unused by :class:`DirectRow` but is used by
    subclasses.

    :type column_family_id: str
    :param column_family_id: The column family that contains the column.
                             Must be of the form
                             ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

    :type column: bytes
    :param column: The column within the column family where the cell
                   is located.

    :type value: bytes or :class:`int`
    :param value: The value to set in the cell. If an integer is used,
                  will be interpreted as a 64-bit big-endian signed
                  integer (8 bytes).

    :type timestamp: :class:`datetime.datetime`
    :param timestamp: (Optional) The timestamp of the operation.

    :type state: bool
    :param state: (Optional) The state that is passed along to
                  :meth:`_get_mutations`.
    """
    col_bytes = _to_bytes(column)
    cell_value = value
    if isinstance(cell_value, six.integer_types):
        # Integers become 64-bit big-endian signed byte strings.
        cell_value = _PACK_I64(cell_value)
    cell_value = _to_bytes(cell_value)

    if timestamp is None:
        micros = -1  # Sentinel: Bigtable supplies the server time.
    else:
        micros = _microseconds_from_datetime(timestamp)
        # Drop sub-millisecond precision; Bigtable ignores it anyway.
        micros -= (micros % 1000)

    mutation = data_v2_pb2.Mutation(
        set_cell=data_v2_pb2.Mutation.SetCell(
            family_name=column_family_id,
            column_qualifier=col_bytes,
            timestamp_micros=micros,
            value=cell_value,
        ))
    self._get_mutations(state).append(mutation)
def test_w_non_utc_datetime(self):
    """A non-UTC aware datetime is normalized before conversion."""
    import datetime
    from gcloud._helpers import _UTC
    from gcloud._helpers import _microseconds_from_datetime

    class CET(_UTC):
        _tzname = 'CET'
        _utcoffset = datetime.timedelta(hours=-1)

    cet = CET()
    when = datetime.datetime(2015, 7, 28, 16, 34, 47, tzinfo=cet)
    expected_millis = _microseconds_from_datetime(when) // 1000
    result = self._callFUT(when)
    self.assertTrue(isinstance(result, int))
    self.assertEqual(result, expected_millis)
def test_w_non_utc_datetime(self):
    """Offsets in non-UTC timezones are honored during conversion."""
    import datetime
    from gcloud._helpers import _UTC
    from gcloud._helpers import _microseconds_from_datetime

    class CET(_UTC):
        _tzname = 'CET'
        _utcoffset = datetime.timedelta(hours=-1)

    stamp = datetime.datetime(2015, 7, 28, 16, 34, 47, tzinfo=CET())
    stamp_micros = _microseconds_from_datetime(stamp)
    converted = self._callFUT(stamp)
    self.assertTrue(isinstance(converted, int))
    self.assertEqual(converted, stamp_micros // 1000)
def _flatten_cells(prd):
    # Match results format from JSON testcases.
    # Doesn't handle error cases.
    from gcloud._helpers import _bytes_to_unicode
    from gcloud._helpers import _microseconds_from_datetime

    for key, row_obj in prd.rows.items():
        for fam_name, fam in row_obj.cells.items():
            for qual, cell_list in fam.items():
                for cell in cell_list:
                    flat = {
                        u'rk': _bytes_to_unicode(key),
                        u'fm': fam_name,
                        u'qual': _bytes_to_unicode(qual),
                        u'ts': _microseconds_from_datetime(cell.timestamp),
                        u'value': _bytes_to_unicode(cell.value),
                        u'label': u' '.join(cell.labels),
                        u'error': False,
                    }
                    yield flat
def _flatten_cells(prd):
    # Match results format from JSON testcases.
    # Doesn't handle error cases.
    from gcloud._helpers import _bytes_to_unicode
    from gcloud._helpers import _microseconds_from_datetime

    for row_key, row in prd.rows.items():
        for family_name, family in row.cells.items():
            for qualifier, column in family.items():
                # One flattened mapping per cell in the column.
                for cell in column:
                    cell_micros = _microseconds_from_datetime(cell.timestamp)
                    yield {
                        u'rk': _bytes_to_unicode(row_key),
                        u'fm': family_name,
                        u'qual': _bytes_to_unicode(qualifier),
                        u'ts': cell_micros,
                        u'value': _bytes_to_unicode(cell.value),
                        u'label': u' '.join(cell.labels),
                        u'error': False,
                    }
def insert_data(self,
                rows,
                row_ids=None,
                skip_invalid_rows=None,
                ignore_unknown_values=None,
                template_suffix=None,
                client=None):
    """API call:  insert table data via a POST request

    See:
    https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll

    :type rows: list of tuples
    :param rows: Row data to be inserted. Each tuple should contain data for
                 each schema field on the current table and in the same order
                 as the schema fields.

    :type row_ids: list of string
    :param row_ids: Unique ids, one per row being inserted.  If not passed,
                    no de-duplication occurs.

    :type skip_invalid_rows: boolean or ``NoneType``
    :param skip_invalid_rows: skip rows w/ invalid data?

    :type ignore_unknown_values: boolean or ``NoneType``
    :param ignore_unknown_values: ignore columns beyond schema?

    :type template_suffix: str or ``NoneType``
    :param template_suffix: treat ``name`` as a template table and provide
                            a suffix. BigQuery will create the table
                            ``<name> + <template_suffix>`` based on the
                            schema of the template table. See:
                            https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables

    :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
    :param client: the client to use.  If not passed, falls back to the
                   ``client`` stored on the current dataset.

    :rtype: list of mappings
    :returns: One mapping per row with insert errors:  the "index" key
              identifies the row, and the "errors" key contains a list
              of the mappings describing one or more problems with the
              row.
    """
    client = self._require_client(client)
    rows_info = []
    data = {'rows': rows_info}

    for index, row in enumerate(rows):
        row_info = {}

        for field, value in zip(self._schema, row):
            if field.field_type == 'TIMESTAMP' and value is not None:
                # BigQuery stores TIMESTAMP data internally as a
                # UNIX timestamp with microsecond precision.
                # Send an exact decimal "seconds.micros" string instead
                # of multiplying by 1e-6:  the float product rounds in
                # binary and can corrupt the final microsecond digit for
                # present-day epoch values.
                micros = _microseconds_from_datetime(value)
                sign = '-' if micros < 0 else ''
                seconds, fraction = divmod(abs(micros), 10**6)
                value = '%s%d.%06d' % (sign, seconds, fraction)
            row_info[field.name] = value

        info = {'json': row_info}
        if row_ids is not None:
            info['insertId'] = row_ids[index]

        rows_info.append(info)

    if skip_invalid_rows is not None:
        data['skipInvalidRows'] = skip_invalid_rows

    if ignore_unknown_values is not None:
        data['ignoreUnknownValues'] = ignore_unknown_values

    if template_suffix is not None:
        data['templateSuffix'] = template_suffix

    response = client.connection.api_request(
        method='POST',
        path='%s/insertAll' % self.path,
        data=data)
    errors = []

    for error in response.get('insertErrors', ()):
        errors.append({'index': int(error['index']),
                       'errors': error['errors']})

    return errors
def set_cell(self, column_family_id, column, value, timestamp=None,
             state=None):
    """Sets a value in this row.

    The cell is determined by the ``row_key`` of the :class:`Row` and the
    ``column``.  The ``column`` must be in an existing
    :class:`.column_family.ColumnFamily` (as determined by
    ``column_family_id``).

    .. note::

        This method adds a mutation to the accumulated mutations on this
        :class:`Row`, but does not make an API request. To actually
        send an API request (with the mutations) to the Google Cloud
        Bigtable API, call :meth:`commit`.

    :type column_family_id: str
    :param column_family_id: The column family that contains the column.
                             Must be of the form
                             ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

    :type column: bytes
    :param column: The column within the column family where the cell
                   is located.

    :type value: bytes or :class:`int`
    :param value: The value to set in the cell. If an integer is used,
                  will be interpreted as a 64-bit big-endian signed
                  integer (8 bytes).

    :type timestamp: :class:`datetime.datetime`
    :param timestamp: (Optional) The timestamp of the operation.

    :type state: bool
    :param state: (Optional) The state that the mutation should be
                  applied in. Unset if the mutation is not conditional,
                  otherwise :data:`True` or :data:`False`.
    """
    col_qualifier = _to_bytes(column)
    cell_value = value
    if isinstance(cell_value, six.integer_types):
        # Pack integers as 64-bit big-endian signed bytes.
        cell_value = _PACK_I64(cell_value)
    cell_value = _to_bytes(cell_value)

    if timestamp is None:
        micros = -1  # Sentinel for "use current Bigtable server time".
    else:
        micros = _microseconds_from_datetime(timestamp)
        # Keep only millisecond granularity.
        micros -= (micros % 1000)

    mutation = data_pb2.Mutation(
        set_cell=data_pb2.Mutation.SetCell(
            family_name=column_family_id,
            column_qualifier=col_qualifier,
            timestamp_micros=micros,
            value=cell_value,
        ))
    self._get_mutations(state).append(mutation)
def _callFUT(self, value):
    """Invoke the helper under test with *value*."""
    from gcloud import _helpers

    return _helpers._microseconds_from_datetime(value)
def insert_data(self,
                rows,
                row_ids=None,
                skip_invalid_rows=None,
                ignore_unknown_values=None,
                client=None):
    """API call:  insert table data via a POST request

    See:
    https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll

    :type rows: list of tuples
    :param rows: Row data to be inserted. Each tuple should contain data for
                 each schema field on the current table and in the same order
                 as the schema fields.

    :type row_ids: list of string
    :param row_ids: Unique ids, one per row being inserted.  If not passed,
                    no de-duplication occurs.

    :type skip_invalid_rows: boolean or ``NoneType``
    :param skip_invalid_rows: skip rows w/ invalid data?

    :type ignore_unknown_values: boolean or ``NoneType``
    :param ignore_unknown_values: ignore columns beyond schema?

    :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
    :param client: the client to use.  If not passed, falls back to the
                   ``client`` stored on the current dataset.

    :rtype: list of mappings
    :returns: One mapping per row with insert errors:  the "index" key
              identifies the row, and the "errors" key contains a list
              of the mappings describing one or more problems with the
              row.
    """
    client = self._require_client(client)
    rows_info = []
    data = {'rows': rows_info}

    for index, row in enumerate(rows):
        row_info = {}

        for field, value in zip(self._schema, row):
            if field.field_type == 'TIMESTAMP' and value is not None:
                # BigQuery stores TIMESTAMP data internally as a
                # UNIX timestamp with microsecond precision.
                # Send an exact decimal "seconds.micros" string instead
                # of multiplying by 1e-6:  the float product rounds in
                # binary and can corrupt the final microsecond digit for
                # present-day epoch values.
                micros = _microseconds_from_datetime(value)
                sign = '-' if micros < 0 else ''
                seconds, fraction = divmod(abs(micros), 10**6)
                value = '%s%d.%06d' % (sign, seconds, fraction)
            row_info[field.name] = value

        info = {'json': row_info}
        if row_ids is not None:
            info['insertId'] = row_ids[index]

        rows_info.append(info)

    if skip_invalid_rows is not None:
        data['skipInvalidRows'] = skip_invalid_rows

    if ignore_unknown_values is not None:
        data['ignoreUnknownValues'] = ignore_unknown_values

    response = client.connection.api_request(
        method='POST',
        path='%s/insertAll' % self.path,
        data=data)
    errors = []

    for error in response.get('insertErrors', ()):
        errors.append({'index': int(error['index']),
                       'errors': error['errors']})

    return errors
def _callFUT(self, value):
    """Delegate to the function under test."""
    from gcloud._helpers import _microseconds_from_datetime

    convert = _microseconds_from_datetime
    return convert(value)