def _call_fut(self, mapping, schema):
        """Invoke the function under test: ``_row_from_mapping``.

        Imported lazily so the import error (if any) surfaces inside the
        test that calls it, not at collection time.
        """
        from google.cloud.bigquery.table import _row_from_mapping

        result = _row_from_mapping(mapping, schema)
        return result
# Example #2
    async def insert_rows(self, table, rows, selected_fields=None, **kwargs):
        """__Asynchronous__ insertion of rows into a table via the streaming API

        Credit:
        http://google-cloud-python.readthedocs.io/en/latest/_modules/google/cloud
        /bigquery/client.html#Client.insert_rows
        https://cloud.google.com/bigquery/docs/reference/rest/v2/
        tabledata/insertAll

        :type table: One of:
                     :class:`~google.cloud.bigquery.table.Table`
                     :class:`~google.cloud.bigquery.table.TableReference`
        :param table: the destination table for the row data, or a reference
                      to it.

        :type rows: One of:
                    list of tuples
                    list of dictionaries
        :param rows: Row data to be inserted. If a list of tuples is given,
                     each tuple should contain data for each schema field on
                     the current table and in the same order as the schema
                     fields.  If a list of dictionaries is given, the keys must
                     include all required fields in the schema.  Keys which do
                     not correspond to a field in the schema are ignored.

        :type selected_fields:
            list of :class:`~google.cloud.bigquery.schema.SchemaField`
        :param selected_fields:
            The fields to return. Required if ``table`` is a
            :class:`~google.cloud.bigquery.table.TableReference`.

        :type kwargs: dict
        :param kwargs:
            Keyword arguments to
            :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`

        :rtype: list of mappings
        :returns: One mapping per row with insert errors:  the "index" key
                  identifies the row, and the "errors" key contains a list
                  of the mappings describing one or more problems with the
                  row.
        :raises: ValueError if table's schema is not set
        :raises: TypeError if ``table`` is neither a Table nor a
                 TableReference
        """
        # Resolve the schema used to serialize each row: an explicit
        # ``selected_fields`` wins; otherwise it must come from a Table
        # object (a bare TableReference carries no schema).
        if selected_fields is not None:
            schema = selected_fields
        elif isinstance(table, TableReference):
            raise ValueError('need selected_fields with TableReference')
        elif isinstance(table, Table):
            if len(table.schema) == 0:
                raise ValueError(_TABLE_HAS_NO_SCHEMA)
            schema = table.schema
        else:
            raise TypeError('table should be Table or TableReference')

        json_rows = []
        for row in rows:
            # Normalize mapping-style rows to a tuple ordered like ``schema``.
            if isinstance(row, dict):
                row = _row_from_mapping(row, schema)
            json_row = {}

            # Pair each schema field with its value; extra values beyond the
            # schema (or vice versa) are silently dropped by ``zip``.
            for field, value in zip(schema, row):
                converter = _SCALAR_VALUE_TO_JSON_ROW.get(field.field_type)
                if converter is not None:  # STRING doesn't need converting
                    value = converter(value)
                json_row[field.name] = value

            json_rows.append(json_row)

        return await self.insert_rows_json(table, json_rows, **kwargs)