Example #1
    def append_cell_value(self, column_family_id, column, value):
        """Appends a value to an existing cell.

        .. note::

            This method adds a read-modify rule protobuf to the accumulated
            read-modify rules on this row, but does not make an API
            request. To actually send an API request (with the rules) to the
            Google Cloud Bigtable API, call :meth:`commit`.

        :type column_family_id: str
        :param column_family_id: The column family that contains the column.
                                 Must be of the form
                                 ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

        :type column: bytes
        :param column: The column within the column family where the cell
                       is located.

        :type value: bytes
        :param value: The value to append to the existing value in the cell. If
                      the targeted cell is unset, it will be treated as
                      containing the empty string.
        """
        column = _to_bytes(column)
        value = _to_bytes(value)
        rule_pb = data_v2_pb2.ReadModifyWriteRule(
            family_name=column_family_id, column_qualifier=column, append_value=value
        )
        self._rule_pb_list.append(rule_pb)
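Nearly every snippet in this collection funnels column qualifiers, values, and row keys through the `_to_bytes` helper from `google.cloud._helpers`. A minimal sketch of what such a coercion helper does, shown only for orientation (an assumption for illustration, not the library's exact implementation):

def _to_bytes_sketch(value, encoding="ascii"):
    # Coerce str to bytes with the given encoding; pass bytes through untouched.
    if isinstance(value, bytes):
        return value
    if isinstance(value, str):
        return value.encode(encoding)
    raise TypeError("%r could not be converted to bytes" % (value,))

assert _to_bytes_sketch("col") == b"col"
assert _to_bytes_sketch(b"col") == b"col"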
    def to_pb(self):
        """Converts the row filter to a protobuf.

        First converts to a :class:`.data_v2_pb2.ColumnRange` and then uses it
        in the ``column_range_filter`` field.

        :rtype: :class:`.data_v2_pb2.RowFilter`
        :returns: The converted current object.
        """
        column_range_kwargs = {"family_name": self.column_family_id}
        if self.start_column is not None:
            if self.inclusive_start:
                key = "start_qualifier_closed"
            else:
                key = "start_qualifier_open"
            column_range_kwargs[key] = _to_bytes(self.start_column)
        if self.end_column is not None:
            if self.inclusive_end:
                key = "end_qualifier_closed"
            else:
                key = "end_qualifier_open"
            column_range_kwargs[key] = _to_bytes(self.end_column)

        column_range = data_v2_pb2.ColumnRange(**column_range_kwargs)
        return data_v2_pb2.RowFilter(column_range_filter=column_range)
    def drop_by_prefix(self, row_key_prefix, timeout=None):
        """
        :type row_prefix: bytes
        :param row_prefix: Delete all rows that start with this row key
                            prefix. Prefix cannot be zero length.

        :type timeout: float
        :param timeout: (Optional) The amount of time, in seconds, to wait
                        for the request to complete.

        :raise: google.api_core.exceptions.GoogleAPICallError: If the
                request failed for any reason.
                google.api_core.exceptions.RetryError: If the request failed
                due to a retryable error and retry attempts failed.
                ValueError: If the parameters are invalid.
        """
        client = self._instance._client
        table_admin_client = client.table_admin_client
        if timeout:
            table_admin_client.drop_row_range(
                self.name, row_key_prefix=_to_bytes(row_key_prefix),
                timeout=timeout)
        else:
            table_admin_client.drop_row_range(
                self.name, row_key_prefix=_to_bytes(row_key_prefix))
    def to_pb(self):
        """Converts the row filter to a protobuf.

        First converts to a :class:`.data_v2_pb2.ValueRange` and then uses
        it to create a row filter protobuf.

        :rtype: :class:`.data_v2_pb2.RowFilter`
        :returns: The converted current object.
        """
        value_range_kwargs = {}
        if self.start_value is not None:
            if self.inclusive_start:
                key = "start_value_closed"
            else:
                key = "start_value_open"
            value_range_kwargs[key] = _to_bytes(self.start_value)
        if self.end_value is not None:
            if self.inclusive_end:
                key = "end_value_closed"
            else:
                key = "end_value_open"
            value_range_kwargs[key] = _to_bytes(self.end_value)

        value_range = data_v2_pb2.ValueRange(**value_range_kwargs)
        return data_v2_pb2.RowFilter(value_range_filter=value_range)
Example #5
def _create_row_request(table_name, row_key=None, start_key=None, end_key=None,
                        filter_=None, limit=None):
    """Creates a request to read rows in a table.

    :type table_name: str
    :param table_name: The name of the table to read from.

    :type row_key: bytes
    :param row_key: (Optional) The key of a specific row to read from.

    :type start_key: bytes
    :param start_key: (Optional) The beginning of a range of row keys to
                      read from. The range will include ``start_key``. If
                      left empty, will be interpreted as the empty string.

    :type end_key: bytes
    :param end_key: (Optional) The end of a range of row keys to read from.
                    The range will not include ``end_key``. If left empty,
                    will be interpreted as an infinite string.

    :type filter_: :class:`.RowFilter`
    :param filter_: (Optional) The filter to apply to the contents of the
                    specified row(s). If unset, reads the entire table.

    :type limit: int
    :param limit: (Optional) The read will terminate after committing to N
                  rows' worth of results. The default (zero) is to return
                  all results.

    :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest`
    :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs.
    :raises: :class:`ValueError <exceptions.ValueError>` if both
             ``row_key`` and one of ``start_key`` and ``end_key`` are set
    """
    request_kwargs = {'table_name': table_name}
    if (row_key is not None and
            (start_key is not None or end_key is not None)):
        raise ValueError('Row key and row range cannot be '
                         'set simultaneously')
    range_kwargs = {}
    if start_key is not None or end_key is not None:
        if start_key is not None:
            range_kwargs['start_key_closed'] = _to_bytes(start_key)
        if end_key is not None:
            range_kwargs['end_key_open'] = _to_bytes(end_key)
    if filter_ is not None:
        request_kwargs['filter'] = filter_.to_pb()
    if limit is not None:
        request_kwargs['rows_limit'] = limit

    message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs)

    if row_key is not None:
        message.rows.row_keys.append(_to_bytes(row_key))

    if range_kwargs:
        message.rows.row_ranges.add(**range_kwargs)

    return message
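A rough, dict-based sketch of the same validation and range logic, with plain dicts standing in for the protobuf messages (the function and key names below are hypothetical):

def build_read_request(table_name, row_key=None, start_key=None, end_key=None):
    # Mirrors _create_row_request: a single row key and a key range are mutually exclusive.
    if row_key is not None and (start_key is not None or end_key is not None):
        raise ValueError('Row key and row range cannot be set simultaneously')
    request = {'table_name': table_name, 'row_keys': [], 'row_ranges': []}
    if row_key is not None:
        request['row_keys'].append(row_key)
    if start_key is not None or end_key is not None:
        row_range = {}
        if start_key is not None:
            row_range['start_key_closed'] = start_key  # range includes start_key
        if end_key is not None:
            row_range['end_key_open'] = end_key        # range excludes end_key
        request['row_ranges'].append(row_range)
    return request

assert build_read_request('t', start_key=b'a', end_key=b'b')['row_ranges'] == [
    {'start_key_closed': b'a', 'end_key_open': b'b'}]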
Example #6
    def _set_cell(self, column_family_id, column, value, timestamp=None,
                  state=None):
        """Helper for :meth:`set_cell`

        Adds a mutation to set the value in a specific cell.

        ``state`` is unused by :class:`DirectRow` but is used by
        subclasses.

        :type column_family_id: str
        :param column_family_id: The column family that contains the column.
                                 Must be of the form
                                 ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

        :type column: bytes
        :param column: The column within the column family where the cell
                       is located.

        :type value: bytes or :class:`int`
        :param value: The value to set in the cell. If an integer is used,
                      will be interpreted as a 64-bit big-endian signed
                      integer (8 bytes).

        :type timestamp: :class:`datetime.datetime`
        :param timestamp: (Optional) The timestamp of the operation.

        :type state: bool
        :param state: (Optional) The state that is passed along to
                      :meth:`_get_mutations`.
        """
        column = _to_bytes(column)
        if isinstance(value, six.integer_types):
            value = _PACK_I64(value)
        value = _to_bytes(value)
        if timestamp is None:
            # Use -1 for current Bigtable server time.
            timestamp_micros = -1
        else:
            timestamp_micros = _microseconds_from_datetime(timestamp)
            # Truncate to millisecond granularity.
            timestamp_micros -= (timestamp_micros % 1000)

        mutation_val = data_v2_pb2.Mutation.SetCell(
            family_name=column_family_id,
            column_qualifier=column,
            timestamp_micros=timestamp_micros,
            value=value,
        )
        mutation_pb = data_v2_pb2.Mutation(set_cell=mutation_val)
        self._get_mutations(state).append(mutation_pb)
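The integer handling and timestamp truncation in `_set_cell` can be reproduced with the standard library alone. A small sketch, assuming `_PACK_I64` packs a 64-bit big-endian signed integer as the docstring describes:

import struct

_PACK_I64_SKETCH = struct.Struct('>q').pack  # 8-byte big-endian signed integer

def truncate_to_millis(timestamp_micros):
    # Bigtable keeps millisecond granularity, so drop the sub-millisecond part.
    return timestamp_micros - (timestamp_micros % 1000)

assert _PACK_I64_SKETCH(1) == b'\x00\x00\x00\x00\x00\x00\x00\x01'
assert truncate_to_millis(1234567) == 1234000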
    def to_dict(self):
        """Convert the cells to a dictionary.

        This is intended to be used with HappyBase, so the column family and
        column qualifiers are combined (with ``:``).

        :rtype: dict
        :returns: Dictionary containing all the data in the cells of this row.
        """
        result = {}
        for column_family_id, columns in six.iteritems(self._cells):
            for column_qual, cells in six.iteritems(columns):
                key = _to_bytes(column_family_id) + b":" + _to_bytes(column_qual)
                result[key] = cells
        return result
Example #8
    def increment_cell_value(self, column_family_id, column, int_value):
        """Increments a value in an existing cell.

        Assumes the value in the cell is stored as a 64 bit integer
        serialized to bytes.

        .. note::

            This method adds a read-modify rule protobuf to the accumulated
            read-modify rules on this row, but does not make an API
            request. To actually send an API request (with the rules) to the
            Google Cloud Bigtable API, call :meth:`commit`.

        :type column_family_id: str
        :param column_family_id: The column family that contains the column.
                                 Must be of the form
                                 ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

        :type column: bytes
        :param column: The column within the column family where the cell
                       is located.

        :type int_value: int
        :param int_value: The value to increment the existing value in the cell
                          by. If the targeted cell is unset, it will be treated
                          as containing a zero. Otherwise, the targeted cell
                          must contain an 8-byte value (interpreted as a 64-bit
                          big-endian signed integer), or the entire request
                          will fail.
        """
        column = _to_bytes(column)
        rule_pb = data_v2_pb2.ReadModifyWriteRule(
            family_name=column_family_id, column_qualifier=column, increment_amount=int_value
        )
        self._rule_pb_list.append(rule_pb)
Example #9
    def test_sync_recognize_content_with_optional_params_no_gax(self):
        from base64 import b64encode
        from google.cloud._helpers import _to_bytes
        from google.cloud._helpers import _bytes_to_unicode

        from google.cloud._testing import _Monkey
        from google.cloud.speech import client as MUT
        from google.cloud import speech
        from google.cloud.speech.sample import Sample
        from google.cloud.speech.transcript import Transcript
        from unit_tests._fixtures import SYNC_RECOGNIZE_RESPONSE

        _AUDIO_CONTENT = _to_bytes(self.AUDIO_CONTENT)
        _B64_AUDIO_CONTENT = _bytes_to_unicode(b64encode(_AUDIO_CONTENT))
        RETURNED = SYNC_RECOGNIZE_RESPONSE
        REQUEST = {
            'config': {
                'encoding': 'FLAC',
                'maxAlternatives': 2,
                'sampleRate': 16000,
                'speechContext': {
                    'phrases': [
                        'hi',
                    ]
                },
                'languageCode': 'EN',
                'profanityFilter': True,
            },
            'audio': {
                'content': _B64_AUDIO_CONTENT,
            }
        }
        credentials = _Credentials()
        client = self._makeOne(credentials=credentials, use_gax=False)
        client.connection = _Connection(RETURNED)

        encoding = speech.Encoding.FLAC

        sample = Sample(content=self.AUDIO_CONTENT, encoding=encoding,
                        sample_rate=self.SAMPLE_RATE)
        with _Monkey(MUT, _USE_GAX=False):
            response = client.sync_recognize(sample,
                                             language_code='EN',
                                             max_alternatives=2,
                                             profanity_filter=True,
                                             speech_context=self.HINTS)

        self.assertEqual(len(client.connection._requested), 1)
        req = client.connection._requested[0]
        self.assertEqual(len(req), 3)
        self.assertEqual(req['data'], REQUEST)
        self.assertEqual(req['method'], 'POST')
        self.assertEqual(req['path'], 'speech:syncrecognize')

        alternative = SYNC_RECOGNIZE_RESPONSE['results'][0]['alternatives'][0]
        expected = Transcript.from_api_repr(alternative)
        self.assertEqual(len(response), 1)
        self.assertIsInstance(response[0], Transcript)
        self.assertEqual(response[0].transcript, expected.transcript)
        self.assertEqual(response[0].confidence, expected.confidence)
    def from_legacy_urlsafe(cls, urlsafe):
        """Convert urlsafe string to :class:`~google.cloud.datastore.key.Key`.

        This is intended to work with the "legacy" representation of a
        datastore "Key" used within Google App Engine (a so-called
        "Reference"). This assumes that ``urlsafe`` was created within an App
        Engine app via something like ``ndb.Key(...).urlsafe()``.

        :type urlsafe: bytes or unicode
        :param urlsafe: The base64 encoded (ASCII) string corresponding to a
                        datastore "Key" / "Reference".

        :rtype: :class:`~google.cloud.datastore.key.Key`.
        :returns: The key corresponding to ``urlsafe``.
        """
        urlsafe = _to_bytes(urlsafe, encoding="ascii")
        padding = b"=" * (-len(urlsafe) % 4)
        urlsafe += padding
        raw_bytes = base64.urlsafe_b64decode(urlsafe)

        reference = _app_engine_key_pb2.Reference()
        reference.ParseFromString(raw_bytes)

        project = _clean_app(reference.app)
        namespace = _get_empty(reference.name_space, u"")
        _check_database_id(reference.database_id)
        flat_path = _get_flat_path(reference.path)
        return cls(*flat_path, project=project, namespace=namespace)
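The padding arithmetic in `from_legacy_urlsafe` is easy to miss: the urlsafe form strips the trailing `=` padding, so it has to be restored before decoding. A standalone sketch of just that step:

import base64

def pad_and_decode(urlsafe):
    # Restore the 0-3 '=' padding bytes that the urlsafe form strips, then decode.
    if isinstance(urlsafe, str):
        urlsafe = urlsafe.encode('ascii')
    padding = b'=' * (-len(urlsafe) % 4)
    return base64.urlsafe_b64decode(urlsafe + padding)

assert pad_and_decode('aGVsbG8') == b'hello'  # 'aGVsbG8=' with its padding stripped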
Example #11
    def _create_test_helper(self, initial_split_keys, column_families=()):
        from google.cloud._helpers import _to_bytes
        from unit_tests._testing import _FakeStub

        client = _Client()
        instance = _Instance(self.INSTANCE_NAME, client=client)
        table = self._makeOne(self.TABLE_ID, instance)

        # Create request_pb
        splits_pb = [
            _CreateTableRequestSplitPB(key=_to_bytes(key))
            for key in initial_split_keys or ()
        ]
        table_pb = None
        if column_families:
            table_pb = _TablePB()
            for cf in column_families:
                cf_pb = table_pb.column_families[cf.column_family_id]
                if cf.gc_rule is not None:
                    cf_pb.gc_rule.MergeFrom(cf.gc_rule.to_pb())
        request_pb = _CreateTableRequestPB(
            initial_splits=splits_pb,
            parent=self.INSTANCE_NAME,
            table_id=self.TABLE_ID,
            table=table_pb,
        )

        # Create response_pb
        response_pb = _TablePB()

        # Patch the stub used by the API method.
        client._table_stub = stub = _FakeStub(response_pb)

        # Create expected_result.
        expected_result = None  # create() has no return value.

        # Perform the method and check the result.
        result = table.create(
            initial_split_keys=initial_split_keys, column_families=column_families
        )
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [("CreateTable", (request_pb,), {})])
    def create(self, initial_split_keys=[], column_families={}):
        """Creates this table.

        .. note::

            A create request returns a
            :class:`._generated.table_pb2.Table` but we don't use
            this response.

        :type initial_split_keys: list
        :param initial_split_keys: (Optional) list of row keys in bytes that
                                   will be used to initially split the table
                                   into several tablets.

        :type column_families: dict
        :param column_families: (Optional) A map of columns to create. The key
                                is the column_id str and the value is a
                                :class:`GarbageCollectionRule`
        """
        table_client = self._instance._client.table_admin_client
        instance_name = self._instance.name

        families = {id: ColumnFamily(id, self, rule).to_pb()
                    for (id, rule) in column_families.items()}
        table = admin_messages_v2_pb2.Table(column_families=families)

        split = table_admin_messages_v2_pb2.CreateTableRequest.Split
        splits = [split(key=_to_bytes(key)) for key in initial_split_keys]

        table_client.create_table(parent=instance_name, table_id=self.table_id,
                                  table=table, initial_splits=splits)
Example #13
def _get_encryption_headers(key, source=False):
    """Builds customer encryption key headers

    :type key: bytes
    :param key: 32 byte key to build request key and hash.

    :type source: bool
    :param source: If true, return headers for the "source" blob; otherwise,
                   return headers for the "destination" blob.

    :rtype: dict
    :returns: dict of HTTP headers being sent in request.
    """
    if key is None:
        return {}

    key = _to_bytes(key)
    key_hash = hashlib.sha256(key).digest()
    key_hash = base64.b64encode(key_hash).rstrip()
    key = base64.b64encode(key).rstrip()

    if source:
        prefix = 'X-Goog-Copy-Source-Encryption-'
    else:
        prefix = 'X-Goog-Encryption-'

    return {
        prefix + 'Algorithm': 'AES256',
        prefix + 'Key': _bytes_to_unicode(key),
        prefix + 'Key-Sha256': _bytes_to_unicode(key_hash),
    }
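A self-contained sketch of the header values such a helper produces for a customer-supplied encryption key (the key below is a placeholder, not a real key):

import base64
import hashlib

key = b'0' * 32  # placeholder 32-byte key
encoded_key = base64.b64encode(key).decode('ascii')
key_sha256 = base64.b64encode(hashlib.sha256(key).digest()).decode('ascii')
headers = {
    'X-Goog-Encryption-Algorithm': 'AES256',
    'X-Goog-Encryption-Key': encoded_key,
    'X-Goog-Encryption-Key-Sha256': key_sha256,
}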
    def _filter_row_ranges(self):
        """ Helper for :meth:`build_updated_request`"""
        new_row_ranges = []

        for row_range in self.message.rows.row_ranges:
            # if current end_key (open or closed) is set, return its value,
            # if not, set to empty string ('').
            # NOTE: Empty string in end_key means "end of table"
            end_key = self._end_key_set(row_range)
            # if end_key is already read, skip to the next row_range
            if end_key and self._key_already_read(end_key):
                continue

            # if current start_key (open or closed) is set, return its value,
            # if not, then set to empty string ('')
            # NOTE: Empty string in start_key means "beginning of table"
            start_key = self._start_key_set(row_range)

            # if start_key was already read or doesn't exist,
            # create a row_range with last_scanned_key as start_key_open
            # to be passed to retry request
            retry_row_range = row_range
            if self._key_already_read(start_key):
                retry_row_range = copy.deepcopy(row_range)
                retry_row_range.start_key_closed = _to_bytes("")
                retry_row_range.start_key_open = self.last_scanned_key

            new_row_ranges.append(retry_row_range)

        return new_row_ranges
Example #15
def _message_pb_from_mapping(message):
    """Helper for :meth:`_PublisherAPI.topic_publish`.

    Performs "impedance matching" between the protobuf attrs and the keys
    expected in the JSON API.
    """
    return PubsubMessage(data=_to_bytes(message['data']),
                         attributes=message['attributes'])
    def get_range_kwargs(self):
        """ Convert row range object to dict which can be passed to
        google.bigtable.v2.RowRange add method.
        """
        range_kwargs = {}
        if self.start_key is not None:
            start_key_key = "start_key_open"
            if self.start_inclusive:
                start_key_key = "start_key_closed"
            range_kwargs[start_key_key] = _to_bytes(self.start_key)

        if self.end_key is not None:
            end_key_key = "end_key_open"
            if self.end_inclusive:
                end_key_key = "end_key_closed"
            range_kwargs[end_key_key] = _to_bytes(self.end_key)
        return range_kwargs
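The same closed/open naming convention drives the column-range and value-range filters earlier in this collection. A plain-Python sketch of the pattern, with field names borrowed from the snippet above:

def range_kwargs_sketch(start=None, end=None, start_inclusive=True, end_inclusive=False):
    # Pick the *_closed or *_open field name based on the inclusivity flags.
    kwargs = {}
    if start is not None:
        kwargs['start_key_closed' if start_inclusive else 'start_key_open'] = start
    if end is not None:
        kwargs['end_key_closed' if end_inclusive else 'end_key_open'] = end
    return kwargs

assert range_kwargs_sketch(b'a', b'z') == {'start_key_closed': b'a', 'end_key_open': b'z'}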
Example #17
    def __init__(self, client, content=None, source_uri=None):
        self.client = client
        self._content = None
        self._source = None

        if source_uri:
            self._source = source_uri
        else:
            self._content = _bytes_to_unicode(b64encode(_to_bytes(content)))
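The `content` handling here is just a base64 round trip so raw bytes can be embedded in a JSON request body; a standard-library sketch:

from base64 import b64decode, b64encode

raw = b'\x89PNG\r\n\x1a\n'                 # placeholder image bytes
content = b64encode(raw).decode('utf-8')   # unicode string, safe to put in JSON
assert b64decode(content) == raw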
Example #18
    def __init__(self, image_source, client):
        self.client = client
        self._content = None
        self._source = None

        if _bytes_to_unicode(image_source).startswith('gs://'):
            self._source = image_source
        else:
            self._content = b64encode(_to_bytes(image_source))
Example #19
    def _delete_cells(self, column_family_id, columns, time_range=None,
                      state=None):
        """Helper for :meth:`delete_cell` and :meth:`delete_cells`.

        ``state`` is unused by :class:`DirectRow` but is used by
        subclasses.

        :type column_family_id: str
        :param column_family_id: The column family that contains the column
                                 or columns with cells being deleted. Must be
                                 of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

        :type columns: :class:`list` of :class:`str` /
                       :func:`unicode <unicode>`, or :class:`object`
        :param columns: The columns within the column family that will have
                        cells deleted. If :attr:`ALL_COLUMNS` is used then
                        the entire column family will be deleted from the row.

        :type time_range: :class:`TimestampRange`
        :param time_range: (Optional) The range of time within which cells
                           should be deleted.

        :type state: bool
        :param state: (Optional) The state that is passed along to
                      :meth:`_get_mutations`.
        """
        mutations_list = self._get_mutations(state)
        if columns is self.ALL_COLUMNS:
            mutation_val = data_v2_pb2.Mutation.DeleteFromFamily(
                family_name=column_family_id,
            )
            mutation_pb = data_v2_pb2.Mutation(delete_from_family=mutation_val)
            mutations_list.append(mutation_pb)
        else:
            delete_kwargs = {}
            if time_range is not None:
                delete_kwargs['time_range'] = time_range.to_pb()

            to_append = []
            for column in columns:
                column = _to_bytes(column)
                # time_range will never change if present, but the rest of
                # delete_kwargs will
                delete_kwargs.update(
                    family_name=column_family_id,
                    column_qualifier=column,
                )
                mutation_val = data_v2_pb2.Mutation.DeleteFromColumn(
                    **delete_kwargs)
                mutation_pb = data_v2_pb2.Mutation(
                    delete_from_column=mutation_val)
                to_append.append(mutation_pb)

            # We don't add the mutations until all columns have been
            # processed without error.
            mutations_list.extend(to_append)
    def _update_message_request(self, message):
        """Add row keys and row range to given request message

        :type message: :class:`data_messages_v2_pb2.ReadRowsRequest`
        :param message: The ``ReadRowsRequest`` protobuf
        """
        for each in self.row_keys:
            message.rows.row_keys.append(_to_bytes(each))

        for each in self.row_ranges:
            r_kwargs = each.get_range_kwargs()
            message.rows.row_ranges.add(**r_kwargs)
Example #21
    def append_cell_value(self, column_family_id, column, value):
        """Appends a value to an existing cell.

        .. note::

            This method adds a read-modify rule protobuf to the accumulated
            read-modify rules on this row, but does not make an API
            request. To actually send an API request (with the rules) to the
            Google Cloud Bigtable API, call :meth:`commit`.

        For example:

        .. literalinclude:: snippets_table.py
            :start-after: [START bigtable_row_append_cell_value]
            :end-before: [END bigtable_row_append_cell_value]
            :dedent: 4

        :type column_family_id: str
        :param column_family_id: The column family that contains the column.
                                 Must be of the form
                                 ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

        :type column: bytes
        :param column: The column within the column family where the cell
                       is located.

        :type value: bytes
        :param value: The value to append to the existing value in the cell. If
                      the targeted cell is unset, it will be treated as
                      containing the empty string.
        """
        column = _to_bytes(column)
        value = _to_bytes(value)
        rule_pb = data_v2_pb2.ReadModifyWriteRule(
            family_name=column_family_id, column_qualifier=column, append_value=value
        )
        self._rule_pb_list.append(rule_pb)
Example #22
class TestClient(unittest.TestCase):
    PROJECT = 'PROJECT'
    IMAGE_SOURCE = 'gs://some/image.jpg'
    IMAGE_CONTENT = _to_bytes('/9j/4QNURXhpZgAASUkq')
    B64_IMAGE_CONTENT = base64.b64encode(IMAGE_CONTENT)

    def _getTargetClass(self):
        from google.cloud.vision.client import Client
        return Client

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        self.assertEqual(client.project, self.PROJECT)
        self.assertTrue('annotate' in dir(client))

    def test_face_annotation(self):
        from google.cloud.vision._fixtures import FACE_DETECTION_RESPONSE

        RETURNED = FACE_DETECTION_RESPONSE
        REQUEST = {
            "requests": [{
                "image": {
                    "content": self.B64_IMAGE_CONTENT
                },
                "features": [{
                    "maxResults": 3,
                    "type": "FACE_DETECTION"
                }]
            }]
        }
        credentials = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=credentials)
        client.connection = _Connection(RETURNED)

        from google.cloud.vision.feature import Feature, FeatureTypes

        features = [
            Feature(feature_type=FeatureTypes.FACE_DETECTION, max_results=3)
        ]

        response = client.annotate(self.IMAGE_CONTENT, features)

        self.assertEqual(REQUEST, client.connection._requested[0]['data'])

        self.assertTrue('faceAnnotations' in response)
def _next_char(str_val, index):
    """Gets the next character based on a position in a string.

    :type str_val: str
    :param str_val: A string containing the character to update.

    :type index: int
    :param index: An integer index in ``str_val``.

    :rtype: str
    :returns: The next character after the character at ``index``
              in ``str_val``.
    """
    ord_val = six.indexbytes(str_val, index)
    return _to_bytes(chr(ord_val + 1), encoding='latin-1')
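A helper like `_next_char` is typically used to turn a prefix scan into a half-open key range; a Python 3 sketch of that use (the prefix-scan framing is an assumption here, not stated by the snippet):

def next_char_sketch(byte_string, index):
    # Python 3 equivalent: bump the byte at ``index`` by one.
    return bytes([byte_string[index] + 1])

prefix = b'row'
end_key = prefix[:-1] + next_char_sketch(prefix, len(prefix) - 1)
assert end_key == b'rox'  # scanning [b'row', b'rox') covers every key starting with 'row'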
Example #24
class TestVisionImage(unittest.TestCase):
    _IMAGE_SOURCE = 'gs://some/image.jpg'
    _IMAGE_CONTENT = _to_bytes('/9j/4QNURXhpZgAASUkq')
    _B64_IMAGE_CONTENT = base64.b64encode(_IMAGE_CONTENT)
    _CLIENT_MOCK = {'source': ''}

    def _getTargetClass(self):
        from google.cloud.vision.image import Image
        return Image

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_image_source_type_content(self):
        image = self._makeOne(self._IMAGE_CONTENT, self._CLIENT_MOCK)

        _AS_DICT = {
            'content': self._B64_IMAGE_CONTENT
        }

        self.assertEqual(self._B64_IMAGE_CONTENT, image.content)
        self.assertEqual(None, image.source)
        self.assertEqual(_AS_DICT, image.as_dict())

    def test_image_source_type_google_cloud_storage(self):
        image = self._makeOne(self._IMAGE_SOURCE, self._CLIENT_MOCK)

        _AS_DICT = {
            'source': {
                'gcs_image_uri': self._IMAGE_SOURCE
            }
        }

        self.assertEqual(self._IMAGE_SOURCE, image.source)
        self.assertEqual(None, image.content)
        self.assertEqual(_AS_DICT, image.as_dict())

    def test_cannot_set_both_source_and_content(self):
        image = self._makeOne(self._IMAGE_CONTENT, self._CLIENT_MOCK)

        self.assertEqual(self._B64_IMAGE_CONTENT, image.content)
        with self.assertRaises(AttributeError):
            image.source = self._IMAGE_SOURCE

        image = self._makeOne(self._IMAGE_SOURCE, self._CLIENT_MOCK)
        self.assertEqual(self._IMAGE_SOURCE, image.source)
        with self.assertRaises(AttributeError):
            image.content = self._IMAGE_CONTENT
Example #25
def _generate_faux_mime_message(parser, response):
    """Convert response, content -> (multipart) email.message.

    Helper for _unpack_batch_response.
    """
    # We coerce to bytes to get consistent concat across
    # Py2 and Py3. Percent formatting is insufficient since
    # it includes the b in Py3.
    content_type = _helpers._to_bytes(response.headers.get("content-type", ""))

    faux_message = b"".join([
        b"Content-Type: ", content_type, b"\nMIME-Version: 1.0\n\n",
        response.content
    ])

    return parser.parsestr(faux_message.decode("utf-8"))
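The faux MIME trick above exists only so the standard `email` parser can split a batch HTTP response into its parts; a self-contained sketch of that parsing step:

from email.parser import Parser

raw = (
    'Content-Type: multipart/mixed; boundary=BOUNDARY\nMIME-Version: 1.0\n\n'
    '--BOUNDARY\nContent-Type: text/plain\n\nfirst part\n--BOUNDARY--\n'
)
message = Parser().parsestr(raw)
assert message.is_multipart()
assert message.get_payload(0).get_content_type() == 'text/plain'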
    def test__update_message_request(self):
        row_set = self._make_one()
        table_name = 'table_name'
        row_set.add_row_key("row_key1")
        row_range1 = RowRange(b"row_key21", b"row_key29")
        row_set.add_row_range(row_range1)

        request = _ReadRowsRequestPB(table_name=table_name)
        row_set._update_message_request(request)

        expected_request = _ReadRowsRequestPB(table_name=table_name)
        expected_request.rows.row_keys.append(_to_bytes("row_key1"))

        expected_request.rows.row_ranges.add(**row_range1.get_range_kwargs())

        self.assertEqual(request, expected_request)
Example #27
def _set_encryption_headers(key, headers):
    """Builds customer encryption key headers

    :type key: str or bytes
    :param key: 32 byte key to build request key and hash.

    :type headers: dict
    :param headers: dict of HTTP headers being sent in request.
    """
    key = _to_bytes(key)
    sha256_key = hashlib.sha256(key).digest()
    key_hash = base64.b64encode(sha256_key).rstrip()
    encoded_key = base64.b64encode(key).rstrip()
    headers['X-Goog-Encryption-Algorithm'] = 'AES256'
    headers['X-Goog-Encryption-Key'] = _bytes_to_unicode(encoded_key)
    headers['X-Goog-Encryption-Key-Sha256'] = _bytes_to_unicode(key_hash)
Example #28
    def test__update_message_request(self):
        row_set = self._make_one()
        table_name = 'table_name'
        row_set.add_row_key("row_key1")
        row_range1 = RowRange(b"row_key21", b"row_key29")
        row_set.add_row_range(row_range1)

        request = _ReadRowsRequestPB(table_name=table_name)
        row_set._update_message_request(request)

        expected_request = _ReadRowsRequestPB(table_name=table_name)
        expected_request.rows.row_keys.append(_to_bytes("row_key1"))

        expected_request.rows.row_ranges.add(**row_range1.get_range_kwargs())

        self.assertEqual(request, expected_request)
Example #29
def _set_encryption_headers(key, headers):
    """Builds customer encryption key headers

    :type key: str or bytes
    :param key: 32 byte key to build request key and hash.

    :type headers: dict
    :param headers: dict of HTTP headers being sent in request.
    """
    key = _to_bytes(key)
    sha256_key = hashlib.sha256(key).digest()
    key_hash = base64.b64encode(sha256_key).rstrip()
    encoded_key = base64.b64encode(key).rstrip()
    headers['X-Goog-Encryption-Algorithm'] = 'AES256'
    headers['X-Goog-Encryption-Key'] = _bytes_to_unicode(encoded_key)
    headers['X-Goog-Encryption-Key-Sha256'] = _bytes_to_unicode(key_hash)
Example #30
def _to_gapic_image(image):
    """Helper function to convert an ``Image`` to a gRPC ``Image``.

    :type image: :class:`~google.cloud.vision.image.Image`
    :param image: Local ``Image`` class to be converted to gRPC ``Image``.

    :rtype: :class:`~google.cloud.grpc.vision.v1.image_annotator_pb2.Image`
    :returns: gRPC ``Image`` converted from
              :class:`~google.cloud.vision.image.Image`.
    """
    if image.content is not None:
        return image_annotator_pb2.Image(content=_to_bytes(image.content))
    if image.source is not None:
        return image_annotator_pb2.Image(
            source=image_annotator_pb2.ImageSource(
                gcs_image_uri=image.source), )
    raise ValueError('No image content or source found.')
    def _create_test_helper(self, initial_split_keys, column_families=()):
        from google.cloud._helpers import _to_bytes
        from tests.unit._testing import _FakeStub

        client = _Client()
        instance = _Instance(self.INSTANCE_NAME, client=client)
        table = self._make_one(self.TABLE_ID, instance)

        # Create request_pb
        splits_pb = [
            _CreateTableRequestSplitPB(key=_to_bytes(key))
            for key in initial_split_keys or ()
        ]
        table_pb = None
        if column_families:
            table_pb = _TablePB()
            for cf in column_families:
                cf_pb = table_pb.column_families[cf.column_family_id]
                if cf.gc_rule is not None:
                    cf_pb.gc_rule.MergeFrom(cf.gc_rule.to_pb())
        request_pb = _CreateTableRequestPB(
            initial_splits=splits_pb,
            parent=self.INSTANCE_NAME,
            table_id=self.TABLE_ID,
            table=table_pb,
        )

        # Create response_pb
        response_pb = _TablePB()

        # Patch the stub used by the API method.
        client._table_stub = stub = _FakeStub(response_pb)

        # Create expected_result.
        expected_result = None  # create() has no return value.

        # Perform the method and check the result.
        result = table.create(initial_split_keys=initial_split_keys,
                              column_families=column_families)
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'CreateTable',
            (request_pb, ),
            {},
        )])
def _generate_faux_mime_message(parser, response):
    """Convert response, content -> (multipart) email.message.

    Helper for _unpack_batch_response.
    """
    # We coerce to bytes to get consistent concat across
    # Py2 and Py3. Percent formatting is insufficient since
    # it includes the b in Py3.
    content_type = _helpers._to_bytes(response.headers.get("content-type", ""))

    faux_message = b"".join(
        [b"Content-Type: ", content_type, b"\nMIME-Version: 1.0\n\n", response.content]
    )

    if six.PY2:
        return parser.parsestr(faux_message)
    else:  # pragma: NO COVER  Python3
        return parser.parsestr(faux_message.decode("utf-8"))
Example #33
    def increment_cell_value(self, column_family_id, column, int_value):
        """Increments a value in an existing cell.

        Assumes the value in the cell is stored as a 64 bit integer
        serialized to bytes.

        .. note::

            This method adds a read-modify rule protobuf to the accumulated
            read-modify rules on this row, but does not make an API
            request. To actually send an API request (with the rules) to the
            Google Cloud Bigtable API, call :meth:`commit`.

        For example:

        .. literalinclude:: snippets_table.py
            :start-after: [START bigtable_api_row_increment_cell_value]
            :end-before: [END bigtable_api_row_increment_cell_value]
            :dedent: 4

        :type column_family_id: str
        :param column_family_id: The column family that contains the column.
                                 Must be of the form
                                 ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.

        :type column: bytes
        :param column: The column within the column family where the cell
                       is located.

        :type int_value: int
        :param int_value: The value to increment the existing value in the cell
                          by. If the targeted cell is unset, it will be treated
                          as containing a zero. Otherwise, the targeted cell
                          must contain an 8-byte value (interpreted as a 64-bit
                          big-endian signed integer), or the entire request
                          will fail.
        """
        column = _to_bytes(column)
        rule_pb = data_v2_pb2.ReadModifyWriteRule(
            family_name=column_family_id,
            column_qualifier=column,
            increment_amount=int_value,
        )
        self._rule_pb_list.append(rule_pb)
Example #34
    def __init__(self, client, content=None, filename=None, source_uri=None):
        sources = [source for source in (content, filename, source_uri)
                   if source is not None]
        if len(sources) != 1:
            raise ValueError(
                'Specify exactly one of "content", "filename", or '
                '"source_uri".')

        self.client = client

        if filename is not None:
            with open(filename, 'rb') as file_obj:
                content = file_obj.read()

        if content is not None:
            content = _bytes_to_unicode(b64encode(_to_bytes(content)))

        self._content = content
        self._source = source_uri
Example #35
    def __init__(self, client, content=None, filename=None, source_uri=None):
        sources = [source for source in (content, filename, source_uri)
                   if source is not None]
        if len(sources) != 1:
            raise ValueError(
                'Specify exactly one of "content", "filename", or '
                '"source_uri".')

        self.client = client

        if filename is not None:
            with open(filename, 'rb') as file_obj:
                content = file_obj.read()

        if content is not None:
            content = _bytes_to_unicode(b64encode(_to_bytes(content)))

        self._content = content
        self._source = source_uri
Example #36
def _to_gapic_image(image):
    """Helper function to convert an ``Image`` to a gRPC ``Image``.

    :type image: :class:`~google.cloud.vision.image.Image`
    :param image: Local ``Image`` class to be converted to gRPC ``Image``.

    :rtype: :class:`~google.cloud.grpc.vision.v1.image_annotator_pb2.Image`
    :returns: gRPC ``Image`` converted from
              :class:`~google.cloud.vision.image.Image`.
    """
    if image.content is not None:
        return image_annotator_pb2.Image(content=_to_bytes(image.content))
    if image.source is not None:
        return image_annotator_pb2.Image(
            source=image_annotator_pb2.ImageSource(
                gcs_image_uri=image.source
            ),
        )
    raise ValueError('No image content or source found.')
Example #37
    def create(self, initial_split_keys=[], column_families={}):
        """Creates this table.

        For example:

        .. literalinclude:: snippets_table.py
            :start-after: [START bigtable_create_table]
            :end-before: [END bigtable_create_table]

        .. note::

            A create request returns a
            :class:`._generated.table_pb2.Table` but we don't use
            this response.

        :type initial_split_keys: list
        :param initial_split_keys: (Optional) list of row keys in bytes that
                                   will be used to initially split the table
                                   into several tablets.

        :type column_families: dict
        :param column_families: (Optional) A map of columns to create. The key
                                is the column_id str and the value is a
                                :class:`GarbageCollectionRule`
        """
        table_client = self._instance._client.table_admin_client
        instance_name = self._instance.name

        families = {
            id: ColumnFamily(id, self, rule).to_pb()
            for (id, rule) in column_families.items()
        }
        table = admin_messages_v2_pb2.Table(column_families=families)

        split = table_admin_messages_v2_pb2.CreateTableRequest.Split
        splits = [split(key=_to_bytes(key)) for key in initial_split_keys]

        table_client.create_table(
            parent=instance_name,
            table_id=self.table_id,
            table=table,
            initial_splits=splits,
        )
def _sign_message(message, access_token, service_account_email):

    """Signs a message.

    :type message: str
    :param message: The message to be signed.

    :type access_token: str
    :param access_token: Access token for a service account.


    :type service_account_email: str
    :param service_account_email: E-mail address of the service account.

    :raises: :exc:`TransportError` if an `access_token` is unauthorized.

    :rtype: str
    :returns: The signature of the message.

    """
    message = _helpers._to_bytes(message)

    method = "POST"
    url = "https://iam.googleapis.com/v1/projects/-/serviceAccounts/{}:signBlob?alt=json".format(
        service_account_email
    )
    headers = {
        "Authorization": "Bearer " + access_token,
        "Content-type": "application/json",
    }
    body = json.dumps({"bytesToSign": base64.b64encode(message).decode("utf-8")})

    request = requests.Request()
    response = request(url=url, method=method, body=body, headers=headers)

    if response.status != six.moves.http_client.OK:
        raise exceptions.TransportError(
            "Error calling the IAM signBytes API: {}".format(response.data)
        )

    data = json.loads(response.data.decode("utf-8"))
    return data["signature"]
    def test_hit_w_content_as_unicode(self):
        import six
        from google.cloud._helpers import _to_bytes
        from google.cloud.exceptions import NotFound
        error_message = u'That\u2019s not found.'
        expected = u'404 %s' % (error_message,)

        response = _Response(404)
        content = u'{"error": {"message": "%s" }}' % (error_message,)

        exception = self._call_fut(response, content)
        if six.PY2:
            self.assertEqual(str(exception),
                             _to_bytes(expected, encoding='utf-8'))
        else:  # pragma: NO COVER
            self.assertEqual(str(exception), expected)

        self.assertIsInstance(exception, NotFound)
        self.assertEqual(exception.message, error_message)
        self.assertEqual(list(exception.errors), [])
Example #40
def test_row_set__update_message_request():
    from google.cloud._helpers import _to_bytes
    from google.cloud.bigtable.row_set import RowRange
    from google.cloud.bigtable.row_set import RowSet

    row_set = RowSet()
    table_name = "table_name"
    row_set.add_row_key("row_key1")
    row_range1 = RowRange(b"row_key21", b"row_key29")
    row_set.add_row_range(row_range1)

    request = _ReadRowsRequestPB(table_name=table_name)
    row_set._update_message_request(request)

    expected_request = _ReadRowsRequestPB(table_name=table_name)
    expected_request.rows.row_keys.append(_to_bytes("row_key1"))

    expected_request.rows.row_ranges.append(row_range1.get_range_kwargs())

    assert request == expected_request
Example #41
def _generate_faux_mime_message(parser, response):
    """Convert response, content -> (multipart) email.message.

    Helper for _unpack_batch_response.
    """
    # We coerce to bytes to get consistent concat across
    # Py2 and Py3. Percent formatting is insufficient since
    # it includes the b in Py3.
    content_type = _helpers._to_bytes(response.headers.get('content-type', ''))

    faux_message = b''.join([
        b'Content-Type: ',
        content_type,
        b'\nMIME-Version: 1.0\n\n',
        response.content,
    ])

    if six.PY2:
        return parser.parsestr(faux_message)
    else:  # pragma: NO COVER  Python3
        return parser.parsestr(faux_message.decode('utf-8'))
Example #42
    def publish(self, message, **attrs):
        """Emulate publishing a message, but save it.

        :type message: bytes
        :param message: the message payload

        :type attrs: dict (string -> string)
        :param attrs: key-value pairs to send as message attributes
        """
        self.topic._timestamp_message(attrs)

        # Append the message to the list of messages.
        item = {'attributes': attrs, 'data': message}
        self.messages.append(item)

        # Determine the approximate size of the message, and increment
        # the current batch size appropriately.
        encoded = base64.b64encode(_to_bytes(message))
        encoded += base64.b64encode(
            json.dumps(attrs, ensure_ascii=False).encode('utf8'),
        )
        self._current_size += len(encoded)

        # If too much time has elapsed since the first message
        # was added, autocommit.
        now = time.time()
        if now - self._start_timestamp > self._max_interval:
            self.commit()
            return

        # If the number of messages on the list is greater than the
        # maximum allowed, autocommit (with the batch's client).
        if len(self.messages) >= self._max_messages:
            self.commit()
            return

        # If we have reached the max size, autocommit.
        if self._current_size >= self._max_size:
            self.commit()
            return
Example #43
    def _configure_multipart_request(self, http_request):
        """Helper for 'configure_request': set up multipart request."""
        # This is a multipart/related upload.
        msg_root = mime_multipart.MIMEMultipart('related')
        # msg_root should not write out its own headers
        setattr(msg_root, '_write_headers', lambda self: None)

        # attach the body as one part
        msg = mime_nonmultipart.MIMENonMultipart(
            *http_request.headers['content-type'].split('/'))
        msg.set_payload(http_request.body)
        msg_root.attach(msg)

        # attach the media as the second part
        msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/'))
        msg['Content-Transfer-Encoding'] = 'binary'
        msg.set_payload(self.stream.read())
        msg_root.attach(msg)

        # NOTE: generate multipart message as bytes, not text
        stream = six.BytesIO()
        if six.PY3:  # pragma: NO COVER  Python3
            generator_class = email_generator.BytesGenerator
        else:
            generator_class = email_generator.Generator
        generator = generator_class(stream, mangle_from_=False)
        generator.flatten(msg_root, unixfrom=False)
        http_request.body = stream.getvalue()

        multipart_boundary = msg_root.get_boundary()
        http_request.headers['content-type'] = (
            'multipart/related; boundary="%s"' % multipart_boundary)

        boundary_bytes = _to_bytes(multipart_boundary)
        body_components = http_request.body.split(boundary_bytes)
        headers, _, _ = body_components[-2].partition(b'\n\n')
        body_components[-2] = b'\n\n'.join([headers, b'<media body>\n\n--'])
        http_request.loggable_body = boundary_bytes.join(body_components)
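The last few lines build `loggable_body` by splitting the body on the boundary and swapping the media payload for a placeholder, so logs never contain the raw upload bytes. A standalone sketch of that step:

boundary_bytes = b'BOUNDARY'
body = (
    b'--BOUNDARY\nContent-Type: application/json\n\n{}\n'
    b'--BOUNDARY\nContent-Type: image/png\n\n<binary media>\n'
    b'--BOUNDARY--'
)
components = body.split(boundary_bytes)
headers, _, _ = components[-2].partition(b'\n\n')
components[-2] = b'\n\n'.join([headers, b'<media body>\n\n--'])
loggable_body = boundary_bytes.join(components)
assert b'<binary media>' not in loggable_body
assert b'<media body>' in loggable_body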
Example #44
    def _configure_multipart_request(self, http_request):
        """Helper for 'configure_request': set up multipart request."""
        # This is a multipart/related upload.
        msg_root = mime_multipart.MIMEMultipart('related')
        # msg_root should not write out its own headers
        setattr(msg_root, '_write_headers', lambda self: None)

        # attach the body as one part
        msg = mime_nonmultipart.MIMENonMultipart(
            *http_request.headers['content-type'].split('/'))
        msg.set_payload(http_request.body)
        msg_root.attach(msg)

        # attach the media as the second part
        msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/'))
        msg['Content-Transfer-Encoding'] = 'binary'
        msg.set_payload(self.stream.read())
        msg_root.attach(msg)

        # NOTE: generate multipart message as bytes, not text
        stream = six.BytesIO()
        if six.PY3:  # pragma: NO COVER  Python3
            generator_class = email_generator.BytesGenerator
        else:
            generator_class = email_generator.Generator
        generator = generator_class(stream, mangle_from_=False)
        generator.flatten(msg_root, unixfrom=False)
        http_request.body = stream.getvalue()

        multipart_boundary = msg_root.get_boundary()
        http_request.headers['content-type'] = (
            'multipart/related; boundary="%s"' % multipart_boundary)

        boundary_bytes = _to_bytes(multipart_boundary)
        body_components = http_request.body.split(boundary_bytes)
        headers, _, _ = body_components[-2].partition(b'\n\n')
        body_components[-2] = b'\n\n'.join([headers, b'<media body>\n\n--'])
        http_request.loggable_body = boundary_bytes.join(body_components)
Example #45
    def _filter_row_ranges(self):
        """ Helper for :meth:`build_updated_request`"""
        new_row_ranges = []

        for row_range in self.message.rows.row_ranges:
            if ((row_range.end_key_open
                 and self._key_already_read(row_range.end_key_open))
                    or (row_range.end_key_closed
                        and self._key_already_read(row_range.end_key_closed))):
                continue

            if ((row_range.start_key_open
                 and self._key_already_read(row_range.start_key_open)) or
                (row_range.start_key_closed
                 and self._key_already_read(row_range.start_key_closed))):
                row_range.start_key_closed = _to_bytes("")
                row_range.start_key_open = self.last_scanned_key

                new_row_ranges.append(row_range)
            else:
                new_row_ranges.append(row_range)

        return new_row_ranges
    def _filter_row_ranges(self):
        """ Helper for :meth:`build_updated_request`"""
        new_row_ranges = []

        for row_range in self.message.rows.row_ranges:
            if ((row_range.end_key_open and
                 self._key_already_read(row_range.end_key_open)) or
                    (row_range.end_key_closed and
                     self._key_already_read(row_range.end_key_closed))):
                continue

            if ((row_range.start_key_open and
                self._key_already_read(row_range.start_key_open)) or
                (row_range.start_key_closed and
                 self._key_already_read(row_range.start_key_closed))):
                row_range.start_key_closed = _to_bytes("")
                row_range.start_key_open = self.last_scanned_key

                new_row_ranges.append(row_range)
            else:
                new_row_ranges.append(row_range)

        return new_row_ranges
Example #47
    def __init__(self, row_key, table):
        self._row_key = _to_bytes(row_key)
        self._table = table
Example #48
def _build_request_data(sample,
                        language_code=None,
                        max_alternatives=None,
                        profanity_filter=None,
                        speech_context=None):
    """Builds the request data before making API request.

    :type sample: :class:`~google.cloud.speech.sample.Sample`
    :param sample: Instance of ``Sample`` containing audio information.

    :type language_code: str
    :param language_code: (Optional) The language of the supplied audio as
                          BCP-47 language tag. Example: ``'en-GB'``.
                          If omitted, defaults to ``'en-US'``.

    :type max_alternatives: int
    :param max_alternatives: (Optional) Maximum number of recognition
                             hypotheses to be returned. The server may
                             return fewer than maxAlternatives.
                             Valid values are 0-30. A value of 0 or 1
                             will return a maximum of 1. Defaults to 1

    :type profanity_filter: bool
    :param profanity_filter: If True, the server will attempt to filter
                             out profanities, replacing all but the
                             initial character in each filtered word with
                             asterisks, e.g. ``'f***'``. If False or
                             omitted, profanities won't be filtered out.

    :type speech_context: list
    :param speech_context: A list of strings (max 50) containing words and
                           phrases "hints" so that the speech recognition
                           is more likely to recognize them. This can be
                           used to improve the accuracy for specific words
                           and phrases. This can also be used to add new
                           words to the vocabulary of the recognizer.

    :rtype: dict
    :returns: Dictionary with required data for Google Speech API.
    """
    if sample.content is not None:
        audio = {
            'content': _bytes_to_unicode(b64encode(_to_bytes(sample.content)))
        }
    else:
        audio = {'uri': sample.source_uri}

    config = {'encoding': sample.encoding, 'sampleRate': sample.sample_rate}

    if language_code is not None:
        config['languageCode'] = language_code
    if max_alternatives is not None:
        config['maxAlternatives'] = max_alternatives
    if profanity_filter is not None:
        config['profanityFilter'] = profanity_filter
    if speech_context is not None:
        config['speechContext'] = {'phrases': speech_context}

    data = {
        'audio': audio,
        'config': config,
    }

    return data
    def __str__(self):
        result = u'%d %s' % (self.code, self.message)
        if six.PY2:
            result = _to_bytes(result, 'utf-8')
        return result
Example #50
    def __init__(self, regex):
        self.regex = _to_bytes(regex)

import base64
import unittest

from google.cloud._helpers import _to_bytes
from google.cloud._helpers import _bytes_to_unicode

IMAGE_CONTENT = _to_bytes('/9j/4QNURXhpZgAASUkq')
IMAGE_SOURCE = 'gs://some/image.jpg'
PROJECT = 'PROJECT'
B64_IMAGE_CONTENT = _bytes_to_unicode(base64.b64encode(IMAGE_CONTENT))


class TestClient(unittest.TestCase):
    @staticmethod
    def _get_target_class():
        from google.cloud.vision.client import Client
        return Client

    def _make_one(self, *args, **kw):
        return self._get_target_class()(*args, **kw)

    def test_ctor(self):
Example #52


import unittest

from google.cloud._helpers import _to_bytes

_IMAGE_CONTENT = _to_bytes('/9j/4QNURXhpZgAASUkq')
_IMAGE_SOURCE = 'gs://some/image.jpg'


class TestClient(unittest.TestCase):
    import base64
    PROJECT = 'PROJECT'
    B64_IMAGE_CONTENT = base64.b64encode(_IMAGE_CONTENT)

    def _getTargetClass(self):
        from google.cloud.vision.client import Client
        return Client

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)
    def detect_language(self, values):
        """Detect the language of a string or list of strings.

        See: https://cloud.google.com/translate/v2/\
        detecting-language-with-rest

        :type values: str or list
        :param values: String or list of strings that will have
                       language detected.

        :rtype: dict or list
        :returns: A list of dictionaries for each queried value. Each
                  dictionary typically contains three keys

                  * ``confidence``: The confidence in language detection, a
                    float between 0 and 1.
                  * ``input``: The corresponding input value.
                  * ``language``: The detected language (as an ISO 639-1
                    language code).

                  though the key ``confidence`` may not always be present.

                  If only a single value is passed, then only a single
                  dictionary will be returned.
        :raises: :class:`ValueError <exceptions.ValueError>` if the number of
                 detections is not equal to the number of values.
                 :class:`ValueError <exceptions.ValueError>` if a value
                 produces a list of detections with 0 or multiple results
                 in it.
        """
        single_value = False
        if isinstance(values, six.string_types):
            single_value = True
            values = [values]

        query_params = [('key', self.api_key)]
        query_params.extend(
            ('q', _to_bytes(value, 'utf-8')) for value in values)
        response = self._connection.api_request(method='GET',
                                                path='/detect',
                                                query_params=query_params)
        detections = response.get('data', {}).get('detections', ())

        if len(values) != len(detections):
            raise ValueError('Expected same number of values and detections',
                             values, detections)

        for index, value in enumerate(values):
            # Empirically, even clearly ambiguous text like "no" only returns
            # a single detection, so we replace the list of detections with
            # the single detection contained.
            if len(detections[index]) == 1:
                detections[index] = detections[index][0]
            else:
                message = ('Expected a single detection per value, API '
                           'returned %d') % (len(detections[index]), )
                raise ValueError(message, value, detections[index])

            detections[index]['input'] = value
            # The ``isReliable`` field is deprecated.
            detections[index].pop('isReliable', None)

        if single_value:
            return detections[0]
        else:
            return detections
    def translate(self,
                  values,
                  target_language=None,
                  format_=None,
                  source_language=None,
                  customization_ids=()):
        """Translate a string or list of strings.

        See: https://cloud.google.com/translate/v2/\
        translating-text-with-rest

        :type values: str or list
        :param values: String or list of strings to translate.

        :type target_language: str
        :param target_language: The language to translate results into. This
                                is required by the API and defaults to
                                the target language of the current instance.

        :type format_: str
        :param format_: (Optional) One of ``text`` or ``html``, to specify
                        if the input text is plain text or HTML.

        :type source_language: str
        :param source_language: (Optional) The language of the text to
                                be translated.

        :type customization_ids: str or list
        :param customization_ids: (Optional) ID or list of customization IDs
                                  for translation. Sets the ``cid`` parameter
                                  in the query.

        :rtype: dict or list
        :returns: A list of dictionaries for each queried value. Each
                  dictionary typically contains three keys (though not
                  all will be present in all cases)

                  * ``detectedSourceLanguage``: The detected language (as an
                    ISO 639-1 language code) of the text.
                  * ``translatedText``: The translation of the text into the
                    target language.
                  * ``input``: The corresponding input value.

                  If only a single value is passed, then only a single
                  dictionary will be returned.
        :raises: :class:`ValueError <exceptions.ValueError>` if the number of
                 values and translations differ.
        """
        single_value = False
        if isinstance(values, six.string_types):
            single_value = True
            values = [values]

        if target_language is None:
            target_language = self.target_language
        if isinstance(customization_ids, six.string_types):
            customization_ids = [customization_ids]

        query_params = [('key', self.api_key), ('target', target_language)]
        query_params.extend(
            ('q', _to_bytes(value, 'utf-8')) for value in values)
        query_params.extend(('cid', cid) for cid in customization_ids)
        if format_ is not None:
            query_params.append(('format', format_))
        if source_language is not None:
            query_params.append(('source', source_language))

        response = self._connection.api_request(method='GET',
                                                path='',
                                                query_params=query_params)

        translations = response.get('data', {}).get('translations', ())
        if len(values) != len(translations):
            raise ValueError('Expected iterations to have same length', values,
                             translations)
        for value, translation in six.moves.zip(values, translations):
            translation['input'] = value

        if single_value:
            return translations[0]
        else:
            return translations
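A hedged usage sketch for ``detect_language`` and ``translate``; ``client`` is assumed to be an instance of the Client these methods belong to, constructed elsewhere with an API key and a default target language of ``'en'`` (the constructor is not part of this snippet):

# A single string in, a single dict out; a list in, a list of dicts out.
detection = client.detect_language('Hasta la vista')
# e.g. {'language': 'es', 'confidence': 0.77, 'input': 'Hasta la vista'}

translations = client.translate(['Guten Tag', 'Bonjour'], format_='text')
# e.g. [{'detectedSourceLanguage': 'de', 'translatedText': 'Good day',
#        'input': 'Guten Tag'},
#       {'detectedSourceLanguage': 'fr', 'translatedText': 'Hello',
#        'input': 'Bonjour'}]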
Example #55
0
    def _call_fut(self, *args, **kwargs):
        from google.cloud._helpers import _to_bytes

        return _to_bytes(*args, **kwargs)
Example #56
0
def _create_row_request(table_name, row_key=None, start_key=None, end_key=None,
                        filter_=None, limit=None, end_inclusive=False):
    """Creates a request to read rows in a table.

    :type table_name: str
    :param table_name: The name of the table to read from.

    :type row_key: bytes
    :param row_key: (Optional) The key of a specific row to read from.

    :type start_key: bytes
    :param start_key: (Optional) The beginning of a range of row keys to
                      read from. The range will include ``start_key``. If
                      left empty, will be interpreted as the empty string.

    :type end_key: bytes
    :param end_key: (Optional) The end of a range of row keys to read from.
                    The range will not include ``end_key``. If left empty,
                    the range extends to the end of the table.

    :type filter_: :class:`.RowFilter`
    :param filter_: (Optional) The filter to apply to the contents of the
                    specified row(s). If unset, reads the entire table.

    :type limit: int
    :param limit: (Optional) The read will terminate after committing to N
                  rows' worth of results. The default (zero) is to return
                  all results.

    :type end_inclusive: bool
    :param end_inclusive: (Optional) Whether the ``end_key`` should be
                  considered inclusive. The default is False (exclusive).

    :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest`
    :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs.
    :raises: :class:`ValueError <exceptions.ValueError>` if both
             ``row_key`` and one of ``start_key`` or ``end_key`` are set.
    """
    request_kwargs = {'table_name': table_name}
    if (row_key is not None and
            (start_key is not None or end_key is not None)):
        raise ValueError('Row key and row range cannot be '
                         'set simultaneously')
    range_kwargs = {}
    if start_key is not None or end_key is not None:
        if start_key is not None:
            range_kwargs['start_key_closed'] = _to_bytes(start_key)
        if end_key is not None:
            end_key_key = 'end_key_open'
            if end_inclusive:
                end_key_key = 'end_key_closed'
            range_kwargs[end_key_key] = _to_bytes(end_key)
    if filter_ is not None:
        request_kwargs['filter'] = filter_.to_pb()
    if limit is not None:
        request_kwargs['rows_limit'] = limit

    message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs)

    if row_key is not None:
        message.rows.row_keys.append(_to_bytes(row_key))

    if range_kwargs:
        message.rows.row_ranges.add(**range_kwargs)

    return message
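A minimal sketch of calling ``_create_row_request`` for an inclusive key-range read; the table path and row keys below are hypothetical:

request = _create_row_request(
    'projects/my-project/instances/my-instance/tables/my-table',
    start_key=b'row-aaa',
    end_key=b'row-zzz',
    end_inclusive=True,
    limit=10,
)
# Per the logic above: request.rows.row_ranges[0].start_key_closed == b'row-aaa',
# request.rows.row_ranges[0].end_key_closed == b'row-zzz', and
# request.rows_limit == 10.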
def _bytes_from_json(value, field):
    """Base64-decode value"""
    if _not_null(value, field):
        return base64.standard_b64decode(_to_bytes(value))
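The decode path used by ``_bytes_from_json``, shown standalone with an illustrative base64 value as it would appear in a JSON response (``field`` is only consulted by the ``_not_null`` guard):

import base64

from google.cloud._helpers import _to_bytes

value = 'YWJj'  # standard base64 text for b'abc'
assert base64.standard_b64decode(_to_bytes(value)) == b'abc'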
Example #58
0
    def __init__(self, etag=None, version=None):
        BasePolicy.__init__(self,
                            etag=etag if etag is None else _to_bytes(etag),
                            version=version)
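The etag-normalization expression used in the constructor above, shown standalone: a text etag is converted to bytes, while ``None`` passes through unchanged.

from google.cloud._helpers import _to_bytes

for etag in ('abc', None):
    normalized = etag if etag is None else _to_bytes(etag)
    print(normalized)  # b'abc', then None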