Example 1
    def from_string(cls, key, key_id=None):
        """Construct an Signer instance from a private key in PEM format.

        Args:
            key (str): Private key in PEM format.
            key_id (str): An optional key id used to identify the private key.

        Returns:
            google.auth.crypt.Signer: The constructed signer.

        Raises:
            ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
                PEM format.
        """
        key = _helpers.from_bytes(key)  # PEM expects str in Python 3
        marker_id, key_bytes = pem.readPemBlocksFromFile(
            six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER)

        # Key is in pkcs1 format.
        if marker_id == 0:
            private_key = rsa.key.PrivateKey.load_pkcs1(
                key_bytes, format='DER')
        # Key is in pkcs8.
        elif marker_id == 1:
            key_info, remaining = decoder.decode(
                key_bytes, asn1Spec=_PKCS8_SPEC)
            if remaining != b'':
                raise ValueError('Unused bytes', remaining)
            private_key_info = key_info.getComponentByName('privateKey')
            private_key = rsa.key.PrivateKey.load_pkcs1(
                private_key_info.asOctets(), format='DER')
        else:
            raise ValueError('No key could be detected.')

        return cls(private_key, key_id=key_id)
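
A minimal usage sketch for the constructor above; the RSASigner module path mirrors the tests later in this listing, and 'service-account.pem' is a hypothetical file name used only for illustration:

# Hedged usage sketch: build a signer from a PEM private key on disk.
# 'service-account.pem' is a made-up path; the module path follows the
# google.auth.crypt._python_rsa tests shown further down.
from google.auth import _helpers
from google.auth.crypt import _python_rsa

with open('service-account.pem', 'rb') as fh:
    pem_key = _helpers.from_bytes(fh.read())

signer = _python_rsa.RSASigner.from_string(pem_key, key_id='signing-key-1')
signature = signer.sign(b'payload')  # Signer instances are assumed to expose sign()
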
    def apply(self, headers, token=None):
        """Apply the token to the authentication header.

        Args:
            headers (Mapping): The HTTP request headers.
            token (Optional[str]): If specified, overrides the current access
                token.
        """
        headers['authorization'] = 'Bearer {}'.format(
            _helpers.from_bytes(token or self.token))
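
For context, a self-contained sketch of the pattern this method implements; the stand-in class below is illustrative, not the library's Credentials class:

# Minimal stand-in mirroring apply() above: attach a bearer token to the
# headers mapping, letting an explicit token argument override the stored one.
class _StubCredentials(object):
    def __init__(self, token):
        self.token = token

    def apply(self, headers, token=None):
        headers['authorization'] = 'Bearer {}'.format(token or self.token)

headers = {}
_StubCredentials('stored-token').apply(headers, token='override-token')
assert headers['authorization'] == 'Bearer override-token'
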
Example 3
def get(request, path, root=_METADATA_ROOT, recursive=False):
    """Fetch a resource from the metadata server.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        path (str): The resource to retrieve. For example,
            ``'instance/service-accounts/default'``.
        root (str): The full path to the metadata server root.
        recursive (bool): Whether to do a recursive query of metadata. See
            https://cloud.google.com/compute/docs/metadata#aggcontents for more
            details.

    Returns:
        Union[Mapping, str]: If the metadata server returns JSON, a mapping of
            the decoded JSON is returned. Otherwise, the response content is
            returned as a string.

    Raises:
        google.auth.exceptions.TransportError: if an error occurred while
            retrieving metadata.
    """
    base_url = urlparse.urljoin(root, path)
    query_params = {}

    if recursive:
        query_params['recursive'] = 'true'

    url = _helpers.update_query(base_url, query_params)

    response = request(url=url, method='GET', headers=_METADATA_HEADERS)

    if response.status == http_client.OK:
        content = _helpers.from_bytes(response.data)
        if response.headers['content-type'] == 'application/json':
            try:
                return json.loads(content)
            except ValueError as caught_exc:
                new_exc = exceptions.TransportError(
                    'Received invalid JSON from the Google Compute Engine '
                    'metadata service: {:.20}'.format(content))
                six.raise_from(new_exc, caught_exc)
        else:
            return content
    else:
        raise exceptions.TransportError(
            'Failed to retrieve {} from the Google Compute Engine '
            'metadata service. Status: {} Response:\n{}'.format(
                url, response.status, response.data), response)
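
A hedged usage sketch for the function above; the module path google.auth.compute_engine._metadata is an assumption based on the code shown, and the call only succeeds where the GCE metadata server is reachable:

# Hedged usage sketch: fetch the default service account's email from the
# metadata server (works only on a Google Compute Engine instance).
import google.auth.transport.requests
from google.auth.compute_engine import _metadata

request = google.auth.transport.requests.Request()
email = _metadata.get(request, 'instance/service-accounts/default/email')
print(email)
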
Example 4
def test_from_bytes_with_bytes():
    value = b'string-val'
    decoded_value = u'string-val'
    assert _helpers.from_bytes(value) == decoded_value
Example 5
def test_from_bytes_with_nonstring_type():
    with pytest.raises(ValueError):
        _helpers.from_bytes(object())
Example 6
def test_from_bytes_with_unicode():
    value = u'bytes-val'
    assert _helpers.from_bytes(value) == value
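
Taken together, the three from_bytes tests above suggest roughly the following behaviour; this is a minimal sketch consistent with the tests, not the library's actual implementation:

import six

def from_bytes(value):
    # Sketch: decode bytes to text, pass text through, reject everything else.
    if isinstance(value, six.binary_type):
        return value.decode('utf-8')
    elif isinstance(value, six.text_type):
        return value
    else:
        raise ValueError('{0!r} could not be converted to unicode'.format(value))
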
def get(request, path, root=_METADATA_ROOT, recursive=False, retry_count=5):
    """Fetch a resource from the metadata server.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        path (str): The resource to retrieve. For example,
            ``'instance/service-accounts/default'``.
        root (str): The full path to the metadata server root.
        recursive (bool): Whether to do a recursive query of metadata. See
            https://cloud.google.com/compute/docs/metadata#aggcontents for more
            details.
        retry_count (int): How many times to attempt connecting to the
            metadata server before giving up.

    Returns:
        Union[Mapping, str]: If the metadata server returns JSON, a mapping of
            the decoded JSON is returned. Otherwise, the response content is
            returned as a string.

    Raises:
        google.auth.exceptions.TransportError: if an error occurred while
            retrieving metadata.
    """
    base_url = urlparse.urljoin(root, path)
    query_params = {}

    if recursive:
        query_params["recursive"] = "true"

    url = _helpers.update_query(base_url, query_params)

    retries = 0
    while retries < retry_count:
        try:
            response = request(url=url, method="GET", headers=_METADATA_HEADERS)
            break

        except exceptions.TransportError:
            _LOGGER.info(
                "Compute Engine Metadata server unavailable on" "attempt %s of %s",
                retries + 1,
                retry_count,
            )
            retries += 1
    else:
        raise exceptions.TransportError(
            "Failed to retrieve {} from the Google Compute Engine"
            "metadata service. Compute Engine Metadata server unavailable".format(url)
        )

    if response.status == http_client.OK:
        content = _helpers.from_bytes(response.data)
        if response.headers["content-type"] == "application/json":
            try:
                return json.loads(content)
            except ValueError as caught_exc:
                new_exc = exceptions.TransportError(
                    "Received invalid JSON from the Google Compute Engine"
                    "metadata service: {:.20}".format(content)
                )
                six.raise_from(new_exc, caught_exc)
        else:
            return content
    else:
        raise exceptions.TransportError(
            "Failed to retrieve {} from the Google Compute Engine"
            "metadata service. Status: {} Response:\n{}".format(
                url, response.status, response.data
            ),
            response,
        )
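
The retry loop above relies on Python's while ... else: the else clause only runs when the loop ends without hitting break, i.e. when every attempt failed. A standalone sketch of that pattern (names here are illustrative, not part of the library):

# Illustrative retry helper using the same while/else pattern as above.
def fetch_with_retries(do_request, retry_count=5):
    retries = 0
    while retries < retry_count:
        try:
            result = do_request()
            break  # success: skip the else clause
        except IOError:
            retries += 1
    else:
        # Reached only if the loop never hit 'break'.
        raise RuntimeError('all {} attempts failed'.format(retry_count))
    return result
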
Example 8
def test_from_bytes_with_bytes():
    value = b"string-val"
    decoded_value = u"string-val"
    assert _helpers.from_bytes(value) == decoded_value
Example 9
    async def insert_rows_json(self,
                               table,
                               json_rows,
                               row_ids=None,
                               skip_invalid_rows=None,
                               ignore_unknown_values=None,
                               template_suffix=None,
                               retry=None):
        """__Asynchronous__ insertion of rows into a table via the streaming API

    Credit:
    http://google-cloud-python.readthedocs.io/en/latest/_modules/google/cloud
    /bigquery/client.html#Client.insert_rows

    :type table: One of:
                 :class:`~google.cloud.bigquery.table.Table`
                 :class:`~google.cloud.bigquery.table.TableReference`
    :param table: the destination table for the row data, or a reference
                  to it.

    :type json_rows: list of dictionaries
    :param json_rows: Row data to be inserted. Keys must match the table
                      schema fields and values must be JSON-compatible
                      representations.

    :type row_ids: list of string
    :param row_ids: (Optional)  Unique ids, one per row being inserted.
                    If omitted, unique IDs are created.

    :type skip_invalid_rows: bool
    :param skip_invalid_rows: (Optional)  Insert all valid rows of a
                              request, even if invalid rows exist.
                              The default value is False, which causes
                              the entire request to fail if any invalid
                              rows exist.

    :type ignore_unknown_values: bool
    :param ignore_unknown_values: (Optional) Accept rows that contain
                                  values that do not match the schema.
                                  The unknown values are ignored. Default
                                  is False, which treats unknown values as
                                  errors.

    :type template_suffix: str
    :param template_suffix:
        (Optional) treat ``name`` as a template table and provide a suffix.
        BigQuery will create the table ``<name> + <template_suffix>`` based
        on the schema of the template table. See
        https://cloud.google.com/bigquery/streaming-data-into-bigquery
        #template-tables

    :type retry: :class:`google.api_core.retry.Retry`
    :param retry: (Optional) How to retry the RPC.

    :rtype: list of mappings
    :returns: One mapping per row with insert errors:  the "index" key
              identifies the row, and the "errors" key contains a list
              of the mappings describing one or more problems with the
              row.
    """
        rows_info = []
        data = {'rows': rows_info}

        for index, row in enumerate(json_rows):
            info = {'json': row}
            if row_ids is not None:
                info['insertId'] = row_ids[index]
            else:
                info['insertId'] = str(uuid4())
            rows_info.append(info)

        if skip_invalid_rows is not None:
            data['skipInvalidRows'] = skip_invalid_rows

        if ignore_unknown_values is not None:
            data['ignoreUnknownValues'] = ignore_unknown_values

        if template_suffix is not None:
            data['templateSuffix'] = template_suffix

        headers = {'content-type': 'application/json'}

        path = "{}{}/insertAll".format(_API_BASE, table.path)
        body = json.dumps(data).encode('utf-8')

        if _helpers.utcnow() > self._token_expiration:
            await self._acquire_token()

        headers['authorization'] = 'Bearer {}'.format(
            _helpers.from_bytes(self.token['access_token']))

        async with aiohttp.ClientSession() as session:
            response = await session.post(url=path, headers=headers, data=body)
            if response.status != 200:
                raise AsyncBigQueryError("Unable to insert row(s)")
            content = await response.json()

        errors = []
        for error in content.get('insertErrors', ()):
            errors.append({
                'index': int(error['index']),
                'errors': error['errors']
            })

        return errors
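
A hedged usage sketch for the coroutine above; client and table stand for an instance of the surrounding async client and a BigQuery table (or TableReference), and the row payloads are made up:

# Hedged usage sketch: 'client' and 'table' are assumed objects from the
# surrounding async BigQuery wrapper; row contents are illustrative only.
import asyncio

async def insert_demo(client, table):
    rows = [{'name': 'alice', 'score': 10}, {'name': 'bob', 'score': 7}]
    errors = await client.insert_rows_json(table, rows)
    for entry in errors:
        print('row {index} failed: {errors}'.format(**entry))

# asyncio.run(insert_demo(client, table))
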
    def test_from_string_pub_key_unicode(self):
        public_key = _helpers.from_bytes(PUBLIC_KEY_BYTES)
        verifier = es256.ES256Verifier.from_string(public_key)
        assert isinstance(verifier, es256.ES256Verifier)
        assert isinstance(verifier._pubkey, ec.EllipticCurvePublicKey)

    def test_from_string_pkcs1_unicode(self):
        key_bytes = _helpers.from_bytes(PKCS1_KEY_BYTES)
        signer = es256.ES256Signer.from_string(key_bytes)
        assert isinstance(signer, es256.ES256Signer)
        assert isinstance(signer._key, ec.EllipticCurvePrivateKey)
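
Building on the two tests above, a hedged round-trip sketch; it assumes the es256 module, _helpers, and the PKCS1_KEY_BYTES / PUBLIC_KEY_BYTES fixtures from the surrounding test module are in scope, and that ES256Signer.sign() and ES256Verifier.verify() are available as in google.auth.crypt:

# Hedged round-trip sketch reusing the fixtures from the tests above.
signer = es256.ES256Signer.from_string(_helpers.from_bytes(PKCS1_KEY_BYTES))
verifier = es256.ES256Verifier.from_string(_helpers.from_bytes(PUBLIC_KEY_BYTES))
signature = signer.sign(b'a message')            # assumed: sign() returns bytes
assert verifier.verify(b'a message', signature)  # assumed: verify() returns bool
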
Example 12
    def test_from_string_pub_key_unicode(self):
        public_key = _helpers.from_bytes(PUBLIC_KEY_BYTES)
        verifier = _python_rsa.RSAVerifier.from_string(public_key)
        assert isinstance(verifier, _python_rsa.RSAVerifier)
        assert isinstance(verifier._pubkey, rsa.key.PublicKey)
Example 13
    def test_from_string_pkcs8_unicode(self):
        key_bytes = _helpers.from_bytes(PKCS8_KEY_BYTES)
        signer = _python_rsa.RSASigner.from_string(key_bytes)
        assert isinstance(signer, _python_rsa.RSASigner)
        assert isinstance(signer._key, rsa.key.PrivateKey)