Esempio n. 1
0
    def build_id_token_credentials(
        self,
        target_audience: str,
        token_uri: Optional[str] = None,
    ) -> Tuple[IDTokenCredentials, Request]:
        """Return ID-token credentials for *target_audience* plus the transport request used.

        Args:
            target_audience: Audience claim the ID token should carry.
            token_uri: Optional token endpoint override; defaults to
                DEFAULT_TOKEN_URI when not given.

        Returns:
            Tuple of (IDTokenCredentials, Request).

        Raises:
            ServiceAccountConfigurationError: re-raised as-is, or wrapping any
                RuntimeError raised while building/refreshing the credentials.
        """
        request = self._build_request()
        # Check for previously built credentials for this audience first
        # (the cache below is written on a miss).
        id_token_credentials = self._fetch_id_token_credentials(
            target_audience=target_audience)

        try:
            if not id_token_credentials or not id_token_credentials.valid:
                logger.debug(
                    'Fetch compute_engine.IDTokenCredentials and token refresh.'
                )

                id_token_credentials = compute_engine.IDTokenCredentials(
                    request=request,
                    target_audience=target_audience,
                    token_uri=token_uri if token_uri else DEFAULT_TOKEN_URI,
                )
                # Refresh eagerly so the returned credentials hold a live token.
                id_token_credentials.refresh(request=request)

                # Cache per audience for subsequent calls.
                self._cache_id_token_credentials[
                    target_audience] = id_token_credentials

        except ServiceAccountConfigurationError:
            raise
        except RuntimeError as e:
            # Normalize unexpected runtime failures into the configuration error.
            raise ServiceAccountConfigurationError(e)

        return (id_token_credentials, request)
Esempio n. 2
0
def _get_gce_credentials(
    target_audience: Optional[str],
    request: Optional[google.auth.transport.Request] = None
) -> Optional[google_auth_credentials.Credentials]:
    """Get ID-token credentials from the GCE Metadata Service.

    Returns ``None`` when the compute_engine helpers cannot be imported or
    when the metadata server does not answer the ping (i.e. we are not
    running on GCE/GAE/Cloud Run).
    """
    # Ping requires a transport, but we want application default credentials
    # to require no arguments. So, we'll use the _http_client transport which
    # uses http.client. This is only acceptable because the metadata server
    # doesn't do SSL and never requires proxies.

    # While this library is normally bundled with compute_engine, there are
    # some cases where it's not available, so we tolerate ImportError.
    try:
        from google.auth import compute_engine
        from google.auth.compute_engine import _metadata
    except ImportError:
        return None
    from google.auth.transport import _http_client

    if request is None:
        request = _http_client.Request()

    if _metadata.ping(request=request):
        return compute_engine.IDTokenCredentials(
            request, target_audience, use_metadata_identity_endpoint=True)

    return None
Esempio n. 3
0
def router(request):
    """Dispatch an HTTP request by path and method.

    Routes:
        GET /            -> render index.html with a fresh session UUID
        /status          -> placeholder status string
        /get_signed_url  -> 60-minute v4 signed PUT URL for ?filename=...
        anything else    -> 404
    """
    path = request.path
    method = request.method

    if path == "/" and method == "GET":
        session_uuid = uuid.uuid1()
        return render_template('index.html', session_uuid=session_uuid)

    elif path == "/status":
        # TODO: report real health information.
        return "status"

    elif path == "/get_signed_url":
        filename = request.args.get('filename')
        blob = bucket.blob(filename)
        # BUG FIX: the original f-string contained no placeholder, so the
        # requested filename was never logged.
        print(f'Generating signed URL for filename: {filename}')

        # https://gist.github.com/jezhumble/91051485db4462add82045ef9ac2a0ec#file-python_cloud_function_get_signed_url-py-L12
        auth_request = google.auth.transport.requests.Request()
        # Compute Engine credentials sign via the IAM signBlob API, so no
        # local private key is required.
        signing_credentials = compute_engine.IDTokenCredentials(
            auth_request, "")

        url = blob.generate_signed_url(
            expiration=datetime.timedelta(minutes=60),
            method="PUT",
            version="v4",
            content_type="application/octet-stream",
            credentials=signing_credentials)
        print(f'Signed URL: {url}')

        return url

    else:
        abort(404)
Esempio n. 4
0
    def _generate_download_url(self, path_info, expires=3600):
        """Return a signed download URL for *path_info*, valid for *expires* seconds.

        :param path_info: object with ``bucket`` and ``path`` attributes.
        :param expires: URL lifetime in seconds (default one hour).
        :raises FileNotFoundError: if the blob does not exist.
        """
        import google.auth
        from google.auth import compute_engine

        expiration = timedelta(seconds=int(expires))

        bucket = self.gs.bucket(path_info.bucket)
        blob = bucket.get_blob(path_info.path)
        if blob is None:
            raise FileNotFoundError

        if isinstance(
            blob.client._credentials,  # pylint: disable=protected-access
            google.auth.credentials.Signing,
        ):
            # sign if we're able to sign with credentials.
            return blob.generate_signed_url(expiration=expiration)

        auth_request = google.auth.transport.requests.Request()
        # create signing credentials with the default credentials
        # for use with Compute Engine and other environments where
        # Client credentials cannot sign.
        signing_credentials = compute_engine.IDTokenCredentials(
            auth_request, ""
        )
        # BUG FIX: the original returned signing_credentials.signer.sign(blob),
        # which signs raw bytes and never yields a URL. Generate the signed URL
        # with the IAM-backed credentials instead, matching the Signing branch.
        return blob.generate_signed_url(
            expiration=expiration, credentials=signing_credentials
        )
Esempio n. 5
0
    def generateUploadPolicy(self, conditions):
        """Create a signed POST policy document for browser uploads.

        Our implementation of ``bucket.generate_upload_policy`` that works with
        default (Compute Engine) token credentials instead of a local private
        key. Policy documents allow visitors to a website to upload files to
        Google Cloud Storage without direct write access; see
        https://cloud.google.com/storage/docs/xml-api/post-object#policydocument

        The policy always expires one hour from now, and a condition pinning
        the module-level ``bucket`` is appended to *conditions*.

        :type conditions: list
        :param conditions: A list of conditions as described in the policy
                           documents documentation.

        :rtype: dict
        :returns: Form-field name/value pairs (``bucket``, ``GoogleAccessId``,
                  ``policy``, ``signature``) to add to the HTML upload form.
        """
        global credentials, bucket
        auth_request = requests.Request()
        # IAM-backed signing credentials — no local private key required.
        sign_cred = compute_engine.IDTokenCredentials(
            auth_request,
            "",
            service_account_email=credentials.service_account_email)
        expiration = _NOW() + timedelta(hours=1)
        conditions = conditions + [{"bucket": bucket.name}]
        policy_document = {
            "expiration": _datetime_to_rfc3339(expiration),
            "conditions": conditions,
        }
        # The signature is computed over the base64-encoded JSON policy.
        encoded_policy_document = base64.b64encode(
            json.dumps(policy_document).encode("utf-8"))
        signature = base64.b64encode(
            sign_cred.sign_bytes(encoded_policy_document))
        fields = {
            "bucket": bucket.name,
            "GoogleAccessId": sign_cred.signer_email,
            "policy": encoded_policy_document.decode("utf-8"),
            "signature": signature.decode("utf-8"),
        }
        return fields
Esempio n. 6
0
def GetIDTokenFromComputeEngine(target_audience):
    """Fetch an OIDC ID token for *target_audience* from the GCE metadata server."""
    transport_request = google.auth.transport.requests.Request()
    metadata_creds = compute_engine.IDTokenCredentials(
        request=transport_request,
        target_audience=target_audience,
        use_metadata_identity_endpoint=True,
    )
    metadata_creds.refresh(transport_request)
    return metadata_creds.token
Esempio n. 7
0
def test_id_token_from_metadata(http_request):
    """Fetch an ID token via the metadata identity endpoint and check its claims."""
    from datetime import datetime

    credentials = compute_engine.IDTokenCredentials(
        http_request, "target_audience", use_metadata_identity_endpoint=True)
    credentials.refresh(http_request)

    _, payload, _, _ = jwt._unverified_decode(credentials.token)
    assert payload["aud"] == "target_audience"
    # BUG FIX: "exp" is a Unix timestamp (int) while credentials.expiry is a
    # datetime, so the original direct comparison could never succeed.
    # Convert first, matching the sibling test below.
    assert datetime.fromtimestamp(payload["exp"]) == credentials.expiry
def test_id_token_from_metadata(http_request):
    """Verify a metadata-derived ID token is valid and carries the expected claims."""
    creds = compute_engine.IDTokenCredentials(
        http_request,
        AUDIENCE,
        use_metadata_identity_endpoint=True,
    )
    creds.refresh(http_request)

    decoded = jwt._unverified_decode(creds.token)
    payload = decoded[1]
    assert creds.valid
    assert payload["aud"] == AUDIENCE
    assert datetime.fromtimestamp(payload["exp"]) == creds.expiry
Esempio n. 9
0
 def generate_new_audio_upload_url(cls, filename, content_type=None):
     """Return a one-hour signed PUT URL for uploading a new audio blob."""
     storage_client = cls._bucket_client()
     iam_credentials = compute_engine.IDTokenCredentials(
         requests.Request(), "")
     blob = storage_client.blob(filename, chunk_size=262144 * 5)
     return blob.generate_signed_url(
         expiration=timedelta(hours=1),
         method="PUT",
         content_type=content_type,
         credentials=iam_credentials,
     )
Esempio n. 10
0
def _get_signed_image_url(bucket, image_id):
    """Return a signed URL, valid for 30 minutes, for *image_id* in *bucket*."""
    storage_client = _get_storage_client()
    blob = storage_client.bucket(bucket).blob(image_id)
    expiry = datetime.now() + timedelta(minutes=30)
    iam_signing_creds = compute_engine.IDTokenCredentials(
        requests.Request(),
        "",
        service_account_email=SERVICE_ACCOUNT_EMAIL,
    )
    return blob.generate_signed_url(expiry, credentials=iam_signing_creds)
Esempio n. 11
0
    def _generate_url(self, package):
        """Build a time-limited signed URL for the GCS object backing *package*."""
        blob = self._get_gcs_blob(package)

        signing_credentials = None
        if self.use_iam_signer:
            # Workaround for https://github.com/googleapis/google-auth-library-python/issues/50
            signing_credentials = compute_engine.IDTokenCredentials(
                requests.Request(), "")

        return blob.generate_signed_url(
            expiration=timedelta(seconds=self.expire_after),
            credentials=signing_credentials,
            version="v4",
        )
Esempio n. 12
0
    def download(self,
                 blobKey,
                 fileName="",
                 download="",
                 sig="",
                 *args,
                 **kwargs):
        """Download a file by redirecting the client to a signed URL.

        :param blobKey: The unique blob key of the file: a urlsafe-base64
            encoding of "<path>\\0<validUntil>".
        :type blobKey: str
        :param fileName: Optional filename to provide in the header.
        :type fileName: str
        :param download: Set header to attachment retrieval, set explicitly to
            "1" if download is wanted.
        :type download: str
        :param sig: HMAC signature authenticating *blobKey*; required.
        :type sig: str
        :raises errors.PreconditionFailed: if *sig* is missing.
        :raises errors.Forbidden: if the signature does not verify.
        :raises errors.Gone: if the key's validity window has passed.
        :raises errors.NotFound: if the blob does not exist (local branch).
        :raises errors.Redirect: always on success — carries the signed URL.
        """
        global credentials, bucket
        if not sig:
            raise errors.PreconditionFailed()
        # First, validate the signature, otherwise we don't need to proceed any further
        if not utils.hmacVerify(blobKey.encode("ASCII"), sig):
            raise errors.Forbidden()
        # Split the blobKey into the individual fields it should contain
        dlPath, validUntil = urlsafe_b64decode(blobKey).decode("UTF-8").split(
            "\0")
        # validUntil == "0" means "no expiry"; otherwise it is a %Y%m%d%H%M stamp.
        if validUntil != "0" and datetime.strptime(
                validUntil, "%Y%m%d%H%M") < datetime.now():
            raise errors.Gone()
        # Create a signed url and redirect the user
        if isinstance(credentials, ServiceAccountCredentials
                      ):  # We run locally with an service-account.json
            blob = bucket.get_blob(dlPath)
            if not blob:
                raise errors.NotFound()
            signed_url = blob.generate_signed_url(datetime.now() +
                                                  timedelta(seconds=60))
        else:  # We are inside the appengine
            auth_request = requests.Request()
            signed_blob_path = bucket.blob(dlPath)
            expires_at_ms = datetime.now() + timedelta(seconds=60)
            # IAM-backed signing: no private key is available on App Engine.
            signing_credentials = compute_engine.IDTokenCredentials(
                auth_request,
                "",
                service_account_email=credentials.service_account_email)
            signed_url = signed_blob_path.generate_signed_url(
                expires_at_ms, credentials=signing_credentials, version="v4")
        raise errors.Redirect(signed_url)
Esempio n. 13
0
def BAD_generate_download_signed_url_v4(name: str):
    """Generate a v4 signed GET URL (valid 15 minutes) for blob *name*.

    Uses Application Default Credentials plus IAM-backed signing
    (``compute_engine.IDTokenCredentials``), so no local private key is
    required; the service account needs roles/iam.serviceAccountTokenCreator.
    The GOOGLE_APPLICATION_CREDENTIALS path is only printed for debugging.
    """
    # bucket_name = 'your-bucket-name'
    # blob_name = 'your-object-name'
    import os

    KEYFILE = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
    print(KEYFILE)

    import google.auth
    from google.auth.transport import requests
    from google.auth import compute_engine

    credentials, project = google.auth.default()

    bucket_name = 'data-curatedmetagenomics'
    storage_client = storage.Client(project, credentials)
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(name)

    auth_request = requests.Request()

    # Sign via the IAM API using the default credentials' service account.
    signing_credentials = compute_engine.IDTokenCredentials(
        auth_request, "",
        service_account_email=credentials.service_account_email)

    url = blob.generate_signed_url(
        credentials=signing_credentials,
        version="v4",
        # This URL is valid for 15 minutes
        expiration=datetime.timedelta(minutes=15),
        # Allow GET requests using this URL.
        method="GET",
    )

    print("Generated GET signed URL:")
    print(url)
    print("You can use this URL with any user agent, for example:")
    print("curl '{}'".format(url))
    return url
Esempio n. 14
0
def sign_url(filename):
    """Return a 60-minute v4 signed URL for *filename* in the $BUCKET_NAME bucket."""
    import os, google.auth
    from google.auth.transport import requests
    from google.auth import compute_engine
    from google.cloud import storage
    import datetime

    default_credentials, project_id = google.auth.default()
    gcs = storage.Client(project_id, default_credentials)
    bucket = gcs.lookup_bucket(os.getenv("BUCKET_NAME"))
    blob = bucket.blob(filename)
    # IAM-backed signing credentials derived from the client's service account.
    iam_credentials = compute_engine.IDTokenCredentials(
        requests.Request(),
        "",
        service_account_email=gcs._credentials.service_account_email,
    )
    return blob.generate_signed_url(
        datetime.timedelta(minutes=60),
        credentials=iam_credentials,
        version="v4",
    )
Esempio n. 15
0
def sign_url(blob: storage.Blob, *args, **kwargs):
    """cloudstorage signed url to download cloudstorage object without login
    Docs : https://cloud.google.com/storage/docs/access-control?hl=bg#Signed-URLs
    API : https://cloud.google.com/storage/docs/reference-methods?hl=bg#getobject
    """
    # In debug mode, sign with whatever credentials the blob's client holds.
    if config.DEBUG:
        return blob.generate_signed_url(*args, **kwargs)

    # Otherwise sign via IAM with the (masked) service account email.
    iam_creds = compute_engine.IDTokenCredentials(
        Request(),
        "",
        service_account_email="*****@*****.**",
    )
    return blob.generate_signed_url(
        *args, **kwargs, credentials=iam_creds, version="v4"
    )
Esempio n. 16
0
    def generate_signed_url(self,
                            cloud_path,
                            expiration=datetime.timedelta(days=7)):
        """Generate a signed URL for accessing the object at the given cloud path that expires after the given
        expiration date or period.

        Signs via refreshed default (e.g. Compute Engine) credentials and the
        IAM API when possible; when the refresh fails, falls back to signing
        with the local service account key.

        :param str cloud_path: the path to the object to generate the signed URL for
        :param datetime.datetime|datetime.timedelta expiration: the datetime for the URL to expire at or the amount of time after which it should expire
        :return str:
        """
        # When a storage emulator is configured, point the signed URL at it.
        if os.environ.get("STORAGE_EMULATOR_HOST"):
            api_access_endpoint = {
                "api_access_endpoint": os.environ["STORAGE_EMULATOR_HOST"]
            }
        else:
            api_access_endpoint = {}

        blob = self._blob(cloud_path)

        try:
            # Use compute engine credentials if running on e.g. Google Cloud Run, performing a refresh request to get
            # the access token of the credentials (otherwise it's `None`).
            credentials, _ = google.auth.default()
            request = google_requests.Request()
            credentials.refresh(request)

            signing_credentials = compute_engine.IDTokenCredentials(
                request,
                "",
                service_account_email=credentials.service_account_email,
            )

            return blob.generate_signed_url(
                expiration=expiration,
                credentials=signing_credentials,
                version="v4",
                **api_access_endpoint,
            )

        except google.auth.exceptions.RefreshError:
            # Use local service account key.
            return blob.generate_signed_url(expiration=expiration,
                                            **api_access_endpoint)
Esempio n. 17
0
def python_cloud_function_get_signed_url(bucket_name, blob_name):
    """Return a v4 signed URL for *blob_name*, valid PRESIGNED_URL_TTL_MINUTES.

    Based on https://gist.github.com/jezhumble/91051485db4462add82045ef9ac2a0ec
    (Copyright 2019 Google LLC, SPDX-License-Identifier: Apache-2.0). Shows how
    to call Blob.generate_signed_url() from within compute engine / cloud
    functions without access to a private key, as described at
    https://cloud.google.com/functions/docs/writing/http#uploading_files_via_cloud_storage
    Note: the function's service account needs the permission
    roles/iam.serviceAccountTokenCreator.
    """
    default_creds, project_id = google.auth.default()
    client = gs.Client(project_id, default_creds)
    blob = client.lookup_bucket(bucket_name).blob(blob_name)
    expiry = datetime.now() + timedelta(minutes=PRESIGNED_URL_TTL_MINUTES)
    # This next line is the trick!
    iam_signing_creds = compute_engine.IDTokenCredentials(
        requests.Request(),
        "",
        service_account_email=default_creds.service_account_email,
    )
    return blob.generate_signed_url(
        expiry, credentials=iam_signing_creds, version="v4"
    )
Esempio n. 18
0
    def download(self,
                 blobKey,
                 fileName="",
                 download="",
                 sig="",
                 *args,
                 **kwargs):
        """Download a file, either served directly or via a signed-URL redirect.

        :param blobKey: The unique blob key of the file. With *sig* it is a
            urlsafe-base64 encoding of "<path>\\0<validUntil>"; without *sig*
            it is a plain cloudstore path (requires file-view access).
        :type blobKey: str
        :param fileName: Optional filename to provide in the header.
        :type fileName: str
        :param download: Set header to attachment retrieval, set explicitly to
            "1" if download is wanted.
        :type download: str
        :param sig: Optional HMAC signature authenticating *blobKey*.
        :type sig: str
        """
        global credentials, bucket
        if not sig:
            # Check if the current user has the right to download *any* blob present in this application.
            # blobKey is then the path inside cloudstore - not a base64 encoded tuple
            usr = utils.getCurrentUser()
            if not usr:
                raise errors.Unauthorized()
            if "root" not in usr["access"] and "file-view" not in usr["access"]:
                raise errors.Forbidden()
            validUntil = "-1"  # Prevent this from being cached down below
            blob = bucket.get_blob(blobKey)
        else:
            # We got an request including a signature (probably a guest or a user without file-view access)
            # First, validate the signature, otherwise we don't need to proceed any further
            if not utils.hmacVerify(blobKey.encode("ASCII"), sig):
                raise errors.Forbidden()
            # Split the blobKey into the individual fields it should contain
            dlPath, validUntil = urlsafe_b64decode(blobKey).decode(
                "UTF-8").split("\0")
            # validUntil == "0" means "no expiry"; otherwise it is a timestamp.
            if validUntil != "0" and datetime.strptime(
                    validUntil, "%Y%m%d%H%M") < datetime.now():
                raise errors.Gone()
            blob = bucket.get_blob(dlPath)
        if not blob:
            raise errors.Gone()
        if download:
            fileName = sanitizeFileName(blob.name.split("/")[-1])
            contentDisposition = "attachment; filename=%s" % fileName
        else:
            contentDisposition = None
        if isinstance(credentials, ServiceAccountCredentials
                      ):  # We run locally with an service-account.json
            expiresAt = datetime.now() + timedelta(seconds=60)
            signedUrl = blob.generate_signed_url(
                expiresAt,
                response_disposition=contentDisposition,
                version="v4")
            raise errors.Redirect(signedUrl)
        elif utils.isLocalDevelopmentServer:  # No Service-Account to sign with - Serve everything directly
            response = utils.currentRequest.get().response
            response.headers["Content-Type"] = blob.content_type
            if contentDisposition:
                response.headers["Content-Disposition"] = contentDisposition
            return blob.download_as_bytes()
        else:  # We are inside the appengine
            if validUntil == "0":  # Its an indefinitely valid URL
                if blob.size < 5 * 1024 * 1024:  # Less than 5 MB - Serve directly and push it into the ede caches
                    response = utils.currentRequest.get().response
                    response.headers["Content-Type"] = blob.content_type
                    response.headers[
                        "Cache-Control"] = "public, max-age=604800"  # 7 Days
                    if contentDisposition:
                        response.headers[
                            "Content-Disposition"] = contentDisposition
                    return blob.download_as_bytes()
            # Default fallback - create a signed URL and redirect
            authRequest = requests.Request()
            expiresAt = datetime.now() + timedelta(seconds=60)
            # IAM-backed signing credentials (no private key on App Engine).
            signing_credentials = compute_engine.IDTokenCredentials(
                authRequest, "")
            signedUrl = blob.generate_signed_url(
                expiresAt,
                credentials=signing_credentials,
                response_disposition=contentDisposition,
                version="v4")
            raise errors.Redirect(signedUrl)
Esempio n. 19
0
def fetch_id_token(request, audience):
    """Fetch the ID Token from the current environment.

    This function acquires ID token from the environment in the following order.
    See https://google.aip.dev/auth/4110.

    1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
       to the path of a valid service account JSON file, then ID token is
       acquired using this service account credentials.
    2. If the application is running in Compute Engine, App Engine or Cloud Run,
       then the ID token are obtained from the metadata server.
    3. If metadata server doesn't exist and no valid service account credentials
       are found, :class:`~google.auth.exceptions.DefaultCredentialsError` will
       be raised.

    Example::

        import google.oauth2.id_token
        import google.auth.transport.requests

        request = google.auth.transport.requests.Request()
        target_audience = "https://pubsub.googleapis.com"

        id_token = google.oauth2.id_token.fetch_id_token(request, target_audience)

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        audience (str): The audience that this ID token is intended for.

    Returns:
        str: The ID token.

    Raises:
        ~google.auth.exceptions.DefaultCredentialsError:
            If metadata server doesn't exist and no valid service account
            credentials are found.
    """
    # 1. Try to get credentials from the GOOGLE_APPLICATION_CREDENTIALS environment
    # variable.
    credentials_filename = os.environ.get(environment_vars.CREDENTIALS)
    if credentials_filename:
        if not (os.path.exists(credentials_filename)
                and os.path.isfile(credentials_filename)):
            raise exceptions.DefaultCredentialsError(
                "GOOGLE_APPLICATION_CREDENTIALS path is either not found or invalid."
            )

        try:
            with open(credentials_filename, "r") as f:
                from google.oauth2 import service_account

                info = json.load(f)
                # Only service-account files can mint ID tokens here; other
                # credential types fall through to the metadata server.
                if info.get("type") == "service_account":
                    credentials = service_account.IDTokenCredentials.from_service_account_info(
                        info, target_audience=audience)
                    credentials.refresh(request)
                    return credentials.token
        except ValueError as caught_exc:
            new_exc = exceptions.DefaultCredentialsError(
                "GOOGLE_APPLICATION_CREDENTIALS is not valid service account credentials.",
                caught_exc,
            )
            raise new_exc from caught_exc

    # 2. Try to fetch ID token from metadata server if it exists. The code
    # works for GAE and Cloud Run metadata server as well.
    try:
        from google.auth import compute_engine
        from google.auth.compute_engine import _metadata

        if _metadata.ping(request):
            credentials = compute_engine.IDTokenCredentials(
                request, audience, use_metadata_identity_endpoint=True)
            credentials.refresh(request)
            return credentials.token
    except (ImportError, exceptions.TransportError):
        pass

    raise exceptions.DefaultCredentialsError(
        "Neither metadata server or valid service account credentials are found."
    )
Esempio n. 20
0
# Start-up configuration. The `os.getenv(...) or sys.exit(...)` idiom aborts
# the process with a message when a required variable is missing; sys.exit
# never returns, so the constant always holds the environment value.
PACKAGES_BUCKET_NAME: Final[str] = os.getenv(
    'PACKAGES_BUCKET_NAME') or sys.exit(
        'You MUST set PACKAGES_BUCKET_NAME environment variable.')
TOKEN_NAME: Final[str] = os.getenv('TOKEN_NAME') or sys.exit(
    'You MUST set TOKEN_NAME environment variable (ex. "projects/123/secrets/pypi-token/versions/1").'
)
# Signed-URL lifetime in minutes.
EXPIRES_MINUTES: Final[int] = 30

auth_request: TransportRequest = TransportRequest()
credentials, project = default()
storage_client: Client = Client(project, credentials)
# Same abort-on-missing pattern for the two required buckets.
STATIC_BUCKET: Final[Bucket] = storage_client.lookup_bucket(
    STATIC_BUCKET_NAME) or sys.exit('Static website bucket was not found')
PACKAGES_BUCKET: Final[Bucket] = storage_client.lookup_bucket(
    PACKAGES_BUCKET_NAME) or sys.exit('Packages bucket was not found')
# IAM-backed signing credentials — no local private key needed.
SIGNING_CREDENTIALS: Final[Credentials] = compute_engine.IDTokenCredentials(
    auth_request, '', service_account_email=credentials.service_account_email)

# Challenge header sent with 401 responses.
HEADERS: Final[Dict[str, str]] = {
    'WWW-Authenticate': 'Basic realm="Restricted Area"',
}

# Canned 404 JSON response reused by the handlers below.
NOT_FOUND_RESPONSE: Final[Response] = JSONResponse({
    'code':
    int(HTTPStatus.NOT_FOUND),
    'message':
    HTTPStatus.NOT_FOUND.phrase
})


@requires('authenticated')
async def html_homepage(request: Request) -> Response:
Esempio n. 21
0
    'image/png',
    'image/svg+xml',
    'application/msword',
    'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
    'text/html',
    'application/vnd.ms-powerpoint',
    'application\
        /vnd.openxmlformats-officedocument.presentationml.presentation',
    'application/rtf',
    'text/plain',
    'text/rtf',
}

# Outside development, sign URLs with IAM-backed Compute Engine credentials;
# in DEV mode no signing credentials are used.
if not DEV:
    auth_request = requests.Request()
    signing_credentials = compute_engine.IDTokenCredentials(auth_request, "")
else:
    signing_credentials = None


def generate_upload_url(mimetype: str) -> tuple:
    fileuuid = uuid.uuid4().hex
    current_date = datetime.datetime.now().strftime("%d-%m-%y")
    blob_name = '{}/{}/source'.format(current_date, fileuuid)

    blob: storage.Blob = bucket.blob(blob_name)

    url = blob.generate_signed_url(
        version="v4",
        expiration=datetime.timedelta(minutes=15),
        method="PUT",
Esempio n. 22
0
def process_key(message, context):
    """Handle a Security Command Center "SA key not rotated" Pub/Sub event.

    Decodes the base64 payload in *message*, extracts the affected service
    account and project from ``resource.parentDisplayName``, then calls the
    configured Cloud Run key-manager endpoint with an ID-token-authenticated
    request and returns a dict describing its response.

    :param message: Pub/Sub message dict; must contain CLOUD_FUNC_KEY_DATA.
    :param context: Optional event context (used only for logging).
    :returns: dict with the key manager's content/reason/status_code, plus
        token debug fields when running locally (GCP_SA env var set).
    :raises LookupError: if the event payload lacks an expected key.
    :raises ValueError: if parentDisplayName does not match the SA pattern.
    """
    if context:
        print("""This Function was triggered by messageId {} published at {}""".format(context.event_id,
                                                                                       context.timestamp))

    if CLOUD_FUNC_KEY_DATA in message:
        data = base64.b64decode(message[CLOUD_FUNC_KEY_DATA]).decode('utf-8')
        log(f"event dict data key has value: {data}", severity=LOG_SEVERITY_DEBUG)
    else:
        raise LookupError(f"event dict does not contain data key: {message}")

    SA_key_not_rotated = json.loads(data)
    key_resource = "resource"
    if key_resource not in SA_key_not_rotated:
        raise LookupError(f"Security command center finding dict does not contain key {key_resource}")

    resource = SA_key_not_rotated[key_resource]
    key_parentDisplayName = "parentDisplayName"
    if key_parentDisplayName not in resource:
        raise LookupError(f"Security command center finding dict does not contain key {key_parentDisplayName}")

    # extract SA from parentDisplayName
    # (group 1 = project id, group 2 = SA name; SA_regex must contribute one group)
    project_SA_REGEX = 'projects/([\w-]+)/serviceAccounts/' + SA_regex
    parentDisplayName = resource[key_parentDisplayName]
    parentDisplayName_regex_search_result = re.search(project_SA_REGEX, parentDisplayName)
    if not parentDisplayName_regex_search_result or len(parentDisplayName_regex_search_result.groups()) != 2:
        raise ValueError(
            f"finding.{key_resource}.{key_parentDisplayName} failed to match regular expression {project_SA_REGEX}")

    GCP_SA = parentDisplayName_regex_search_result.group(2) + SA_postfix
    project_id = parentDisplayName_regex_search_result.group(1)
    log(f"extracted Google service account is {GCP_SA} and project ID is {project_id}", severity=LOG_SEVERITY_DEBUG)

    result = verify_config(GCP_SA, project_id, get_config_func=get_or_init_config)
    if result[key_abort_level] != 0:
        return result[key_payload]

    secret_project_id = result[key_payload][key_secret_project_id]

    # create an authenticated request to Cloud run GSA key manager for key rotation
    google_oauth_request = google.auth.transport.requests.Request()
    target_audience = result[key_payload][key_cloud_run_url]
    url = f"{result[key_payload][key_cloud_run_url]}/{result[key_payload][key_cloud_run_subpath]}"

    # local debugging vs executing in Google cloud
    if 'GCP_SA' in os.environ:
        creds = service_account.IDTokenCredentials.from_service_account_file(
            os.environ['GOOGLE_APPLICATION_CREDENTIALS'], target_audience=target_audience)
    else:
        # On GCP, mint the ID token via the metadata identity endpoint.
        creds = compute_engine.IDTokenCredentials(google_oauth_request,
                                                  target_audience=target_audience,
                                                  use_metadata_identity_endpoint=True)

    authed_session = AuthorizedSession(creds)
    form_data = {
        'GCP_SAs': GCP_SA,
        # 'secret_name_prefix': 'hil_0_',
        key_secret_project_id: secret_project_id
    }
    sa_key_manager_response = authed_session.request(result[key_payload][key_cloud_run_verb], url=url, data=form_data)

    func_response = {
        'content': sa_key_manager_response.content.decode('utf-8'),
        'reason': sa_key_manager_response.reason,
        'status_code': sa_key_manager_response.status_code,
    }

    # on success, response has json method
    if sa_key_manager_response.status_code == HTTPStatus.OK:
        func_response['content'] = sa_key_manager_response.json()

    # show security tokens at local debugging
    if 'GCP_SA' in os.environ:
        google_oauth_request = google.auth.transport.requests.Request()
        func_response['identity_token'] = creds.token
        func_response['Open ID Connect token verification'] = id_token.verify_token(creds.token,
                                                                                    google_oauth_request)
    else:
        log(f"Calling {result[key_payload][key_cloud_run_url]} returns: {json.dumps(func_response)}", severity='DEBUG')

    return func_response