Example #1
import datetime
import logging
import os

from google.auth import default, exceptions, iam
from google.auth.transport import requests
from google.oauth2 import service_account


def generate_signed_url(blob):
    """Generate signed URL for storage blob"""
    # Application Default Credentials; the project ID is not needed here
    credentials, _ = default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"])
    signer = iam.Signer(
        request=requests.Request(),
        credentials=credentials,
        service_account_email=os.getenv("FUNCTION_IDENTITY"),
    )
    # Create token-based service account credentials for signing
    signing_credentials = service_account.IDTokenCredentials(
        signer=signer,
        token_uri="https://www.googleapis.com/oauth2/v4/token",
        target_audience="",
        service_account_email=os.getenv("FUNCTION_IDENTITY"),
    )
    # Cloud Functions service account must have Service Account Token Creator role
    try:
        url = blob.generate_signed_url(
            version="v4",
            expiration=datetime.timedelta(
                hours=int(os.environ["SIGNED_URL_EXPIRATION"])),
            method="GET",
            credentials=signing_credentials)
    except exceptions.TransportError:
        logging.error(
            "Service account running the function must have the IAM role "
            "roles/iam.serviceAccountTokenCreator.")
    else:
        print("Generated signed URL.")
        return url
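
A minimal usage sketch for this helper, assuming the google-cloud-storage client; the bucket and object names are placeholders, not part of the original code:

from google.cloud import storage

storage_client = storage.Client()
# "my-bucket" and "exports/report.csv" are hypothetical names
blob = storage_client.bucket("my-bucket").blob("exports/report.csv")
signed_url = generate_signed_url(blob)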
Example #2
    # Method of a credentials-factory class (assumed); _build_request(),
    # _fetch_id_token_credentials(), build_credentials(), DEFAULT_TOKEN_URI,
    # and _cache_id_token_credentials are defined elsewhere in that class/module.
    def build_id_token_credentials(
        self,
        target_audience: str,
        token_uri: Optional[str] = None,
    ) -> Tuple[IDTokenCredentials, Request]:
        request = self._build_request()
        id_token_credentials = self._fetch_id_token_credentials(
            target_audience=target_audience)

        try:
            if not id_token_credentials or not id_token_credentials.valid:
                logger.debug(
                    'Building service_account.IDTokenCredentials '
                    'and refreshing the token.')

                _credential = self.build_credentials()
                id_token_credentials = service_account.IDTokenCredentials(
                    signer=_credential.signer,
                    service_account_email=_credential.service_account_email,
                    token_uri=token_uri if token_uri else DEFAULT_TOKEN_URI,
                    target_audience=target_audience,
                )
                id_token_credentials.refresh(request=request)

                self._cache_id_token_credentials[
                    target_audience] = id_token_credentials

            return (id_token_credentials, request)
        except ServiceAccountConfigurationError:
            raise
        except RuntimeError as e:
            raise ServiceAccountConfigurationError(e) from e
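
A hedged usage sketch; the factory class name and the target audience below are assumptions, not part of the original code:

factory = CredentialsFactory()  # hypothetical class owning the method above
id_token_credentials, request = factory.build_id_token_credentials(
    target_audience="https://my-service.example.com")  # placeholder audience
# The OIDC ID token can now be sent as a Bearer token
headers = {"Authorization": "Bearer {}".format(id_token_credentials.token)}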
Example #3
    def _get_id_credentials(self):
        credentials = service_account.IDTokenCredentials(
            signer=self.credentials.signer,
            service_account_email=self.credentials.service_account_email,
            # TODO: fetch the token URI from the source credentials
            token_uri="https://oauth2.googleapis.com/token",
            target_audience=self.audience,
        )
        if not credentials.valid:
            credentials.refresh(request=Request())
        return credentials
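
The refreshed credentials carry an OIDC ID token that google-auth can attach to outgoing requests. A short sketch, where the owning object and service URL are assumptions:

from google.auth.transport.requests import AuthorizedSession

id_credentials = client._get_id_credentials()  # "client" is the hypothetical owner
session = AuthorizedSession(id_credentials)
response = session.get("https://my-service.example.com/")  # placeholder URL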
Example #4
    @classmethod
    def make_credentials(cls):
        return service_account.IDTokenCredentials(
            SIGNER,
            cls.SERVICE_ACCOUNT_EMAIL,
            cls.TOKEN_URI,
            cls.TARGET_AUDIENCE)
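
The positional arguments map onto the constructor signature IDTokenCredentials(signer, service_account_email, token_uri, target_audience); SIGNER and the class attributes are presumably test fixtures defined elsewhere in the suite.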
Example #5
import datetime
import os
import time

from google.auth import iam
from google.auth.transport import requests
from google.cloud import bigquery, storage
from google.oauth2 import service_account
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail


def main():
    """Sends an email with a Google Cloud Storage signed URL of BQ Query results.
    Creates BQ table, runs a SQL query, and exports the results to Cloud Storage as a CSV. 
    Generates signing credentials for the CSV and sends an email with a link to the signed URL. 
    
    Args:
        None
            
    Returns:
        None
    """

    # Create BigQuery and Storage clients; credentials() is a project-local
    # helper not shown in this excerpt (see the sketch after this example)
    bq_client = bigquery.Client(credentials=credentials())
    storage_client = storage.Client(credentials=credentials())

    # Set variables
    timestr = time.strftime("%Y%m%d%I%M%S")
    project = "report-scheduling"
    dataset_id = "bq_exports"
    file_name = "daily_export_" + timestr
    csv_name = file_name + ".csv"
    table_id = "report-scheduling.bq_exports." + file_name
    bucket_name = "bq_email_exports"
    from_email = "*****@*****.**"
    to_emails = "*****@*****.**"

    # Define the schema for the BQ table
    schema = [
        bigquery.SchemaField("url", "STRING", mode="REQUIRED"),
        bigquery.SchemaField("view_count", "INTEGER", mode="REQUIRED"),
    ]

    # Make an API request to create table
    table = bq_client.create_table(bigquery.Table(table_id, schema=schema))
    print("Created table {}.{}.{}".format(table.project, table.dataset_id,
                                          table.table_id))

    # Run query on that table
    job_config = bigquery.QueryJobConfig(destination=table_id)

    # Define the SQL query
    sql = """
        SELECT
        CONCAT(
            'https://stackoverflow.com/questions/',
            CAST(id as STRING)) as url,
        view_count
        FROM `bigquery-public-data.stackoverflow.posts_questions`
        WHERE tags like '%google-bigquery%'
        ORDER BY view_count DESC
        LIMIT 10
    """

    # Start the query, passing in the extra configuration
    query_job = bq_client.query(sql, job_config=job_config)

    # Wait for the job to complete
    query_job.result()
    print("Query results loaded to the table {}".format(table_id))

    # Export table data as CSV to GCS
    destination_uri = "gs://{}/{}".format(bucket_name, csv_name)
    dataset_ref = bigquery.DatasetReference(project, dataset_id)
    table_ref = dataset_ref.table(file_name)

    extract_job = bq_client.extract_table(
        table_ref,
        destination_uri,
        # Location must match that of the source table
        location="US",
    )

    # Waits for job to complete
    extract_job.result()
    print("Exported {}:{}.{} to {}".format(project, dataset_id, table_id,
                                           destination_uri))

    # Generate a v4 signed URL for downloading a blob
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(csv_name)

    signing_credentials = None
    # If running on GCF, generate signing credentials
    # Service account running the GCF must have Service Account Token Creator role
    if os.getenv("IS_LOCAL") is None:
        signer = iam.Signer(
            request=requests.Request(),
            credentials=credentials(),
            service_account_email=os.getenv("FUNCTION_IDENTITY"),
        )
        # Create Token-based service account credentials for signing
        signing_credentials = service_account.IDTokenCredentials(
            signer=signer,
            token_uri="https://www.googleapis.com/oauth2/v4/token",
            target_audience="",
            service_account_email=os.getenv("FUNCTION_IDENTITY"),
        )

    url = blob.generate_signed_url(
        version="v4",
        # This URL is valid for 24 hours, until the next email
        expiration=datetime.timedelta(hours=24),
        # Allow GET requests using this URL
        method="GET",
        # Signing credentials; if None, falls back to JSON credentials in a local environment
        credentials=signing_credentials,
    )
    print("Generated GET signed URL.")

    # Create email message through SendGrid with link to signed URL
    message = Mail(
        from_email=from_email,
        to_emails=to_emails,
        subject="Daily BQ export",
        html_content="<p> Your daily BigQuery export from Google Cloud Platform \
            is linked <a href={}>here</a>.</p>".format(url),
    )

    # Send email
    try:
        sg = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
        response = sg.send(message)
        print(response.status_code)
    except Exception as e:
        print(str(e))
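
The sample calls a credentials() helper that is not shown in this excerpt. A hedged sketch of what it might look like, assuming Application Default Credentials; the real project may load a service account key file instead:

from google.auth import default


def credentials():
    # Assumed implementation, not from the original sample
    creds, _ = default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"])
    return creds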