Example 1
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)

        if '*' in self.source_object:
            wildcard_position = self.source_object.index('*')
            objects = hook.list(
                self.source_bucket,
                prefix=self.source_object[:wildcard_position],
                delimiter=self.source_object[wildcard_position + 1:])
            for source_object in objects:
                self.log.info('Executing copy of gs://{0}/{1} to '
                              'gs://{2}/{3}/{1}'.format(
                                  self.source_bucket, source_object,
                                  self.destination_bucket,
                                  self.destination_object))
                hook.copy(
                    self.source_bucket, source_object, self.destination_bucket,
                    "{}/{}".format(self.destination_object, source_object))
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info('Executing copy: %s, %s, %s, %s', self.source_bucket,
                          self.source_object, self.destination_bucket
                          or self.source_bucket, self.destination_object
                          or self.source_object)
            hook.copy(self.source_bucket, self.source_object,
                      self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
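
The slicing around index('*') is easy to misread, so here is a standalone sketch of how a wildcarded source_object becomes the prefix and delimiter passed to hook.list (the object name below is made up for illustration):

# Illustration only: splitting a hypothetical wildcarded name.
source_object = 'data/2020-*.csv'
wildcard_position = source_object.index('*')
prefix = source_object[:wildcard_position]           # 'data/2020-'
delimiter = source_object[wildcard_position + 1:]    # '.csv'
# hook.list(bucket, prefix=prefix, delimiter=delimiter) then returns the
# names matching the wildcard; in this hook the delimiter effectively
# acts as a suffix filter.
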
Example 2
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )

        if '*' in self.source_object:
            wildcard_position = self.source_object.index('*')
            objects = hook.list(self.source_bucket,
                                prefix=self.source_object[:wildcard_position],
                                delimiter=self.source_object[wildcard_position + 1:])
            for source_object in objects:
                self.log.info('Executing copy of gs://{0}/{1} to '
                              'gs://{2}/{3}/{1}'.format(self.source_bucket,
                                                        source_object,
                                                        self.destination_bucket,
                                                        self.destination_object))
                hook.copy(self.source_bucket, source_object,
                          self.destination_bucket, "{}/{}".format(self.destination_object,
                                                                  source_object))
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info('Executing copy: %s, %s, %s, %s', self.source_bucket,
                          self.source_object,
                          self.destination_bucket or self.source_bucket,
                          self.destination_object or self.source_object)
            hook.copy(self.source_bucket, self.source_object,
                      self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example 3
    def execute(self, context):
        self.log.info('Exporting data to Cloud Storage bucket %s', self.bucket)

        if self.overwrite_existing and self.namespace:
            gcs_hook = GoogleCloudStorageHook(self.cloud_storage_conn_id)
            objects = gcs_hook.list(self.bucket, prefix=self.namespace)
            for o in objects:
                gcs_hook.delete(self.bucket, o)

        ds_hook = DatastoreHook(self.datastore_conn_id, self.delegate_to)
        result = ds_hook.export_to_storage_bucket(
            bucket=self.bucket,
            namespace=self.namespace,
            entity_filter=self.entity_filter,
            labels=self.labels)
        operation_name = result['name']
        result = ds_hook.poll_operation_until_done(
            operation_name, self.polling_interval_in_seconds)

        state = result['metadata']['common']['state']
        if state != 'SUCCESSFUL':
            raise AirflowException(
                'Operation failed: result={}'.format(result))

        return result
Example 4
class GoogleDisplayVideo360ERFToBigQueryOperator(BaseOperator):
    """Upload Multiple Entity Read Files to specified big query dataset.
    """
    def __init__(self,
                 gcp_conn_id='google_cloud_default',
                 report_body=None,
                 yesterday=False,
                 entity_type=None,
                 file_creation_date=None,
                 cloud_project_id=None,
                 bq_table=None,
                 schema=None,
                 gcs_bucket=None,
                 erf_bucket=None,
                 partner_ids=None,  # avoid a shared mutable default argument
                 write_disposition='WRITE_TRUNCATE',
                 *args,
                 **kwargs):
        super(GoogleDisplayVideo360ERFToBigQueryOperator,
              self).__init__(*args, **kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.service = None
        self.bq_hook = None
        self.gcs_hook = None
        self.report_body = report_body
        self.erf_bucket = erf_bucket
        self.yesterday = yesterday
        self.cloud_project_id = cloud_project_id
        self.bq_table = bq_table
        self.gcs_bucket = gcs_bucket
        self.schema = schema
        self.entity_type = entity_type
        self.erf_object = 'entity/%s.0.%s.json' % (file_creation_date,
                                                   entity_type)
        self.partner_ids = partner_ids or []
        self.write_disposition = write_disposition
        self.file_creation_date = file_creation_date

    def execute(self, context):
        if self.gcs_hook is None:
            self.gcs_hook = GoogleCloudStorageHook(
                google_cloud_storage_conn_id=self.gcp_conn_id)
        if self.bq_hook is None:
            self.bq_hook = BigQueryHook(bigquery_conn_id=self.gcp_conn_id)

        for i, partner_id in enumerate(self.partner_ids):
            filename = erf_utils.download_and_transform_erf(self, partner_id)
            entity_read_file_ndj = 'gs://%s/%s' % (self.gcs_bucket, filename)
            if i > 0:
                self.write_disposition = 'WRITE_APPEND'

            bq_base_cursor = self.bq_hook.get_conn().cursor()
            bq_base_cursor.run_load(
                destination_project_dataset_table=self.bq_table,
                schema_fields=self.schema,
                source_uris=[entity_read_file_ndj],
                source_format='NEWLINE_DELIMITED_JSON',
                write_disposition=self.write_disposition)
            self.gcs_hook.delete(self.gcs_bucket, filename)
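
A hedged sketch of how this operator might be instantiated in a DAG; the task id, buckets, table, schema, and partner ids below are placeholders, not values from the source:

# Hypothetical wiring; every literal value is a placeholder.
upload_erf = GoogleDisplayVideo360ERFToBigQueryOperator(
    task_id='erf_to_bq',
    entity_type='LineItem',
    file_creation_date='{{ ds_nodash }}',
    cloud_project_id='my-project',
    bq_table='my_dataset.erf_line_items',
    schema=[{'name': 'id', 'type': 'STRING'}],  # abbreviated schema
    gcs_bucket='my-staging-bucket',
    erf_bucket='my-erf-bucket',
    partner_ids=['123', '456'],
    dag=dag,  # assumes a DAG object is in scope
)
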
Example 5
def move_objects(source_bucket=None, destination_bucket=None, prefix=None, **kwargs):
    storage_objects = kwargs["ti"].xcom_pull(task_ids="list_files")
    hook = GoogleCloudStorageHook()
    for storage_object in storage_objects:
        destination_object = storage_object
        if prefix:
            destination_object = "{}/{}".format(prefix, storage_object)
        hook.copy(source_bucket, storage_object, destination_bucket, destination_object)
        hook.delete(source_bucket, storage_object)
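
A minimal sketch of how this callable could be wired behind the list_files task it pulls its XCom from; the operator names and buckets are assumptions:

# Hypothetical DAG wiring (Airflow 1.10-era API).
from airflow.operators.python_operator import PythonOperator

move_files = PythonOperator(
    task_id='move_files',
    python_callable=move_objects,
    op_kwargs={
        'source_bucket': 'my-source-bucket',        # placeholder
        'destination_bucket': 'my-archive-bucket',  # placeholder
        'prefix': 'archived',                       # placeholder
    },
    provide_context=True,  # so kwargs['ti'] is available inside the callable
    dag=dag,
)
list_files >> move_files  # list_files must push the object list via XCom
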
Example 6
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket,
                                prefix=prefix,
                                delimiter=delimiter)

            for source_object in objects:
                if self.last_modified_time is not None:
                    # Skip objects that were not modified after
                    # last_modified_time.
                    if not hook.is_updated_after(self.source_bucket,
                                                 source_object,
                                                 self.last_modified_time):
                        continue
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(
                        prefix, self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket,
                                       destination_object))

                hook.rewrite(self.source_bucket, source_object,
                             self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            if self.last_modified_time is not None:
                # Skip the copy entirely if the object was not modified
                # after last_modified_time.
                if not hook.is_updated_after(self.source_bucket,
                                             self.source_object,
                                             self.last_modified_time):
                    return

            self.log.info(
                log_message.format(
                    self.source_bucket, self.source_object,
                    self.destination_bucket or self.source_bucket,
                    self.destination_object or self.source_object))
            hook.rewrite(self.source_bucket, self.source_object,
                         self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
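
The replace(prefix, self.destination_object, 1) call above decides the destination name; a small worked example with made-up values:

# Illustration of the destination-name rewrite (all values are made up).
prefix = 'logs/2020/'                 # the part of source_object before '*'
source_object = 'logs/2020/app.log'   # one name returned by hook.list
destination_prefix = 'backup/'        # the operator's destination_object

# Only the leading prefix is replaced, so the object keeps its tail:
destination_object = source_object.replace(prefix, destination_prefix, 1)
assert destination_object == 'backup/app.log'
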
Example 7
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)

            for source_object in objects:
                if self.last_modified_time is not None:
                    # Skip objects that were not modified after
                    # last_modified_time.
                    if not hook.is_updated_after(self.source_bucket, source_object,
                                                 self.last_modified_time):
                        continue
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(prefix,
                                                               self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket, destination_object)
                )

                hook.rewrite(self.source_bucket, source_object,
                             self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            if self.last_modified_time is not None:
                # Skip the copy entirely if the object was not modified
                # after last_modified_time.
                if not hook.is_updated_after(self.source_bucket,
                                             self.source_object,
                                             self.last_modified_time):
                    return

            self.log.info(
                log_message.format(self.source_bucket, self.source_object,
                                   self.destination_bucket or self.source_bucket,
                                   self.destination_object or self.source_object)
            )
            hook.rewrite(self.source_bucket, self.source_object,
                         self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example 8
    def execute(self, context):
        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to)

        if self.objects:
            objects = self.objects
        else:
            objects = hook.list(bucket_name=self.bucket_name,
                                prefix=self.prefix)

        self.log.info("Deleting %s objects from %s", len(objects),
                      self.bucket_name)
        for object_name in objects:
            hook.delete(bucket_name=self.bucket_name, object_name=object_name)
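
This execute method supports either an explicit object list or a prefix scan. A hedged sketch of both modes; the excerpt does not show the class line, so the operator name GoogleCloudStorageDeleteOperator below is an assumption:

# Hypothetical instantiations; the class name and values are assumed.
delete_named = GoogleCloudStorageDeleteOperator(
    task_id='delete_named',
    bucket_name='my-bucket',
    objects=['tmp/a.csv', 'tmp/b.csv'],  # explicit list takes precedence
    dag=dag,
)
delete_by_prefix = GoogleCloudStorageDeleteOperator(
    task_id='delete_by_prefix',
    bucket_name='my-bucket',
    prefix='tmp/',  # everything under tmp/ is listed, then deleted
    dag=dag,
)
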
Example 9
def clean_buckets(google_cloud_storage_conn_id, private_bucket, shared_bucket):
    import logging

    hook = GoogleCloudStorageHook(google_cloud_storage_conn_id)
    total = 0

    # List every object in both buckets so they can be emptied completely.
    private = [(private_bucket, name) for name in hook.list(private_bucket)]
    shared = [(shared_bucket, name) for name in hook.list(shared_bucket)]

    for bucket_name, object_name in private + shared:
        logging.info(f"Deleting gs://{bucket_name}/{object_name}")
        hook.delete(bucket_name, object_name)
        total += 1
    logging.info(f"Deleted {total} objects")
Example 10
def move_objects(source_bucket=None,
                 destination_bucket=None,
                 prefix=None,
                 **kwargs):
    storage_objects = kwargs['ti'].xcom_pull(task_ids='list_files')

    hook = GoogleCloudStorageHook()

    for storage_object in storage_objects:
        destination_object = storage_object
        if prefix:
            destination_object = f'{prefix}/{storage_object}'
        hook.copy(source_bucket, storage_object, destination_bucket,
                  destination_object)
        hook.delete(source_bucket, storage_object)
Example 11
def cleanup(**kwargs):
    ti = kwargs['ti']
    gcs = GoogleCloudStorageHook()
    # Delete the intermediate file produced by each upstream task.
    for task_id in ('censor', 'compression', 'conversion', 'text2speech'):
        file_name = ti.xcom_pull(task_ids=task_id)
        gcs.delete('workflowstorage', file_name)
Example 12
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket,
                                prefix=prefix,
                                delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(
                        prefix, self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket,
                                       destination_object))

                hook.copy(self.source_bucket, source_object,
                          self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info(
                log_message.format(
                    self.source_bucket, self.source_object,
                    self.destination_bucket or self.source_bucket,
                    self.destination_object or self.source_object))
            hook.copy(self.source_bucket, self.source_object,
                      self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example 13
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(prefix,
                                                               self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket, destination_object)
                )

                hook.rewrite(self.source_bucket, source_object,
                             self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info(
                log_message.format(self.source_bucket, self.source_object,
                                   self.destination_bucket or self.source_bucket,
                                   self.destination_object or self.source_object)
            )
            hook.rewrite(self.source_bucket, self.source_object,
                         self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example 14
    def execute(self, context):
        self.log.info('Exporting data to Cloud Storage bucket %s', self.bucket)

        if self.overwrite_existing and self.namespace:
            gcs_hook = GoogleCloudStorageHook(self.cloud_storage_conn_id)
            objects = gcs_hook.list(self.bucket, prefix=self.namespace)
            for o in objects:
                gcs_hook.delete(self.bucket, o)

        ds_hook = DatastoreHook(self.datastore_conn_id, self.delegate_to)
        result = ds_hook.export_to_storage_bucket(bucket=self.bucket,
                                                  namespace=self.namespace,
                                                  entity_filter=self.entity_filter,
                                                  labels=self.labels)
        operation_name = result['name']
        result = ds_hook.poll_operation_until_done(operation_name,
                                                   self.polling_interval_in_seconds)

        state = result['metadata']['common']['state']
        if state != 'SUCCESSFUL':
            raise AirflowException('Operation failed: result={}'.format(result))

        return result
Example 15
class GcsToGDriveOperator(BaseOperator):
    """
    Copies objects from Google Cloud Storage to Google Drive, renaming them
    if requested.

    Using this operator requires the following OAuth 2.0 scope:

    .. code-block:: none

        https://www.googleapis.com/auth/drive

    :param source_bucket: The source Google Cloud Storage bucket where the object is. (templated)
    :type source_bucket: str
    :param source_object: The source name of the object to copy in the Google Cloud
        Storage bucket. (templated)
        You can use only one wildcard for objects (filenames) within your bucket. The wildcard can appear
        inside the object name or at the end of the object name. Appending a wildcard to the bucket name
        is unsupported.
    :type source_object: str
    :param destination_object: The destination name of the object in the destination Google Drive
        service. (templated)
        If a wildcard is supplied in the source_object argument, this is the prefix that will be prepended
        to the final destination objects' paths.
        Note that the source path's part before the wildcard will be removed;
        if it needs to be retained it should be appended to destination_object.
        For example, with prefix ``foo/*`` and destination_object ``blah/``, the file ``foo/baz`` will be
        copied to ``blah/baz``; to retain the prefix write the destination_object as e.g. ``blah/foo``, in
        which case the copied file will be named ``blah/foo/baz``.
    :type destination_object: str
    :param move_object: When move_object is True, the object is moved instead of copied to the
        new location. This is the equivalent of a mv command as opposed to a cp command.
    :type move_object: bool
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform.
    :type gcp_conn_id: str
    :param delegate_to: The account to impersonate, if any.
        For this to work, the service account making the request must have domain-wide delegation enabled.
    :type delegate_to: str
    """

    template_fields = ("source_bucket", "source_object", "destination_object")
    ui_color = "#f0eee4"

    @apply_defaults
    def __init__(self,
                 source_bucket,
                 source_object,
                 destination_object=None,
                 move_object=False,
                 gcp_conn_id="google_cloud_default",
                 delegate_to=None,
                 *args,
                 **kwargs):
        super(GcsToGDriveOperator, self).__init__(*args, **kwargs)

        self.source_bucket = source_bucket
        self.source_object = source_object
        self.destination_object = destination_object
        self.move_object = move_object
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.gcs_hook = None  # type: Optional[GoogleCloudStorageHook]
        self.gdrive_hook = None  # type: Optional[GoogleDriveHook]

    def execute(self, context):

        self.gcs_hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to)
        self.gdrive_hook = GoogleDriveHook(gcp_conn_id=self.gcp_conn_id,
                                           delegate_to=self.delegate_to)

        if WILDCARD in self.source_object:
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                error_msg = (
                    "Only one wildcard '*' is allowed in source_object parameter. "
                    "Found {} in {}.".format(total_wildcards,
                                             self.source_object))

                raise AirflowException(error_msg)

            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = self.gcs_hook.list(self.source_bucket,
                                         prefix=prefix,
                                         delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(
                        prefix, self.destination_object, 1)

                self._copy_single_object(source_object=source_object,
                                         destination_object=destination_object)
        else:
            self._copy_single_object(
                source_object=self.source_object,
                destination_object=self.destination_object)

    def _copy_single_object(self, source_object, destination_object):
        self.log.info(
            "Executing copy of gs://%s/%s to gdrive://%s",
            self.source_bucket,
            source_object,
            destination_object,
        )

        with tempfile.NamedTemporaryFile() as tmp_file:
            filename = tmp_file.name
            self.gcs_hook.download(bucket=self.source_bucket,
                                   object=source_object,
                                   filename=filename)
            self.gdrive_hook.upload_file(local_location=filename,
                                         remote_location=destination_object)

        if self.move_object:
            self.gcs_hook.delete(self.source_bucket, source_object)
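
Given the docstring above, a hedged usage sketch; the bucket and object paths are placeholders:

# Hypothetical instantiation based on the constructor signature above.
copy_to_drive = GcsToGDriveOperator(
    task_id='gcs_reports_to_gdrive',
    source_bucket='my-report-bucket',
    source_object='reports/2020-*.csv',   # a single wildcard, as documented
    destination_object='drive-reports/',  # prefix applied after the wildcard split
    move_object=False,                    # True would delete the GCS copy
    dag=dag,
)
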
Example 16
class GoogleDisplayVideo360SDFToBigQueryOperator(BaseOperator):
    """Make a request to SDF API and upload the data to BQ."""

    DEFAULT_SDF_TABLE_NAMES = {
        'LINE_ITEM': 'SDFLineItem',
        'AD_GROUP': 'SDFAdGroup',
        'AD': 'SDFAd',
        'INSERTION_ORDER': 'SDFInsertionOrder',
        'CAMPAIGN': 'SDFCampaign'
    }

    SDF_API_RESPONSE_KEYS = {
        'LINE_ITEM': 'lineItems',
        'AD_GROUP': 'adGroups',
        'AD': 'ads',
        'INSERTION_ORDER': 'insertionOrders',
        'CAMPAIGN': 'campaigns'
    }

    def __init__(self,
                 gcp_conn_id='google_cloud_default',
                 gcs_bucket=None,
                 schema=None,
                 bq_dataset=None,
                 write_disposition=None,
                 cloud_project_id=None,
                 file_types=None,
                 filter_ids=None,
                 api_version=None,
                 filter_type=None,
                 table_names=DEFAULT_SDF_TABLE_NAMES,
                 sdf_api_response_keys=SDF_API_RESPONSE_KEYS,
                 *args,
                 **kwargs):
        super(GoogleDisplayVideo360SDFToBigQueryOperator,
              self).__init__(*args, **kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.service = None
        self.hook = None
        self.bq_hook = None
        self.gcs_hook = None
        self.gcs_bucket = gcs_bucket
        self.schema = schema
        self.bq_dataset = bq_dataset
        self.write_disposition = write_disposition
        self.cloud_project_id = cloud_project_id
        self.file_types = file_types
        self.filter_ids = filter_ids
        self.api_version = api_version
        self.filter_type = filter_type
        self.table_names = table_names
        self.sdf_api_response_keys = sdf_api_response_keys

    def execute(self, context):
        if self.hook is None:
            self.hook = GoogleDisplayVideo360Hook(gcp_conn_id=self.gcp_conn_id)
        if self.bq_hook is None:
            self.bq_hook = BigQueryHook(bigquery_conn_id=self.gcp_conn_id)
        if self.gcs_hook is None:
            self.gcs_hook = GoogleCloudStorageHook(
                google_cloud_storage_conn_id=self.gcp_conn_id)

        request_body = {
            'fileTypes': self.file_types,
            'filterType': self.filter_type,
            'filterIds': self.filter_ids,
            'version': self.api_version
        }

        logger.info('Request body: %s', request_body)
        request = self.hook.get_service().sdf().download(body=request_body)
        response = request.execute()

        for file_type in self.file_types:
            temp_file = None
            try:
                logger.info('Uploading SDF to GCS')
                temp_file = tempfile.NamedTemporaryFile(delete=False)
                response_key = self.sdf_api_response_keys.get(file_type)
                temp_file.write(response[response_key].encode('utf-8'))
                temp_file.close()
                filename = '%d_%s_%s_%s.json' % (time.time() * 1e+9,
                                                 randint(1, 1000000),
                                                 response_key, 'sdf')
                self.gcs_hook.upload(self.gcs_bucket, filename, temp_file.name)
                logger.info('SDF upload to GCS complete')
            finally:
                if temp_file:
                    temp_file.close()
                    os.unlink(temp_file.name)

            sdf_file = 'gs://%s/%s' % (self.gcs_bucket, filename)

            bq_table = self.table_names.get(file_type)
            bq_table = '%s.%s' % (self.bq_dataset, bq_table)
            schema = SDF_VERSIONED_SCHEMA_TYPES.get(
                self.api_version).get(file_type)
            try:
                bq_base_cursor = self.bq_hook.get_conn().cursor()
                logger.info('Uploading SDF to BigQuery')
                bq_base_cursor.run_load(
                    destination_project_dataset_table=bq_table,
                    schema_fields=schema,
                    source_uris=[sdf_file],
                    source_format='CSV',
                    skip_leading_rows=1,
                    write_disposition=self.write_disposition)
            finally:
                logger.info('Deleting SDF from GCS')
                self.gcs_hook.delete(self.gcs_bucket, filename)
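
A hedged sketch of instantiating this operator; every literal below is a placeholder:

# Hypothetical wiring; all values are placeholders.
sdf_to_bq = GoogleDisplayVideo360SDFToBigQueryOperator(
    task_id='sdf_to_bq',
    gcs_bucket='my-staging-bucket',
    bq_dataset='my_dataset',
    write_disposition='WRITE_TRUNCATE',
    cloud_project_id='my-project',
    file_types=['LINE_ITEM', 'AD_GROUP'],
    filter_ids=['123456'],
    api_version='5.1',
    filter_type='ADVERTISER_ID',
    dag=dag,
)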