Example #1
    def execute(self, context):
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        sftp_hook = SFTPHook(self.sftp_conn_id)

        if WILDCARD in self.source_object:
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                raise AirflowException(
                    "Only one wildcard '*' is allowed in source_object parameter. "
                    "Found {} in {}.".format(total_wildcards, self.source_object)
                )

            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = gcs_hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)

            for source_object in objects:
                destination_path = os.path.join(self.destination_path, source_object)
                self._copy_single_object(gcs_hook, sftp_hook, source_object, destination_path)

            self.log.info("Done. Uploaded '%d' files to %s", len(objects), self.destination_path)
        else:
            destination_path = os.path.join(self.destination_path, self.source_object)
            self._copy_single_object(gcs_hook, sftp_hook, self.source_object, destination_path)
            self.log.info("Done. Uploaded '%s' file to %s", self.source_object, destination_path)
Example #2
    def execute(self, context) -> dict:
        self.log.info('Exporting data to Cloud Storage bucket %s', self.bucket)

        if self.overwrite_existing and self.namespace:
            gcs_hook = GCSHook(self.cloud_storage_conn_id,
                               impersonation_chain=self.impersonation_chain)
            objects = gcs_hook.list(self.bucket, prefix=self.namespace)
            for obj in objects:
                gcs_hook.delete(self.bucket, obj)

        ds_hook = DatastoreHook(
            self.datastore_conn_id,
            self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        result = ds_hook.export_to_storage_bucket(
            bucket=self.bucket,
            namespace=self.namespace,
            entity_filter=self.entity_filter,
            labels=self.labels,
            project_id=self.project_id,
        )
        operation_name = result['name']
        result = ds_hook.poll_operation_until_done(
            operation_name, self.polling_interval_in_seconds)

        state = result['metadata']['common']['state']
        if state != 'SUCCESSFUL':
            raise AirflowException(f'Operation failed: result={result}')
        return result
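A hedged, standalone sketch of the export-and-poll pattern used above; the bucket, project, and polling interval are placeholders, not values from the example:

from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.datastore import DatastoreHook

ds_hook = DatastoreHook("google_cloud_default")
result = ds_hook.export_to_storage_bucket(
    bucket="my-export-bucket",   # hypothetical bucket
    namespace=None,
    entity_filter=None,
    labels=None,
    project_id="my-project",     # hypothetical project
)
# The export call returns a long-running operation; poll it until it finishes.
result = ds_hook.poll_operation_until_done(result["name"], 10)
if result["metadata"]["common"]["state"] != "SUCCESSFUL":
    raise AirflowException(f"Operation failed: result={result}")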
Example #3
File: gcs.py Project: stu12345/airflow
    def poke(self, context):
        self.log.info('Sensor checks existence of objects: %s, %s',
                      self.bucket, self.prefix)
        hook = GCSHook(google_cloud_storage_conn_id=self.google_cloud_conn_id,
                       delegate_to=self.delegate_to)
        self._matches = hook.list(self.bucket, prefix=self.prefix)
        return bool(self._matches)
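The poke() above only lists objects by prefix and casts the result to bool. The same check can be run directly with the hook; the bucket and prefix below are hypothetical:

from airflow.providers.google.cloud.hooks.gcs import GCSHook

hook = GCSHook(gcp_conn_id="google_cloud_default")
matches = hook.list("my-bucket", prefix="incoming/2024-01-01/")
print("objects found" if matches else "nothing yet")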
Example #4
    def execute(self, context: "Context") -> list:

        hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info(
            'Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
            self.bucket,
            self.delimiter,
            self.prefix,
        )

        StorageLink.persist(
            context=context,
            task_instance=self,
            uri=self.bucket,
            project_id=hook.project_id,
        )

        return hook.list(bucket_name=self.bucket,
                         prefix=self.prefix,
                         delimiter=self.delimiter)
Example #5
    def execute(self, context: 'Context') -> List[str]:
        # list all files in a Google Cloud Storage bucket
        hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.google_impersonation_chain,
        )

        self.log.info(
            'Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
            self.bucket,
            self.delimiter,
            self.prefix,
        )

        files = hook.list(bucket_name=self.bucket,
                          prefix=self.prefix,
                          delimiter=self.delimiter)

        s3_hook = S3Hook(aws_conn_id=self.dest_aws_conn_id,
                         verify=self.dest_verify,
                         extra_args=self.dest_s3_extra_args)

        if not self.replace:
            # if we are not replacing -> list all files in the S3 bucket
            # and only keep those files which are present in
            # Google Cloud Storage and not in S3
            bucket_name, prefix = S3Hook.parse_s3_url(self.dest_s3_key)
            # look for the bucket and the prefix to avoid look into
            # parent directories/keys
            existing_files = s3_hook.list_keys(bucket_name, prefix=prefix)
            # in case that no files exists, return an empty array to avoid errors
            existing_files = existing_files if existing_files is not None else []
            # remove the prefix for the existing files to allow the match
            existing_files = [
                file.replace(prefix, '', 1) for file in existing_files
            ]
            files = list(set(files) - set(existing_files))

        if files:

            for file in files:
                with hook.provide_file(
                        object_name=file,
                        bucket_name=self.bucket) as local_tmp_file:
                    dest_key = self.dest_s3_key + file
                    self.log.info("Saving file to %s", dest_key)

                    s3_hook.load_file(
                        filename=local_tmp_file.name,
                        key=dest_key,
                        replace=self.replace,
                        acl_policy=self.s3_acl_policy,
                    )

            self.log.info("All done, uploaded %d files to S3", len(files))
        else:
            self.log.info("In sync, no files needed to be uploaded to S3")

        return files
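A small sketch of the incremental-sync idea above: compute the set of GCS objects whose keys are not already present under the destination S3 prefix, assuming uploads land at dest_s3_key + GCS object name. All bucket names, prefixes, and connection IDs are placeholders:

from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.google.cloud.hooks.gcs import GCSHook

gcs_hook = GCSHook(gcp_conn_id="google_cloud_default")
s3_hook = S3Hook(aws_conn_id="aws_default")

gcs_files = gcs_hook.list("my-gcs-bucket", prefix="exports/")

# Destination key prefix on S3; uploaded objects sit at "<prefix><gcs object name>".
bucket_name, prefix = S3Hook.parse_s3_url("s3://my-s3-bucket/backup/")
existing = s3_hook.list_keys(bucket_name, prefix=prefix) or []
# Strip the destination prefix so the S3 keys compare against the GCS object names.
existing = [key.replace(prefix, "", 1) for key in existing]

to_upload = sorted(set(gcs_files) - set(existing))
print(f"{len(to_upload)} files would be uploaded")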
Example #6
    def execute(self, context: 'Context'):
        self._check_inputs()
        azure_fileshare_hook = AzureFileShareHook(self.azure_fileshare_conn_id)
        files = azure_fileshare_hook.list_files(
            share_name=self.share_name, directory_name=self.directory_name
        )

        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.google_impersonation_chain,
        )

        dest_gcs_bucket, dest_gcs_object_prefix = _parse_gcs_url(self.dest_gcs)

        if not self.replace:
            # if we are not replacing -> list all files in the GCS bucket
            # and only keep those files which are present in the
            # Azure file share and not in Google Cloud Storage
            existing_files_prefixed = gcs_hook.list(dest_gcs_bucket, prefix=dest_gcs_object_prefix)

            existing_files = []

            # Remove the object prefix itself if it was listed (an empty directory placeholder)
            if dest_gcs_object_prefix in existing_files_prefixed:
                existing_files_prefixed.remove(dest_gcs_object_prefix)

            # Remove the object prefix from all object string paths
            for file in existing_files_prefixed:
                if file.startswith(dest_gcs_object_prefix):
                    existing_files.append(file[len(dest_gcs_object_prefix) :])
                else:
                    existing_files.append(file)

            files = list(set(files) - set(existing_files))

        if files:
            self.log.info('%s files are going to be synced.', len(files))
            if self.directory_name is None:
                raise RuntimeError("The directory_name must be set!")
            for file in files:
                with NamedTemporaryFile() as temp_file:
                    azure_fileshare_hook.get_file_to_stream(
                        stream=temp_file,
                        share_name=self.share_name,
                        directory_name=self.directory_name,
                        file_name=file,
                    )
                    temp_file.flush()

                    # There will always be a '/' before file because it is
                    # enforced at instantiation time
                    dest_gcs_object = dest_gcs_object_prefix + file
                    gcs_hook.upload(dest_gcs_bucket, dest_gcs_object, temp_file.name, gzip=self.gzip)
            self.log.info("All done, uploaded %d files to Google Cloud Storage.", len(files))
        else:
            self.log.info('There are no new files to sync. Have a nice day!')
            self.log.info('In sync, no files needed to be uploaded to Google Cloud Storage')

        return files
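A minimal sketch of the per-file step above: stream one file from an Azure file share into a temporary file and upload it to GCS. The import path below follows older microsoft.azure provider releases (it moved in later versions), and the share, directory, file, and bucket names are placeholders:

from tempfile import NamedTemporaryFile

from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.microsoft.azure.hooks.azure_fileshare import AzureFileShareHook

azure_hook = AzureFileShareHook("azure_fileshare_default")
gcs_hook = GCSHook(gcp_conn_id="google_cloud_default")

with NamedTemporaryFile() as temp_file:
    # Download the share file into the temp file, then push it to GCS.
    azure_hook.get_file_to_stream(
        stream=temp_file,
        share_name="my-share",
        directory_name="exports",
        file_name="report.csv",
    )
    temp_file.flush()
    gcs_hook.upload("my-gcs-bucket", "exports/report.csv", temp_file.name)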
Example #7
    def poke(self, context: dict) -> bool:
        self.log.info('Sensor checks existence of objects: %s, %s', self.bucket, self.prefix)
        hook = GCSHook(
            gcp_conn_id=self.google_cloud_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        self._matches = hook.list(self.bucket, prefix=self.prefix)
        return bool(self._matches)
Example #8
    def execute(self, context):

        hook = GCSHook(google_cloud_storage_conn_id=self.gcp_conn_id,
                       delegate_to=self.delegate_to)

        self.log.info(
            'Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
            self.bucket, self.delimiter, self.prefix)

        return hook.list(bucket_name=self.bucket,
                         prefix=self.prefix,
                         delimiter=self.delimiter)
Example #9
    def execute(self, context):
        hook = GCSHook(google_cloud_storage_conn_id=self.gcp_conn_id,
                       delegate_to=self.delegate_to)

        if self.objects:
            objects = self.objects
        else:
            objects = hook.list(bucket_name=self.bucket_name,
                                prefix=self.prefix)

        self.log.info("Deleting %s objects from %s", len(objects),
                      self.bucket_name)
        for object_name in objects:
            hook.delete(bucket_name=self.bucket_name, object_name=object_name)
Example #10
    def execute(self, context):
        hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        if self.objects:
            objects = self.objects
        else:
            objects = hook.list(bucket_name=self.bucket_name, prefix=self.prefix)

        self.log.info("Deleting %s objects from %s", len(objects), self.bucket_name)
        for object_name in objects:
            hook.delete(bucket_name=self.bucket_name, object_name=object_name)
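The delete loop above, reduced to a standalone sketch; the bucket and prefix are placeholders. Deleting by prefix and deleting an explicit object list both come down to the same two hook calls:

from airflow.providers.google.cloud.hooks.gcs import GCSHook

hook = GCSHook(gcp_conn_id="google_cloud_default")
objects = hook.list(bucket_name="my-bucket", prefix="tmp/staging/")
for object_name in objects:
    hook.delete(bucket_name="my-bucket", object_name=object_name)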
Example #11
    def execute(self, context) -> list:

        hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info(
            'Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
            self.bucket,
            self.delimiter,
            self.prefix,
        )

        return hook.list(bucket_name=self.bucket, prefix=self.prefix, delimiter=self.delimiter)
Example #12
    def execute(self, context: 'Context'):
        # use the super to list all files in an Azure Data Lake path
        files = super().execute(context)
        g_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.google_impersonation_chain,
        )

        if not self.replace:
            # if we are not replacing -> list all files in the ADLS path
            # and only keep those files which are present in
            # ADLS and not in Google Cloud Storage
            bucket_name, prefix = _parse_gcs_url(self.dest_gcs)
            existing_files = g_hook.list(bucket_name=bucket_name,
                                         prefix=prefix)
            files = list(set(files) - set(existing_files))

        if files:
            hook = AzureDataLakeHook(
                azure_data_lake_conn_id=self.azure_data_lake_conn_id)

            for obj in files:
                with NamedTemporaryFile(mode='wb', delete=True) as f:
                    hook.download_file(local_path=f.name, remote_path=obj)
                    f.flush()
                    dest_gcs_bucket, dest_gcs_prefix = _parse_gcs_url(
                        self.dest_gcs)
                    dest_path = os.path.join(dest_gcs_prefix, obj)
                    self.log.info("Saving file to %s", dest_path)

                    g_hook.upload(bucket_name=dest_gcs_bucket,
                                  object_name=dest_path,
                                  filename=f.name,
                                  gzip=self.gzip)

            self.log.info("All done, uploaded %d files to GCS", len(files))
        else:
            self.log.info("In sync, no files needed to be uploaded to GCS")

        return files
Example #13
    def execute(self, context):

        hook = GCSHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to
        )

        if self.destination_bucket is None:
            self.log.warning(
                'destination_bucket is None. Defaulting it to source_bucket (%s)',
                self.source_bucket)
            self.destination_bucket = self.source_bucket

        if WILDCARD in self.source_object:
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                error_msg = "Only one wildcard '*' is allowed in source_object parameter. " \
                            "Found {} in {}.".format(total_wildcards, self.source_object)

                raise AirflowException(error_msg)

            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(prefix,
                                                               self.destination_object, 1)

                self._copy_single_object(hook=hook, source_object=source_object,
                                         destination_object=destination_object)
        else:
            self._copy_single_object(hook=hook, source_object=self.source_object,
                                     destination_object=self.destination_object)
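The destination-name rewrite above replaces the part of the source name before the wildcard with destination_object. The same step on plain strings (values are hypothetical):

prefix = "raw/2024/"                  # portion of source_object before the '*'
destination_object = "archive/2024/"

source_object = "raw/2024/part-0001.csv"
destination = source_object.replace(prefix, destination_object, 1)
assert destination == "archive/2024/part-0001.csv"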
Example #14
    def execute(self, context):
        # use the super method to list all the files in an S3 bucket/key
        files = super().execute(context)

        gcs_hook = GCSHook(google_cloud_storage_conn_id=self.gcp_conn_id,
                           delegate_to=self.delegate_to)

        # pylint: disable=too-many-nested-blocks
        if not self.replace:
            # if we are not replacing -> list all files in the GCS bucket
            # and only keep those files which are present in
            # S3 and not in Google Cloud Storage
            bucket_name, object_prefix = _parse_gcs_url(self.dest_gcs)
            existing_files_prefixed = gcs_hook.list(bucket_name,
                                                    prefix=object_prefix)

            existing_files = []

            if existing_files_prefixed:
                # Remove the object prefix itself if it was listed (an empty directory placeholder)
                if object_prefix in existing_files_prefixed:
                    existing_files_prefixed.remove(object_prefix)

                # Remove the object prefix from all object string paths
                for f in existing_files_prefixed:
                    if f.startswith(object_prefix):
                        existing_files.append(f[len(object_prefix):])
                    else:
                        existing_files.append(f)

            files = list(set(files) - set(existing_files))
            if len(files) > 0:
                self.log.info('%s files are going to be synced: %s.',
                              len(files), files)
            else:
                self.log.info(
                    'There are no new files to sync. Have a nice day!')

        if files:
            hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)

            for file in files:
                # GCS hook builds its own in-memory file so we have to create
                # and pass the path
                file_object = hook.get_key(file, self.bucket)
                with NamedTemporaryFile(mode='wb', delete=True) as f:
                    file_object.download_fileobj(f)
                    f.flush()

                    dest_gcs_bucket, dest_gcs_object_prefix = _parse_gcs_url(
                        self.dest_gcs)
                    # There will always be a '/' before file because it is
                    # enforced at instantiation time
                    dest_gcs_object = dest_gcs_object_prefix + file

                    # Sync is sequential and the hook already logs too much
                    # so skip this for now
                    # self.log.info(
                    #     'Saving file {0} from S3 bucket {1} in GCS bucket {2}'
                    #     ' as object {3}'.format(file, self.bucket,
                    #                             dest_gcs_bucket,
                    #                             dest_gcs_object))

                    gcs_hook.upload(dest_gcs_bucket,
                                    dest_gcs_object,
                                    f.name,
                                    gzip=self.gzip)

            self.log.info(
                "All done, uploaded %d files to Google Cloud Storage",
                len(files))
        else:
            self.log.info(
                'In sync, no files needed to be uploaded to Google Cloud Storage')

        return files
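A minimal sketch of the per-file transfer step above: download one S3 object into a temporary file and upload it to GCS. Connection IDs, bucket names, and the key are placeholders:

from tempfile import NamedTemporaryFile

from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.google.cloud.hooks.gcs import GCSHook

s3_hook = S3Hook(aws_conn_id="aws_default")
gcs_hook = GCSHook(gcp_conn_id="google_cloud_default")

key = "exports/part-0001.csv"
file_object = s3_hook.get_key(key, "my-s3-bucket")
with NamedTemporaryFile(mode="wb", delete=True) as f:
    # Stream the S3 object to disk, then upload the local file to GCS.
    file_object.download_fileobj(f)
    f.flush()
    gcs_hook.upload("my-gcs-bucket", key, f.name)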
Example #15
class GCSToGoogleDriveOperator(BaseOperator):
    """
    Copies objects from a Google Cloud Storage service to a Google Drive service, with renaming
    if requested.

    Using this operator requires the following OAuth 2.0 scope:

    .. code-block:: none

        https://www.googleapis.com/auth/drive

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:GCSToGoogleDriveOperator`

    :param source_bucket: The source Google Cloud Storage bucket where the object is. (templated)
    :type source_bucket: str
    :param source_object: The source name of the object to copy in the Google Cloud
        Storage bucket. (templated)
        You can use only one wildcard for objects (filenames) within your bucket. The wildcard can appear
        inside the object name or at the end of the object name. Appending a wildcard to the bucket name
        is unsupported.
    :type source_object: str
    :param destination_object: The destination name of the object in the destination Google Drive
        service. (templated)
        If a wildcard is supplied in the source_object argument, this is the prefix that will be prepended
        to the final destination objects' paths.
        Note that the source path's part before the wildcard will be removed;
        if it needs to be retained it should be appended to destination_object.
        For example, with prefix ``foo/*`` and destination_object ``blah/``, the file ``foo/baz`` will be
        copied to ``blah/baz``; to retain the prefix write the destination_object as e.g. ``blah/foo``, in
        which case the copied file will be named ``blah/foo/baz``.
    :type destination_object: str
    :param move_object: When move object is True, the object is moved instead of copied to the new location.
        This is the equivalent of a mv command as opposed to a cp command.
    :type move_object: bool
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform.
    :type gcp_conn_id: str
    :param delegate_to: The account to impersonate, if any.
        For this to work, the service account making the request must have domain-wide delegation enabled.
    :type delegate_to: str
    """

    template_fields = ("source_bucket", "source_object", "destination_object")
    ui_color = "#f0eee4"

    @apply_defaults
    def __init__(self,
                 source_bucket: str,
                 source_object: str,
                 destination_object: Optional[str] = None,
                 move_object: bool = False,
                 gcp_conn_id: str = "google_cloud_default",
                 delegate_to: Optional[str] = None,
                 *args,
                 **kwargs):
        super().__init__(*args, **kwargs)

        self.source_bucket = source_bucket
        self.source_object = source_object
        self.destination_object = destination_object
        self.move_object = move_object
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.gcs_hook = None  # type: Optional[GCSHook]
        self.gdrive_hook = None  # type: Optional[GoogleDriveHook]

    def execute(self, context):

        self.gcs_hook = GCSHook(google_cloud_storage_conn_id=self.gcp_conn_id,
                                delegate_to=self.delegate_to)
        self.gdrive_hook = GoogleDriveHook(gcp_conn_id=self.gcp_conn_id,
                                           delegate_to=self.delegate_to)

        if WILDCARD in self.source_object:
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                error_msg = (
                    "Only one wildcard '*' is allowed in source_object parameter. "
                    "Found {} in {}.".format(total_wildcards,
                                             self.source_object))

                raise AirflowException(error_msg)

            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = self.gcs_hook.list(self.source_bucket,
                                         prefix=prefix,
                                         delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(
                        prefix, self.destination_object, 1)

                self._copy_single_object(source_object=source_object,
                                         destination_object=destination_object)
        else:
            self._copy_single_object(
                source_object=self.source_object,
                destination_object=self.destination_object)

    def _copy_single_object(self, source_object, destination_object):
        self.log.info(
            "Executing copy of gs://%s/%s to gdrive://%s",
            self.source_bucket,
            source_object,
            destination_object,
        )

        with tempfile.NamedTemporaryFile() as file:
            filename = file.name
            self.gcs_hook.download(bucket_name=self.source_bucket,
                                   object_name=source_object,
                                   filename=filename)
            self.gdrive_hook.upload_file(local_location=filename,
                                         remote_location=destination_object)

        if self.move_object:
            self.gcs_hook.delete(self.source_bucket, source_object)
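A hedged usage sketch for the operator defined above. The import path depends on the Airflow/provider version (the one below matches recent google provider releases); the task ID, bucket, and object values are placeholders:

from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator

copy_reports = GCSToGoogleDriveOperator(
    task_id="copy_reports_to_drive",
    source_bucket="my-reports-bucket",
    source_object="reports/2024-*.pdf",   # at most one '*' is allowed
    destination_object="shared/reports/",
    move_object=False,                    # True would delete the GCS source after copying
    gcp_conn_id="google_cloud_default",
)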
Example #16
File: gcs.py Project: stu12345/airflow
    def poke(self, context):
        hook = GCSHook()
        return self.is_bucket_updated(
            len(hook.list(self.bucket, prefix=self.prefix)))