Example #1
def clean_buckets(google_cloud_storage_conn_id, private_bucket, shared_bucket):
    import logging

    hook = GoogleCloudStorageHook(google_cloud_storage_conn_id)
    total = 0

    # clean the entire bucket
    private = [(private_bucket, name) for name in hook.list(private_bucket)]
    shared = [(shared_bucket, name) for name in hook.list(shared_bucket)]

    for bucket_name, object_name in private + shared:
        logging.info(f"Deleting gs://{bucket_name}/{object_name}")
        hook.delete(bucket_name, object_name)
        total += 1
    logging.info(f"Deleted {total} objects")
Example #2
 def poke(self, context):
     self.log.info('Sensor checks existence of objects: %s, %s',
                   self.bucket, self.prefix)
     hook = GoogleCloudStorageHook(
         google_cloud_storage_conn_id=self.google_cloud_conn_id,
         delegate_to=self.delegate_to)
     return bool(hook.list(self.bucket, prefix=self.prefix))
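Note: this poke returns True as soon as list() yields any object under the prefix, which is exactly what a sensor needs. A minimal sketch of a sensor class built around it, assuming Airflow 1.10 import paths; it mirrors Airflow's own prefix sensor:

from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.sensors.base_sensor_operator import BaseSensorOperator


class GcsPrefixSensorSketch(BaseSensorOperator):
    """Waits until at least one object matching the prefix exists."""

    def __init__(self, bucket, prefix,
                 google_cloud_conn_id='google_cloud_default',
                 delegate_to=None, *args, **kwargs):
        super(GcsPrefixSensorSketch, self).__init__(*args, **kwargs)
        self.bucket = bucket
        self.prefix = prefix
        self.google_cloud_conn_id = google_cloud_conn_id
        self.delegate_to = delegate_to

    def poke(self, context):
        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_conn_id,
            delegate_to=self.delegate_to)
        return bool(hook.list(self.bucket, prefix=self.prefix))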
Example #3
    def execute(self, context):
        self.log.info('Exporting data to Cloud Storage bucket ' + self.bucket)

        if self.overwrite_existing and self.namespace:
            gcs_hook = GoogleCloudStorageHook(self.cloud_storage_conn_id)
            objects = gcs_hook.list(self.bucket, prefix=self.namespace)
            for o in objects:
                gcs_hook.delete(self.bucket, o)

        ds_hook = DatastoreHook(self.datastore_conn_id, self.delegate_to)
        result = ds_hook.export_to_storage_bucket(
            bucket=self.bucket,
            namespace=self.namespace,
            entity_filter=self.entity_filter,
            labels=self.labels)
        operation_name = result['name']
        result = ds_hook.poll_operation_until_done(
            operation_name, self.polling_interval_in_seconds)

        state = result['metadata']['common']['state']
        if state != 'SUCCESSFUL':
            raise AirflowException(
                'Operation failed: result={}'.format(result))

        return result
Example #4
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )

        if '*' in self.source_object:
            wildcard_position = self.source_object.index('*')
            objects = hook.list(self.source_bucket,
                                prefix=self.source_object[:wildcard_position],
                                delimiter=self.source_object[wildcard_position + 1:])
            for source_object in objects:
                self.log.info('Executing copy of gs://{0}/{1} to '
                              'gs://{2}/{3}/{1}'.format(self.source_bucket,
                                                        source_object,
                                                        self.destination_bucket,
                                                        self.destination_object))
                hook.copy(self.source_bucket, source_object,
                          self.destination_bucket, "{}/{}".format(self.destination_object,
                                                                  source_object))
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info('Executing copy: %s, %s, %s, %s', self.source_bucket,
                          self.source_object,
                          self.destination_bucket or self.source_bucket,
                          self.destination_object or self.source_object)
            hook.copy(self.source_bucket, self.source_object,
                      self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
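Note: the wildcard handling splits source_object at the '*': everything before it becomes the list() prefix, everything after it the delimiter, which effectively filters for names ending with that suffix. A standalone illustration of the split, with a hypothetical pattern:

source_object = "data/2020-*.csv"  # hypothetical pattern
wildcard_position = source_object.index('*')
prefix = source_object[:wildcard_position]         # "data/2020-"
delimiter = source_object[wildcard_position + 1:]  # ".csv"
# hook.list(bucket, prefix=prefix, delimiter=delimiter) then yields the
# object names starting with the prefix and ending with the delimiter.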
Example #5
    def execute(self, context):
        # use the super to list all files in an Azure Data Lake path
        files = super(AdlsToGoogleCloudStorageOperator, self).execute(context)
        g_hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)

        if not self.replace:
            # if we are not replacing -> list all files already in the GCS
            # destination and only keep those files which are present in
            # ADLS and not in Google Cloud Storage
            bucket_name, prefix = _parse_gcs_url(self.dest_gcs)
            existing_files = g_hook.list(bucket=bucket_name, prefix=prefix)
            files = set(files) - set(existing_files)

        if files:
            hook = AzureDataLakeHook(
                azure_data_lake_conn_id=self.azure_data_lake_conn_id
            )

            for obj in files:
                with NamedTemporaryFile(mode='wb', delete=True) as f:
                    hook.download_file(local_path=f.name, remote_path=obj)
                    f.flush()
                    dest_gcs_bucket, dest_gcs_prefix = _parse_gcs_url(self.dest_gcs)
                    dest_path = os.path.join(dest_gcs_prefix, obj)
                    self.log.info("Saving file to %s", dest_path)

                    g_hook.upload(bucket=dest_gcs_bucket, object=dest_path, filename=f.name)

            self.log.info("All done, uploaded %d files to GCS", len(files))
        else:
            self.log.info("In sync, no files needed to be uploaded to GCS")

        return files
Example #6
    def execute(self, context):
        gcshook = GoogleCloudStorageHook(self.gcp_conn_id)

        self.log.info(gcshook.list("testcovidlinh"))

        for state in self.state_code:
            state_geo = 'US-' + state
            print(state_geo)

            query_results = self.GetQueryVolumes(queries=self.keywords,
                                                 start_date=self.startdate,
                                                 end_date=self.enddate,
                                                 geo=state_geo,
                                                 geo_level='region',
                                                 frequency='day')

            # Write the query results for this state out as a CSV file.
            filename = "tmp/" + state_geo + ".csv"
            with open(filename, "w") as csvfile:
                outwriter = csv.writer(csvfile)

                for row in query_results:
                    outwriter.writerow(row)

            # Upload file to GCS
            object_name = state_geo + "/" + self.startdate + "_" + self.enddate + ".csv"

            gcshook.upload(bucket=self.gcs_bucket,
                           object=object_name,
                           filename=filename)
Example #7
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)

        if '*' in self.source_object:
            wildcard_position = self.source_object.index('*')
            objects = hook.list(
                self.source_bucket,
                prefix=self.source_object[:wildcard_position],
                delimiter=self.source_object[wildcard_position + 1:])
            for source_object in objects:
                self.log.info('Executing copy of gs://{0}/{1} to '
                              'gs://{2}/{3}/{1}'.format(
                                  self.source_bucket, source_object,
                                  self.destination_bucket,
                                  self.destination_object))
                hook.copy(
                    self.source_bucket, source_object, self.destination_bucket,
                    "{}/{}".format(self.destination_object, source_object))
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info('Executing copy: %s, %s, %s, %s', self.source_bucket,
                          self.source_object, self.destination_bucket
                          or self.source_bucket, self.destination_object
                          or self.source_object)
            hook.copy(self.source_bucket, self.source_object,
                      self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example #8
    def execute(self, context):

        self.log.info('Executing copy - Source_Bucket: %s, Source_directory: %s, '
                      'Destination_bucket: %s, Destination_directory: %s',
                      self.source_bucket, self.source_object,
                      self.destination_bucket or self.source_bucket,
                      self.destination_directory or self.source_object)

        hook = GoogleCloudStorageHook(google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
                                      delegate_to=self.delegate_to)

        self.log.info('Getting list of the files to copy. Source Bucket: %s; Source Object: %s',
                      self.source_bucket, self.source_object)

        # Create a list of objects to copy from Source bucket. The function uses prefix keyword to pass the name of
        # the object to copy.
        self.files_to_copy = hook.list(bucket=self.source_bucket, prefix=self.source_object,
                                       delimiter=self.source_files_delimiter)

        # Log the names of all objects to be copied
        self.log.info('Files to copy: %s', self.files_to_copy)

        if self.files_to_copy is not None:
            for file_to_copy in self.files_to_copy:
                self.log.info('Source_Bucket: %s, Source_Object: %s, '
                              'Destination_bucket: %s, Destination_Directory: %s',
                              self.source_bucket, file_to_copy,
                              self.destination_bucket or self.source_bucket,
                              self.destination_directory + file_to_copy)
                hook.copy(self.source_bucket, file_to_copy,
                          self.destination_bucket, self.destination_directory + file_to_copy)
        else:
            self.log.info('No Files to copy.')
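Note: the destination path above is built by plain string concatenation, so destination_directory is expected to carry its own trailing separator. A defensive sketch using posixpath.join instead, with hypothetical values:

import posixpath

destination_directory = "archive"     # hypothetical
file_to_copy = "reports/2020/01.csv"  # hypothetical
dest = posixpath.join(destination_directory, file_to_copy)
# -> "archive/reports/2020/01.csv", with or without a trailing slash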
Example #9
 def poke(self, context):
     self.log.info('Sensor checks existence of objects: %s, %s',
                   self.bucket, self.prefix)
     hook = GoogleCloudStorageHook(
         google_cloud_storage_conn_id=self.google_cloud_conn_id,
         delegate_to=self.delegate_to)
     objects = []
     for prefix in self.prefixes:
         objects.extend(list(hook.list(self.bucket, prefix=prefix)))
     self.log.info(f'Objects list: {objects}')
     names, files, objects = names_match(objects)
     if names:
         ti = context['ti']
         self.__download(hook, objects, files, ti)
         ti.xcom_push(key='names', value=names)
         ti.xcom_push(key='files', value=files)
         ti.xcom_push(key='objects', value=objects)
         for name, fil in zip(names, files):
             ti.xcom_push(key=f'{name}', value=fil)
         self.log.info(
             f'names: {names}\nfiles: {files}\nobjects: {objects}')
         data_timestamp = current_datetime().isoformat()
         ti.xcom_push(key='data_timestamp', value=data_timestamp)
         return True
     return False
Example #10
def do_copy_model_to_final(**kwargs):
    gcs = GoogleCloudStorageHook()

    # Returns all the objects within the bucket. "Folders" are just prefixes
    # of the leaves; list() does not differentiate files from folders.
    all_jobs_files = gcs.list(
        bucket=COMPOSER_BUCKET_NAME,
        prefix='{}/export/estimate'.format(PREFIX_JOBS_EXPORT)
    )

    # Extract the latest model folder, the parent of variables/ and
    # saved_model.pbtxt. max() picks the lexicographically greatest path,
    # which contains the newest timestamped folder; extract it with a regex.
    # ex: jobs/clv-composer/export/estimate/1234567890/variables/variables.index
    # returns /1234567890/
    latest_model_bucket = re.findall(r'/\d+/', max(all_jobs_files))[0]

    # List all the files that need to be copied (only files in the latest
    # folder, skipping entries that are sub-folders rather than files)
    for c in [f for f in all_jobs_files
              if latest_model_bucket in f and f[-1] != '/']:

        # The model used for training is saved into a 'final' sub-folder of
        # the export folder.
        dest_object = c.split(latest_model_bucket)[1]
        dest_object = '{}/{}'.format(PREFIX_FINAL_MODEL, dest_object)

        logging.info("Copying {} to {} ...".format(dest_object, COMPOSER_BUCKET_NAME))

        gcs.copy(
            source_bucket=COMPOSER_BUCKET_NAME,
            source_object=c,
            destination_object=dest_object
        )
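Note: the latest-model lookup relies on max() over the listed names returning the newest timestamped folder, which holds only while the timestamps share the same number of digits; the regex then isolates the folder. A standalone illustration with hypothetical paths:

import re

all_jobs_files = [
    "jobs/clv-composer/export/estimate/1517483700/saved_model.pbtxt",         # hypothetical
    "jobs/clv-composer/export/estimate/1517490000/variables/variables.index"  # hypothetical
]
latest_model_bucket = re.findall(r'/\d+/', max(all_jobs_files))[0]
# -> "/1517490000/"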
Example #11
 def poke(self, context):
     self.log.info('Sensor checks existence of objects: %s, %s',
                   self.bucket, self.prefix)
     hook = GoogleCloudStorageHook(
         google_cloud_storage_conn_id=self.google_cloud_conn_id,
         delegate_to=self.delegate_to)
     return bool(hook.list(self.bucket, prefix=self.prefix))
Example #12
 def poke(self, context):
     self.log.info('Sensor checks existence of: <gs://%s/%s>', self.bucket,
                   self.prefix)
     hook = GoogleCloudStorageHook(
         google_cloud_storage_conn_id=self.google_cloud_conn_id,
         delegate_to=self.delegate_to)
     return len(hook.list(self.bucket, prefix=self.prefix)) > 0
Example #13
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket,
                                prefix=prefix,
                                delimiter=delimiter)

            for source_object in objects:
                # Skip objects that were not modified after last_modified_time
                if self.last_modified_time is not None and \
                        not hook.is_updated_after(self.source_bucket,
                                                  source_object,
                                                  self.last_modified_time):
                    continue
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(
                        prefix, self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket,
                                       destination_object))

                hook.rewrite(self.source_bucket, source_object,
                             self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            # Skip the copy if the object was not modified after
            # last_modified_time
            if self.last_modified_time is not None and \
                    not hook.is_updated_after(self.source_bucket,
                                              self.source_object,
                                              self.last_modified_time):
                return

            self.log.info(
                log_message.format(
                    self.source_bucket, self.source_object,
                    self.destination_bucket or self.source_bucket,
                    self.destination_object or self.source_object))
            hook.rewrite(self.source_bucket, self.source_object,
                         self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example #14
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)

            for source_object in objects:
                # Skip objects that were not modified after last_modified_time
                if self.last_modified_time is not None and \
                        not hook.is_updated_after(self.source_bucket,
                                                  source_object,
                                                  self.last_modified_time):
                    continue
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(prefix,
                                                               self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket, destination_object)
                )

                hook.rewrite(self.source_bucket, source_object,
                             self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            # Skip the copy if the object was not modified after
            # last_modified_time
            if self.last_modified_time is not None and \
                    not hook.is_updated_after(self.source_bucket,
                                              self.source_object,
                                              self.last_modified_time):
                return

            self.log.info(
                log_message.format(self.source_bucket, self.source_object,
                                   self.destination_bucket or self.source_bucket,
                                   self.destination_object or self.source_object)
            )
            hook.rewrite(self.source_bucket, self.source_object,
                         self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example #15
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )

        self.log.info('Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
                      self.bucket, self.delimiter, self.prefix)

        return hook.list(bucket=self.bucket,
                         prefix=self.prefix,
                         delimiter=self.delimiter)
Example #16
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )

        self.log.info('Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
                      self.bucket, self.delimiter, self.prefix)

        return hook.list(bucket=self.bucket,
                         prefix=self.prefix,
                         delimiter=self.delimiter)
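Note: because execute() returns the listing, the operator's result is pushed to XCom automatically and can be pulled downstream. A minimal consuming sketch, with a hypothetical task ID:

def use_listing(**context):
    # Pull the list returned by the GCS list task (task ID is hypothetical)
    names = context['ti'].xcom_pull(task_ids='list_gcs_files')
    for name in names:
        print(name)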
Example #17
    def execute(self, context):
        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to)

        if self.objects:
            objects = self.objects
        else:
            objects = hook.list(bucket_name=self.bucket_name,
                                prefix=self.prefix)

        self.log.info("Deleting %s objects from %s", len(objects),
                      self.bucket_name)
        for object_name in objects:
            hook.delete(bucket_name=self.bucket_name, object_name=object_name)
Example #18
    def execute(self, context):
        self.log.info('Executing download: %s, %s, %s', self.bucket,
                      self.prefix, self.directory)
        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)

        downloaded_files = []
        for object in hook.list(bucket=self.bucket, prefix=self.prefix):
            self.log.info('Downloading object: %s', object)
            filename = os.path.join(self.directory, object.replace('/', '_'))
            hook.download(bucket=self.bucket, object=object, filename=filename)
            downloaded_files.append(filename)

        task_instance = context['task_instance']
        task_instance.xcom_push('downloaded_files', downloaded_files)
Example #19
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to)

        self.log.info(
            'Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
            self.bucket, self.delimiter, self.prefix)
        listed = hook.list(bucket=self.bucket,
                           prefix=self.prefix,
                           delimiter=self.delimiter)
        object = listed[0]
        full_path = 'gs://{bucket}/{object}'.format(bucket=self.bucket,
                                                    object=object)
        context['task_instance'].xcom_push(key='open_air_input_file_path',
                                           value=full_path)
Example #20
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to,
        )

        if self.destination_bucket is None:
            self.log.warning(
                "destination_bucket is None. Defaulting it to source_bucket (%s)",
                self.source_bucket,
            )
            self.destination_bucket = self.source_bucket

        if WILDCARD in self.source_object:
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                error_msg = (
                    "Only one wildcard '*' is allowed in source_object parameter. "
                    "Found {} in {}.".format(total_wildcards,
                                             self.source_object))

                raise AirflowException(error_msg)

            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = hook.list(self.source_bucket,
                                prefix=prefix,
                                delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = self.destination_object

                self._copy_single_object(
                    hook=hook,
                    source_object=source_object,
                    destination_object=destination_object,
                )
        else:
            self._copy_single_object(
                hook=hook,
                source_object=self.source_object,
                destination_object=self.destination_object,
            )
Example #21
    def execute(self, context):
        gcshook = GoogleCloudStorageHook(self.gcp_conn_id)
        self.log.info(gcshook.list("testcovidlinh"))     

        # Create a temporary folder for the downloaded JSON files
        if not path.exists("tmp"):
            os.mkdir("tmp")
        
        # Track failure 
        failure_count = 0

        # Passing filename to next job
        file_list = []

        # Consume API
        for state in self.state_code: 
            URL = "https://covidtracking.com/api/v1/states/" + state.lower() + "/daily.json"
            # self.log.info(URL)
            response = requests.get(URL).json()
            
            try:
                # On error the API returns a dict with a "message" key
                self.log.info(response["message"])
                failure_count += 1
                continue
            except (KeyError, TypeError):
                # Successful responses are lists of per-day records
                filename = "tmp/" + state + ".json"
                with open(filename, 'w', encoding='utf-8') as f:
                    # Write newline-delimited JSON, one record per line
                    dict2str = [json.dumps(i, sort_keys=True) for i in response]
                    json_output = "\n".join(dict2str)
                    f.write(json_output)
                
                object_name = 'US-' + state + "/" + "covidstat.json"
                file_list.append(object_name)
                gcshook.upload(bucket=self.gcs_bucket, object=object_name, filename=filename)
        
        self.log.info("Number of failure cases: "+str(failure_count))

        task_instance = context['task_instance']
        task_instance.xcom_push(self.xcom_task_id_key, file_list)
Example #22
    def execute(self, context):
        gcs_hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )
        s3_hook = S3Hook(aws_conn_id=self.dest_aws_conn_id, verify=self.dest_verify)

        gcs_source_objects = gcs_hook.list(bucket=self.gcs_source_bucket, prefix=self.gcs_source_prefix, maxResults=1000)
        if not gcs_source_objects:
            self.log.warning('SKIP: No objects found matching the prefix "%s"', self.gcs_source_prefix)
            return

        self.log.info('Number of objects to copy: %d', len(gcs_source_objects))

        for gcs_uri in gcs_source_objects:
            tmp = tempfile.NamedTemporaryFile()
            if gcs_hook.exists(self.gcs_source_bucket, gcs_uri) is False:
                if self.fail_on_missing is True:
                    self.log.error('Execution will fail Object not found: gs://%s/%s', self.gcs_source_bucket, gcs_uri)
                    self.is_failed = True
                else:
                    self.log.warning('Skipping. Object not found: gs://%s/%s', self.gcs_source_bucket, gcs_uri)
                continue

            self.log.info('Download gs://%s/%s', self.gcs_source_bucket, gcs_uri)
            gcs_hook.download(
                bucket=self.gcs_source_bucket,
                object=gcs_uri,
                filename=tmp.name
            )
            self.log.info('Upload s3://%s/%s', self.s3_destination_bucket, gcs_uri)
            s3_hook.load_file(
                filename=tmp.name,
                bucket_name=self.s3_destination_bucket,
                key=gcs_uri,
                replace=True,
                acl_policy=self.s3_acl_policy
            )
            tmp.close()

        if self.is_failed:
            raise AirflowException('Some objects were not found at the source.')
Example #23
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to)
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket,
                                prefix=prefix,
                                delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(
                        prefix, self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket,
                                       destination_object))

                hook.copy(self.source_bucket, source_object,
                          self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info(
                log_message.format(
                    self.source_bucket, self.source_object,
                    self.destination_bucket or self.source_bucket,
                    self.destination_object or self.source_object))
            hook.copy(self.source_bucket, self.source_object,
                      self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example #24
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )
        log_message = 'Executing copy of gs://{0}/{1} to gs://{2}/{3}'

        if self.wildcard in self.source_object:
            prefix, delimiter = self.source_object.split(self.wildcard, 1)
            objects = hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(prefix,
                                                               self.destination_object, 1)
                self.log.info(
                    log_message.format(self.source_bucket, source_object,
                                       self.destination_bucket, destination_object)
                )

                hook.rewrite(self.source_bucket, source_object,
                             self.destination_bucket, destination_object)
                if self.move_object:
                    hook.delete(self.source_bucket, source_object)

        else:
            self.log.info(
                log_message.format(self.source_bucket, self.source_object,
                                   self.destination_bucket or self.source_bucket,
                                   self.destination_object or self.source_object)
            )
            hook.rewrite(self.source_bucket, self.source_object,
                         self.destination_bucket, self.destination_object)

            if self.move_object:
                hook.delete(self.source_bucket, self.source_object)
Example #25
def do_list_predictions_files(**kwargs):
    """ Retrieves all the predictions files that should be loaded to BigQuery.
    Can not do a GoogleCloudStorageToBigQueryOperator directly due to the possible
    multiple files.
    """
    # List all relevant files
    # TODO Add when Composer is on Airflow 2.0
    # predictions_files = gcs_list_operator.GoogleCloudStorageListOperator(
    #     task_id='predictions_files',
    #     bucket=COMPOSER_BUCKET_NAME,
    #     prefix='predictions/output/prediction.results-'
    # )
    # TODO Remove when Composer on Airflow 2.0
    gcs = GoogleCloudStorageHook()
    predictions_files = gcs.list(
        bucket=COMPOSER_BUCKET_NAME,
        prefix='predictions/output/prediction.results-')

    logging.info("Predictions files are: {}".format(predictions_files))

    # Create a variable that can be used in the next task
    kwargs['ti'].xcom_push(key='predictions_files', value=predictions_files)
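Note: the pushed list is meant to be consumed by a downstream task via xcom_pull. A minimal sketch of such a consumer, assuming logging is imported as in the example above; the upstream task ID is hypothetical:

def do_use_predictions_files(**kwargs):
    # Pull the file list pushed above (the task ID is hypothetical)
    predictions_files = kwargs['ti'].xcom_pull(
        task_ids='list_predictions_files', key='predictions_files')
    for f in predictions_files:
        logging.info("Prediction file to load: %s", f)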
Example #26
    def execute(self, context):
        self.log.info('Exporting data to Cloud Storage bucket ' + self.bucket)

        if self.overwrite_existing and self.namespace:
            gcs_hook = GoogleCloudStorageHook(self.cloud_storage_conn_id)
            objects = gcs_hook.list(self.bucket, prefix=self.namespace)
            for o in objects:
                gcs_hook.delete(self.bucket, o)

        ds_hook = DatastoreHook(self.datastore_conn_id, self.delegate_to)
        result = ds_hook.export_to_storage_bucket(bucket=self.bucket,
                                                  namespace=self.namespace,
                                                  entity_filter=self.entity_filter,
                                                  labels=self.labels)
        operation_name = result['name']
        result = ds_hook.poll_operation_until_done(operation_name,
                                                   self.polling_interval_in_seconds)

        state = result['metadata']['common']['state']
        if state != 'SUCCESSFUL':
            raise AirflowException('Operation failed: result={}'.format(result))

        return result
Example #27
    def check_data(**context):
        """Check if images exist.
        """

        # skip if testing workflow
        if TEST_MODE:
            return

        source = context["dag_run"].conf.get("source")
        image = context["dag_run"].conf.get("image")
        minz = context["dag_run"].conf.get("minz")
        maxz = context["dag_run"].conf.get("maxz")

        # grab all files in raw
        ghook = GoogleCloudStorageHook()  # uses default gcp connection
        file_names = ghook.list(source, prefix="")
        file_names = set(file_names)

        for slice in range(minz, maxz):
            if (image % slice) not in file_names:
                raise AirflowException(
                    f"raw data not loaded properly. Missing raw/{image % slice}"
                )
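Note: the membership test relies on old-style %-formatting: image is expected to be a filename template with a numeric placeholder that each slice index is substituted into. A standalone illustration with a hypothetical template:

image = "image.%05d.png"  # hypothetical template from dag_run.conf
print(image % 42)         # -> image.00042.png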
Example #28
    def execute(self, context):

        hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to
        )

        if self.destination_bucket is None:
            self.log.warning(
                'destination_bucket is None. Defaulting it to source_bucket (%s)',
                self.source_bucket)
            self.destination_bucket = self.source_bucket

        if WILDCARD in self.source_object:
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                error_msg = "Only one wildcard '*' is allowed in source_object parameter. " \
                            "Found {} in {}.".format(total_wildcards, self.source_object)

                raise AirflowException(error_msg)

            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = hook.list(self.source_bucket, prefix=prefix, delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(prefix,
                                                               self.destination_object, 1)

                self._copy_single_object(hook=hook, source_object=source_object,
                                         destination_object=destination_object)
        else:
            self._copy_single_object(hook=hook, source_object=self.source_object,
                                     destination_object=self.destination_object)
Example #29
 def execute(self, context):
     gcshook = GoogleCloudStorageHook(self.gcp_conn_id)
     self.log.info(gcshook.list("testcovidlinh"))
Example #30
 def poke(self, context):
     hook = GoogleCloudStorageHook()
     return self.is_bucket_updated(
         len(hook.list(self.bucket, prefix=self.prefix)))
Example #31
    def execute(self, context):
        # use the super method to list all the files in an S3 bucket/key
        files = super(S3ToGoogleCloudStorageOperator, self).execute(context)

        gcs_hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.dest_gcs_conn_id,
            delegate_to=self.delegate_to)

        if not self.replace:
            # if we are not replacing -> list all files in the GCS bucket
            # and only keep those files which are present in
            # S3 and not in Google Cloud Storage
            bucket_name, object_prefix = _parse_gcs_url(self.dest_gcs)
            existing_files_prefixed = gcs_hook.list(bucket_name,
                                                    prefix=object_prefix)

            existing_files = []

            if existing_files_prefixed:
                # Remove the object prefix itself, an empty directory was found
                if object_prefix in existing_files_prefixed:
                    existing_files_prefixed.remove(object_prefix)

                # Remove the object prefix from all object string paths
                for f in existing_files_prefixed:
                    if f.startswith(object_prefix):
                        existing_files.append(f[len(object_prefix):])
                    else:
                        existing_files.append(f)

            files = list(set(files) - set(existing_files))
            if len(files) > 0:
                self.log.info('{0} files are going to be synced: {1}.'.format(
                    len(files), files))
            else:
                self.log.info(
                    'There are no new files to sync. Have a nice day!')

        if files:
            hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)

            for file in files:
                # GCS hook builds its own in-memory file so we have to create
                # and pass the path
                file_object = hook.get_key(file, self.bucket)
                with NamedTemporaryFile(mode='wb', delete=True) as f:
                    file_object.download_fileobj(f)
                    f.flush()

                    dest_gcs_bucket, dest_gcs_object_prefix = _parse_gcs_url(
                        self.dest_gcs)
                    # There will always be a '/' before file because it is
                    # enforced at instantiation time
                    dest_gcs_object = dest_gcs_object_prefix + file

                    # Sync is sequential and the hook already logs too much
                    # so skip this for now
                    # self.log.info(
                    #     'Saving file {0} from S3 bucket {1} in GCS bucket {2}'
                    #     ' as object {3}'.format(file, self.bucket,
                    #                             dest_gcs_bucket,
                    #                             dest_gcs_object))

                    gcs_hook.upload(dest_gcs_bucket, dest_gcs_object, f.name)

            self.log.info(
                "All done, uploaded %d files to Google Cloud Storage",
                len(files))
        else:
            self.log.info(
                'In sync, no files needed to be uploaded to Google Cloud '
                'Storage')

        return files
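Note: the sync works by stripping the GCS object prefix so the listed names become comparable with the S3 keys, then taking a set difference. A compact illustration with hypothetical names (all assumed to start with the prefix):

object_prefix = "backup/"  # hypothetical
existing_files_prefixed = ["backup/", "backup/a.csv"]
existing_files = [f[len(object_prefix):]
                  for f in existing_files_prefixed if f != object_prefix]
files = sorted({"a.csv", "c.csv"} - set(existing_files))
# -> ["c.csv"]; only c.csv still needs to be uploaded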
Example #32
def list_objects(bucket=None):
    hook = GoogleCloudStorageHook()
    storage_objects = hook.list(bucket)
    # The returned list is automatically pushed to XCom by the PythonOperator
    return storage_objects
Example #33
def list_gcs_objects():
    hook_gcs = GoogleCloudStorageHook("gcs_connection_hook")
    list_of_objs = hook_gcs.list(bucket="zdatasets1")
    return str(list_of_objs)
Example #34
def list_objects(bucket=None):
    hook = GoogleCloudStorageHook()
    storage_objects = hook.list(bucket)

    return storage_objects
Example #35
 def poke(self, context):
     hook = GoogleCloudStorageHook()
     return self.is_bucket_updated(len(hook.list(self.bucket, prefix=self.prefix)))
Example #36
class GcsToGDriveOperator(BaseOperator):
    """
    Copies objects from a Google Cloud Storage service to a Google Drive service, with
    renaming if requested.

    Using this operator requires the following OAuth 2.0 scope:

    .. code-block:: none

        https://www.googleapis.com/auth/drive

    :param source_bucket: The source Google Cloud Storage bucket where the object is. (templated)
    :type source_bucket: str
    :param source_object: The source name of the object to copy in the Google cloud
        storage bucket. (templated)
        You can use only one wildcard for objects (filenames) within your bucket. The wildcard can appear
        inside the object name or at the end of the object name. Appending a wildcard to the bucket name
        is unsupported.
    :type source_object: str
    :param destination_object: The destination name of the object in the destination Google Drive
        service. (templated)
        If a wildcard is supplied in the source_object argument, this is the prefix that will be prepended
        to the final destination objects' paths.
        Note that the source path's part before the wildcard will be removed;
        if it needs to be retained it should be appended to destination_object.
        For example, with prefix ``foo/*`` and destination_object ``blah/``, the file ``foo/baz`` will be
        copied to ``blah/baz``; to retain the prefix write the destination_object as e.g. ``blah/foo``, in
        which case the copied file will be named ``blah/foo/baz``.
    :type destination_object: str
    :param move_object: When move object is True, the object is moved instead of copied to the new location.
        This is the equivalent of a mv command as opposed to a cp command.
    :type move_object: bool
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform.
    :type gcp_conn_id: str
    :param delegate_to: The account to impersonate, if any.
        For this to work, the service account making the request must have domain-wide delegation enabled.
    :type delegate_to: str
    """

    template_fields = ("source_bucket", "source_object", "destination_object")
    ui_color = "#f0eee4"

    @apply_defaults
    def __init__(self,
                 source_bucket,
                 source_object,
                 destination_object=None,
                 move_object=False,
                 gcp_conn_id="google_cloud_default",
                 delegate_to=None,
                 *args,
                 **kwargs):
        super(GcsToGDriveOperator, self).__init__(*args, **kwargs)

        self.source_bucket = source_bucket
        self.source_object = source_object
        self.destination_object = destination_object
        self.move_object = move_object
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.gcs_hook = None  # type: Optional[GoogleCloudStorageHook]
        self.gdrive_hook = None  # type: Optional[GoogleDriveHook]

    def execute(self, context):

        self.gcs_hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to)
        self.gdrive_hook = GoogleDriveHook(gcp_conn_id=self.gcp_conn_id,
                                           delegate_to=self.delegate_to)

        if WILDCARD in self.source_object:
            total_wildcards = self.source_object.count(WILDCARD)
            if total_wildcards > 1:
                error_msg = (
                    "Only one wildcard '*' is allowed in source_object parameter. "
                    "Found {} in {}.".format(total_wildcards,
                                             self.source_object))

                raise AirflowException(error_msg)

            prefix, delimiter = self.source_object.split(WILDCARD, 1)
            objects = self.gcs_hook.list(self.source_bucket,
                                         prefix=prefix,
                                         delimiter=delimiter)

            for source_object in objects:
                if self.destination_object is None:
                    destination_object = source_object
                else:
                    destination_object = source_object.replace(
                        prefix, self.destination_object, 1)

                self._copy_single_object(source_object=source_object,
                                         destination_object=destination_object)
        else:
            self._copy_single_object(
                source_object=self.source_object,
                destination_object=self.destination_object)

    def _copy_single_object(self, source_object, destination_object):
        self.log.info(
            "Executing copy of gs://%s/%s to gdrive://%s",
            self.source_bucket,
            source_object,
            destination_object,
        )

        with tempfile.NamedTemporaryFile() as file:
            filename = file.name
            self.gcs_hook.download(bucket=self.source_bucket,
                                   object=source_object,
                                   filename=filename)
            self.gdrive_hook.upload_file(local_location=filename,
                                         remote_location=destination_object)

        if self.move_object:
            self.gcs_hook.delete(self.source_bucket, source_object)
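Note: a minimal instantiation sketch for the operator above, following the wildcard semantics described in its docstring; the task ID, bucket, and object names are hypothetical, and a pre-existing dag object is assumed:

copy_to_drive = GcsToGDriveOperator(
    task_id="copy_exports_to_drive",
    source_bucket="my-models-bucket",        # hypothetical
    source_object="exports/foo/*",           # at most one wildcard allowed
    destination_object="drive-backups/foo",  # prefix prepended on Drive
    move_object=False,
    dag=dag,  # assumes an existing DAG instance
)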
Example #37
    def execute(self, context):
        # use the super method to list all the files in an S3 bucket/key
        files = super().execute(context)

        gcs_hook = GoogleCloudStorageHook(
            google_cloud_storage_conn_id=self.dest_gcs_conn_id,
            delegate_to=self.delegate_to)

        if not self.replace:
            # if we are not replacing -> list all files in the GCS bucket
            # and only keep those files which are present in
            # S3 and not in Google Cloud Storage
            bucket_name, object_prefix = _parse_gcs_url(self.dest_gcs)
            existing_files_prefixed = gcs_hook.list(
                bucket_name, prefix=object_prefix)

            existing_files = []

            if existing_files_prefixed:
                # Remove the object prefix itself, an empty directory was found
                if object_prefix in existing_files_prefixed:
                    existing_files_prefixed.remove(object_prefix)

                # Remove the object prefix from all object string paths
                for f in existing_files_prefixed:
                    if f.startswith(object_prefix):
                        existing_files.append(f[len(object_prefix):])
                    else:
                        existing_files.append(f)

            files = list(set(files) - set(existing_files))
            if len(files) > 0:
                self.log.info(
                    '%s files are going to be synced: %s.', len(files), files
                )
            else:
                self.log.info(
                    'There are no new files to sync. Have a nice day!')

        if files:
            hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)

            for file in files:
                # GCS hook builds its own in-memory file so we have to create
                # and pass the path
                file_object = hook.get_key(file, self.bucket)
                with NamedTemporaryFile(mode='wb', delete=True) as f:
                    file_object.download_fileobj(f)
                    f.flush()

                    dest_gcs_bucket, dest_gcs_object_prefix = _parse_gcs_url(
                        self.dest_gcs)
                    # There will always be a '/' before file because it is
                    # enforced at instantiation time
                    dest_gcs_object = dest_gcs_object_prefix + file

                    # Sync is sequential and the hook already logs too much
                    # so skip this for now
                    # self.log.info(
                    #     'Saving file {0} from S3 bucket {1} in GCS bucket {2}'
                    #     ' as object {3}'.format(file, self.bucket,
                    #                             dest_gcs_bucket,
                    #                             dest_gcs_object))

                    gcs_hook.upload(dest_gcs_bucket, dest_gcs_object, f.name)

            self.log.info(
                "All done, uploaded %d files to Google Cloud Storage",
                len(files))
        else:
            self.log.info(
                'In sync, no files needed to be uploaded to Google Cloud '
                'Storage')

        return files