Example #1
    def execute(self, context):
        """Execute the Apache Beam Pipeline."""
        self.beam_hook = BeamHook(runner=self.runner)
        pipeline_options = self.default_pipeline_options.copy()
        process_line_callback: Optional[Callable] = None
        is_dataflow = self.runner.lower() == BeamRunnerType.DataflowRunner.lower()
        dataflow_job_name: Optional[str] = None

        if isinstance(self.dataflow_config, dict):
            self.dataflow_config = DataflowConfiguration(
                **self.dataflow_config)

        if is_dataflow:
            dataflow_job_name, pipeline_options, process_line_callback = self._set_dataflow(
                pipeline_options=pipeline_options,
                job_name_variable_key="job_name")

        pipeline_options.update(self.pipeline_options)

        # Convert argument names from lowerCamelCase to snake case.
        formatted_pipeline_options = {
            convert_camel_to_snake(key): pipeline_options[key]
            for key in pipeline_options
        }

        with ExitStack() as exit_stack:
            if self.py_file.lower().startswith("gs://"):
                gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
                    gcs_hook.provide_file(object_url=self.py_file))
                self.py_file = tmp_gcs_file.name

            self.beam_hook.start_python_pipeline(
                variables=formatted_pipeline_options,
                py_file=self.py_file,
                py_options=self.py_options,
                py_interpreter=self.py_interpreter,
                py_requirements=self.py_requirements,
                py_system_site_packages=self.py_system_site_packages,
                process_line_callback=process_line_callback,
            )

            if is_dataflow:
                self.dataflow_hook.wait_for_done(  # pylint: disable=no-value-for-parameter
                    job_name=dataflow_job_name,
                    location=self.dataflow_config.location,
                    job_id=self.dataflow_job_id,
                    multiple_jobs=False,
                )

        return {"dataflow_job_id": self.dataflow_job_id}
Example #2
    def execute(self, context) -> List[str]:
        # list all files in a Google Cloud Storage bucket
        hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.google_impersonation_chain,
        )

        self.log.info(
            'Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
            self.bucket,
            self.delimiter,
            self.prefix,
        )

        files = hook.list(bucket_name=self.bucket, prefix=self.prefix, delimiter=self.delimiter)

        s3_hook = S3Hook(
            aws_conn_id=self.dest_aws_conn_id, verify=self.dest_verify, extra_args=self.dest_s3_extra_args
        )

        if not self.replace:
            # if we are not replacing, list all files in the S3 bucket
            # and keep only those present in Google Cloud Storage but not in S3
            bucket_name, prefix = S3Hook.parse_s3_url(self.dest_s3_key)
            # restrict the listing to the bucket and prefix to avoid looking
            # into parent directories/keys
            existing_files = s3_hook.list_keys(bucket_name, prefix=prefix)
            # if no files exist yet, fall back to an empty list to avoid errors
            existing_files = existing_files if existing_files is not None else []
            # strip the prefix from the existing keys so the names can be matched
            existing_files = [file.replace(prefix, '', 1) for file in existing_files]
            files = list(set(files) - set(existing_files))

        if files:
            for file in files:
                file_bytes = hook.download(object_name=file, bucket_name=self.bucket)

                dest_key = self.dest_s3_key + file
                self.log.info("Saving file to %s", dest_key)

                s3_hook.load_bytes(
                    cast(bytes, file_bytes), key=dest_key, replace=self.replace, acl_policy=self.s3_acl_policy
                )

            self.log.info("All done, uploaded %d files to S3", len(files))
        else:
            self.log.info("In sync, no files needed to be uploaded to S3")

        return files
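Listing the destination first and copying only the set difference keeps re-runs idempotent when replace is False. A standalone sketch of the same diff-then-copy idea at hook level; the function name, bucket, and URL are placeholders:

from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.google.cloud.hooks.gcs import GCSHook

def sync_gcs_to_s3(gcs_bucket: str, dest_s3_url: str) -> list:
    """Copy only the GCS objects not yet present under the S3 prefix."""
    gcs_hook = GCSHook()
    s3_hook = S3Hook()
    bucket_name, prefix = S3Hook.parse_s3_url(dest_s3_url)
    existing = {key.replace(prefix, '', 1) for key in (s3_hook.list_keys(bucket_name, prefix=prefix) or [])}
    missing = set(gcs_hook.list(bucket_name=gcs_bucket)) - existing
    for file in missing:
        # download returns bytes when no filename is given; load_bytes parses the full s3:// key
        s3_hook.load_bytes(gcs_hook.download(bucket_name=gcs_bucket, object_name=file), key=dest_s3_url + file)
    return list(missing)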
Example #3
    def execute(self, context):

        hook = GCSHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to
        )

        self.log.info('Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
                      self.bucket, self.delimiter, self.prefix)

        return hook.list(bucket_name=self.bucket,
                         prefix=self.prefix,
                         delimiter=self.delimiter)
Example #4
 def execute(self, context: "Context") -> None:
     hook = GCSHook(
         gcp_conn_id=self.gcp_conn_id,
         impersonation_chain=self.impersonation_chain,
     )
     hook.insert_object_acl(
         bucket_name=self.bucket,
         object_name=self.object_name,
         entity=self.entity,
         role=self.role,
         generation=self.generation,
         user_project=self.user_project,
     )
Example #5
 def execute(self, context):
     hook = GCSHook(
         google_cloud_storage_conn_id=self.gcp_conn_id,
         delegate_to=self.delegate_to,
         impersonation_chain=self.impersonation_chain,
     )
     hook.sync(source_bucket=self.source_bucket,
               destination_bucket=self.destination_bucket,
               source_object=self.source_object,
               destination_object=self.destination_object,
               recursive=self.recursive,
               delete_extra_files=self.delete_extra_files,
               allow_overwrite=self.allow_overwrite)
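A hedged usage sketch, assuming the method belongs to GCSSynchronizeBucketsOperator from the Google provider; bucket names are placeholders:

from airflow.providers.google.cloud.operators.gcs import GCSSynchronizeBucketsOperator

sync_buckets = GCSSynchronizeBucketsOperator(
    task_id="sync_buckets",
    source_bucket="my-source-bucket",       # placeholder
    destination_bucket="my-backup-bucket",  # placeholder
    delete_extra_files=False,  # keep objects that exist only in the destination
    allow_overwrite=True,
)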
Example #6
 def _upload_to_gcs(self, files_to_upload):
     """
     Upload all of the file splits (and optionally the schema .json file) to
     Google Cloud Storage.
     """
     hook = GCSHook(
         gcp_conn_id=self.gcp_conn_id,
         delegate_to=self.delegate_to)
     for tmp_file in files_to_upload:
         hook.upload(self.bucket, tmp_file.get('file_name'),
                     tmp_file.get('file_handle').name,
                     mime_type=tmp_file.get('file_mime_type'),
                     gzip=self.gzip if tmp_file.get('file_name') != self.schema_filename else False)
Example #7
    @staticmethod
    def deserialize_value(result) -> Any:
        result = BaseXCom.deserialize_value(result)

        if isinstance(result, str) and result.startswith(
                GCSXComBackend.PREFIX):
            object_name = result.replace(GCSXComBackend.PREFIX, "")
            hook = GCSHook()

            with hook.provide_file(bucket_name=GCSXComBackend.BUCKET_NAME,
                                   object_name=object_name) as f:
                f.flush()
                result = pickle.load(f)

        return result
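A sketch of the matching serialize_value counterpart for such a GCS-backed XCom backend. The PREFIX and BUCKET_NAME values here are placeholder assumptions mirroring the attributes referenced above; GCSHook.upload accepts raw bytes via its data argument:

import pickle
import uuid
from typing import Any

from airflow.models.xcom import BaseXCom
from airflow.providers.google.cloud.hooks.gcs import GCSHook

class GCSXComBackend(BaseXCom):
    PREFIX = "gcs_xcom://"          # placeholder scheme
    BUCKET_NAME = "my-xcom-bucket"  # placeholder bucket

    @staticmethod
    def serialize_value(value: Any):
        object_name = "xcom/" + str(uuid.uuid4()) + ".pickle"
        GCSHook().upload(
            bucket_name=GCSXComBackend.BUCKET_NAME,
            object_name=object_name,
            data=pickle.dumps(value),
        )
        # only the short reference string is stored in the metadata database
        return BaseXCom.serialize_value(GCSXComBackend.PREFIX + object_name)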
Example #8
 def execute(self, context: "Context") -> None:
     hook = GCSHook(
         gcp_conn_id=self.gcp_conn_id,
         impersonation_chain=self.impersonation_chain,
     )
     StorageLink.persist(
         context=context,
         task_instance=self,
         uri=self.bucket,
         project_id=hook.project_id,
     )
     hook.insert_bucket_acl(
         bucket_name=self.bucket, entity=self.entity, role=self.role, user_project=self.user_project
     )
Example #9
 def _upload_to_gcs(self, files_to_upload: Dict[str, Any]):
     hook = GCSHook(
         google_cloud_storage_conn_id=self.gcp_conn_id,
         delegate_to=self.delegate_to,
         impersonation_chain=self.impersonation_chain,
     )
     for obj, tmp_file_handle in files_to_upload.items():
         hook.upload(
             bucket_name=self.bucket,
             object_name=obj,
             filename=tmp_file_handle.name,
             mime_type='application/json',
             gzip=self.gzip,
         )
Example #10
 def _upload_to_gcs(self, file_to_upload):
     """Upload a file (data split or schema .json file) to Google Cloud Storage."""
     hook = GCSHook(
         gcp_conn_id=self.gcp_conn_id,
         delegate_to=self.delegate_to,
         impersonation_chain=self.impersonation_chain,
     )
     hook.upload(
         self.bucket,
         file_to_upload.get('file_name'),
         file_to_upload.get('file_handle').name,
         mime_type=file_to_upload.get('file_mime_type'),
         gzip=self.gzip if file_to_upload.get('file_name') != self.schema_filename else False,
     )
Example #11
    def execute(self, context):
        """
        Uploads the file to Google Cloud Storage
        """
        hook = GCSHook(google_cloud_storage_conn_id=self.gcp_conn_id,
                       delegate_to=self.delegate_to)

        hook.upload(
            bucket_name=self.bucket,
            object_name=self.dst,
            mime_type=self.mime_type,
            filename=self.src,
            gzip=self.gzip,
        )
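This execute() has the shape of LocalFilesystemToGCSOperator. A minimal usage sketch; the paths and bucket name are placeholders:

from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator

upload_file = LocalFilesystemToGCSOperator(
    task_id="upload_file",
    src="/tmp/report.csv",     # placeholder local path
    dst="reports/report.csv",  # placeholder object name
    bucket="my-bucket",        # placeholder
    gzip=False,
)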
Example #12
    def execute(self, context):
        hook = GCSHook(google_cloud_storage_conn_id=self.gcp_conn_id,
                       delegate_to=self.delegate_to)

        if self.objects:
            objects = self.objects
        else:
            objects = hook.list(bucket_name=self.bucket_name,
                                prefix=self.prefix)

        self.log.info("Deleting %s objects from %s", len(objects),
                      self.bucket_name)
        for object_name in objects:
            hook.delete(bucket_name=self.bucket_name, object_name=object_name)
Example #13
 def execute(self, context):
     hook = GCSHook(
         google_cloud_storage_conn_id=self.gcp_conn_id,
         delegate_to=self.delegate_to
     )
     try:
         hook.create_bucket(bucket_name=self.bucket_name,
                            resource=self.resource,
                            storage_class=self.storage_class,
                            location=self.location,
                            project_id=self.project_id,
                            labels=self.labels)
     except Conflict:  # HTTP 409
         self.log.warning("Bucket %s already exists", self.bucket_name)
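Catching Conflict (HTTP 409) makes bucket creation safe to retry. The corresponding ready-made operator, sketched with placeholder names:

from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator

create_bucket = GCSCreateBucketOperator(
    task_id="create_bucket",
    bucket_name="my-new-bucket",  # placeholder
    storage_class="STANDARD",
    location="EU",
    project_id="my-project",      # placeholder
)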
Example #14
 def _upload_to_gcs(self, file_to_upload):
     """Upload a file (data split or schema .json file) to Google Cloud Storage."""
     hook = GCSHook(
         gcp_conn_id=self.gcp_conn_id,
         delegate_to=self.delegate_to,
         impersonation_chain=self.impersonation_chain,
     )
     hook.upload(
         bucket_name=self.bucket,
         object_name=file_to_upload.get('file_name'),
         filename=file_to_upload.get('file_handle').name,
         mime_type='application/json',
         gzip=self.gzip,
     )
Example #15
    def execute(self, context):
        self.hook = DataflowHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            poll_sleep=self.poll_sleep,
            cancel_timeout=self.cancel_timeout,
            wait_until_finished=self.wait_until_finished,
        )
        dataflow_options = copy.copy(self.dataflow_default_options)
        dataflow_options.update(self.options)
        is_running = False
        if self.check_if_running != CheckJobRunning.IgnoreJob:
            is_running = self.hook.is_job_dataflow_running(  # type: ignore[attr-defined]
                name=self.job_name,
                variables=dataflow_options,
                project_id=self.project_id,
                location=self.location,
            )
            while is_running and self.check_if_running == CheckJobRunning.WaitForRun:
                is_running = self.hook.is_job_dataflow_running(  # type: ignore[attr-defined]
                    name=self.job_name,
                    variables=dataflow_options,
                    project_id=self.project_id,
                    location=self.location,
                )

        if not is_running:
            with ExitStack() as exit_stack:
                if self.jar.lower().startswith("gs://"):
                    gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
                    tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
                        gcs_hook.provide_file(object_url=self.jar)
                    )
                    self.jar = tmp_gcs_file.name

                def set_current_job_id(job_id):
                    self.job_id = job_id

                self.hook.start_java_dataflow(  # type: ignore[attr-defined]
                    job_name=self.job_name,
                    variables=dataflow_options,
                    jar=self.jar,
                    job_class=self.job_class,
                    append_job_name=True,
                    multiple_jobs=self.multiple_jobs,
                    on_new_job_id_callback=set_current_job_id,
                    project_id=self.project_id,
                    location=self.location,
                )
Example #16
    def execute(self, context):
        hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        if self.objects:
            objects = self.objects
        else:
            objects = hook.list(bucket_name=self.bucket_name, prefix=self.prefix)

        self.log.info("Deleting %s objects from %s", len(objects), self.bucket_name)
        for object_name in objects:
            hook.delete(bucket_name=self.bucket_name, object_name=object_name)
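A hedged usage sketch for this delete logic, assuming it belongs to GCSDeleteObjectsOperator; bucket and prefix are placeholders:

from airflow.providers.google.cloud.operators.gcs import GCSDeleteObjectsOperator

delete_staging = GCSDeleteObjectsOperator(
    task_id="delete_staging",
    bucket_name="my-bucket",  # placeholder
    prefix="staging/",        # deletes every object under this prefix
)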
Example #17
 def execute(self, context):
     hook = CloudTextToSpeechHook(gcp_conn_id=self.gcp_conn_id)
     result = hook.synthesize_speech(
         input_data=self.input_data,
         voice=self.voice,
         audio_config=self.audio_config,
         retry=self.retry,
         timeout=self.timeout,
     )
     with NamedTemporaryFile() as temp_file:
         temp_file.write(result.audio_content)
         # flush so the full audio payload is on disk before uploading by filename
         temp_file.flush()
         cloud_storage_hook = GCSHook(google_cloud_storage_conn_id=self.gcp_conn_id)
         cloud_storage_hook.upload(
             bucket_name=self.target_bucket_name, object_name=self.target_filename, filename=temp_file.name
         )
Example #18
    def execute(self, context: 'Context'):
        hook = GoogleDisplayVideo360Hook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        resource = hook.get_query(query_id=self.report_id)
        # Check if report is ready
        if resource["metadata"]["running"]:
            raise AirflowException(f"Report {self.report_id} is still running")

        # If no custom report_name provided, use DV360 name
        file_url = resource["metadata"][
            "googleCloudStoragePathForLatestReport"]
        report_name = self.report_name or urlparse(file_url).path.split(
            "/")[-1]
        report_name = self._resolve_file_name(report_name)

        # Download the report
        self.log.info("Starting downloading report %s", self.report_id)
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            with urllib.request.urlopen(file_url) as response:
                shutil.copyfileobj(response, temp_file, length=self.chunk_size)

            temp_file.flush()
            # Upload the local file to bucket
            bucket_name = self._set_bucket_name(self.bucket_name)
            gcs_hook.upload(
                bucket_name=bucket_name,
                object_name=report_name,
                gzip=self.gzip,
                filename=temp_file.name,
                mime_type="text/csv",
            )
        self.log.info(
            "Report %s was saved in bucket %s as %s.",
            self.report_id,
            self.bucket_name,
            report_name,
        )
        self.xcom_push(context, key="report_name", value=report_name)
Example #19
    def execute(self, context: 'Context'):
        sheet_hook = GSheetsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        # Pull data and upload
        destination_array: List[str] = []
        sheet_titles = sheet_hook.get_sheet_titles(
            spreadsheet_id=self.spreadsheet_id, sheet_filter=self.sheet_filter)
        for sheet_range in sheet_titles:
            data = sheet_hook.get_values(spreadsheet_id=self.spreadsheet_id,
                                         range_=sheet_range)
            gcs_path_to_file = self._upload_data(gcs_hook, sheet_hook,
                                                 sheet_range, data)
            destination_array.append(gcs_path_to_file)

        self.xcom_push(context, "destination_objects", destination_array)
        return destination_array
Example #20
 def _get_gcs_hook(self):
     if not self.hook:
         self.hook = GCSHook(
             gcp_conn_id=self.google_cloud_conn_id,
             delegate_to=self.delegate_to,
         )
     return self.hook
Example #21
 def _upload_data(self, gcs_hook: GCSHook,
                  gdrive_hook: GoogleDriveHook) -> str:
     file_handle = BytesIO()
     self._set_file_metadata(gdrive_hook=gdrive_hook)
     file_id = self.file_metadata["id"]
     mime_type = self.file_metadata["mime_type"]
     request = gdrive_hook.get_media_request(file_id=file_id)
     gdrive_hook.download_content_from_request(file_handle=file_handle,
                                               request=request,
                                               chunk_size=104857600)  # 100 MiB chunks
     gcs_hook.upload(
         bucket_name=self.destination_bucket,
         object_name=self.destination_object,
         data=file_handle.getvalue(),
         mime_type=mime_type,
     )
Example #22
    def execute(self, context) -> list:

        hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        self.log.info(
            'Getting list of the files. Bucket: %s; Delimiter: %s; Prefix: %s',
            self.bucket,
            self.delimiter,
            self.prefix,
        )

        return hook.list(bucket_name=self.bucket, prefix=self.prefix, delimiter=self.delimiter)
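The returned list is pushed to XCom, so downstream tasks can consume it. A usage sketch, assuming GCSListObjectsOperator with placeholder names:

from airflow.providers.google.cloud.operators.gcs import GCSListObjectsOperator

list_files = GCSListObjectsOperator(
    task_id="list_files",
    bucket="my-bucket",  # placeholder
    prefix="raw/",
    delimiter=".csv",    # only objects ending in .csv
)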
Example #23
def local_pq_to_gcs(task_instance, bucket_name, dest_blob_dim_transform):
    """
    Take the locally transformed parquet files and load them into GCS.

    Args:
        bucket_name: e.g. 'your-gcs-bucket'
        dest_blob_dim_transform: e.g. 'transformed/project_two_airflow/'

    Returns:
        dict mapping each table name ('DIM_cloud', 'DIM_lifted_index',
        'DIM_prec_amount', 'DIM_rh', 'DIM_snow_depth', 'FACT_weather')
        to its destination blob path under dest_blob_dim_transform.
    """
    dict_of_local_pq = task_instance.xcom_pull(task_ids='transform_raw_json')
    dict_of_gcs_pq = {}
    for name, pq_path in dict_of_local_pq.items():
        blob_name = pq_path.split('/')[-1]
        dest_blob_transform = dest_blob_dim_transform + blob_name
        GCSHook(gcp_conn_id='google_cloud_default').upload(
            bucket_name=bucket_name,
            object_name=dest_blob_transform,
            filename=pq_path)
        dict_of_gcs_pq[name] = dest_blob_transform

    return dict_of_gcs_pq
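One way to wire this callable into a DAG, assuming Airflow 2's context injection supplies the task_instance argument automatically; the task id and kwargs are placeholders:

from airflow.operators.python import PythonOperator

load_pq_to_gcs = PythonOperator(
    task_id="local_pq_to_gcs",
    python_callable=local_pq_to_gcs,
    op_kwargs={
        "bucket_name": "your-gcs-bucket",
        "dest_blob_dim_transform": "transformed/project_two_airflow/",
    },
)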
Example #24
    def execute(self, context):
        self.log.info('Executing download: %s, %s, %s', self.bucket, self.object, self.filename)
        hook = GCSHook(
            google_cloud_storage_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        if self.store_to_xcom_key:
            file_bytes = hook.download(bucket_name=self.bucket, object_name=self.object)
            if sys.getsizeof(file_bytes) < MAX_XCOM_SIZE:
                context['ti'].xcom_push(key=self.store_to_xcom_key, value=file_bytes)
            else:
                raise AirflowException('The size of the downloaded file is too large to push to XCom!')
        else:
            hook.download(bucket_name=self.bucket, object_name=self.object, filename=self.filename)
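This matches the shape of GCSToLocalFilesystemOperator. A minimal usage sketch with placeholder names:

from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator

download_file = GCSToLocalFilesystemOperator(
    task_id="download_file",
    bucket="my-bucket",               # placeholder
    object_name="reports/report.csv",
    filename="/tmp/report.csv",       # set store_to_xcom_key instead to push small files to XCom
)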
Example #25
    def execute(self, context):
        gcs_hook = GCSHook(gcp_conn_id=self.gcp_conn_id,
                           delegate_to=self.delegate_to)

        sftp_hook = SFTPHook(self.sftp_conn_id)

        if WILDCARD in self.source_path:
            total_wildcards = self.source_path.count(WILDCARD)
            if total_wildcards > 1:
                raise AirflowException(
                    "Only one wildcard '*' is allowed in source_path parameter. "
                    "Found {} in {}.".format(total_wildcards,
                                             self.source_path))

            prefix, delimiter = self.source_path.split(WILDCARD, 1)
            base_path = os.path.dirname(prefix)

            files, _, _ = sftp_hook.get_tree_map(base_path,
                                                 prefix=prefix,
                                                 delimiter=delimiter)

            for file in files:
                destination_path = file.replace(base_path,
                                                self.destination_path, 1)
                self._copy_single_object(gcs_hook, sftp_hook, file,
                                         destination_path)

        else:
            destination_object = (self.destination_path
                                  if self.destination_path else
                                  self.source_path.rsplit("/", 1)[1])
            self._copy_single_object(gcs_hook, sftp_hook, self.source_path,
                                     destination_object)
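A hedged usage sketch, assuming the method belongs to SFTPToGCSOperator; connection ids and paths are placeholders:

from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator

sftp_to_gcs = SFTPToGCSOperator(
    task_id="sftp_to_gcs",
    source_path="/upload/*.csv",     # at most one wildcard, as enforced above
    destination_bucket="my-bucket",  # placeholder
    destination_path="incoming/",
    sftp_conn_id="sftp_default",
)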
Example #26
 def _get_gcs_hook(self) -> Optional[GCSHook]:
     if not self.hook:
         self.hook = GCSHook(
             gcp_conn_id=self.google_cloud_conn_id,
             delegate_to=self.delegate_to,
             impersonation_chain=self.impersonation_chain,
         )
     return self.hook
Example #27
 def execute(self, context: "Context") -> None:
     hook = GCSHook(
         gcp_conn_id=self.gcp_conn_id,
         delegate_to=self.delegate_to,
         impersonation_chain=self.impersonation_chain,
     )
     try:
         hook.create_bucket(
             bucket_name=self.bucket_name,
             resource=self.resource,
             storage_class=self.storage_class,
             location=self.location,
             project_id=self.project_id,
             labels=self.labels,
         )
     except Conflict:  # HTTP 409
         self.log.warning("Bucket %s already exists", self.bucket_name)
Example #28
    def execute(self, context):
        """
        Uploads the file to Google Cloud Storage
        """
        hook = GoogleCloudStorageHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to)

        with tempfile.NamedTemporaryFile('w', suffix=self.src) as temp:
            # flush any buffered content to disk before handing the path to upload()
            temp.flush()
            hook.upload(
                bucket_name=self.bucket,
                object_name=self.dst,
                mime_type=self.mime_type,
                filename=temp.name,
                gzip=self.gzip,
            )
Example #29
    def execute(self, context: dict):
        hook = GoogleSearchAdsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )

        gcs_hook = GCSHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )

        # Resolve file name of the report
        report_name = self.report_name or self.report_id
        report_name = self._resolve_file_name(report_name)

        response = hook.get(report_id=self.report_id)
        if not response['isReportReady']:
            raise AirflowException('Report {} is not ready yet'.format(self.report_id))

        # Resolve report fragments
        fragments_count = len(response["files"])

        # Download chunks of report's data
        self.log.info("Downloading Search Ads report %s", self.report_id)
        with NamedTemporaryFile() as temp_file:
            for i in range(fragments_count):
                byte_content = hook.get_file(report_fragment=i,
                                             report_id=self.report_id)
                fragment = byte_content if i == 0 else self._handle_report_fragment(byte_content)
                temp_file.write(fragment)

            temp_file.flush()

            gcs_hook.upload(
                bucket_name=self.bucket_name,
                object_name=report_name,
                gzip=self.gzip,
                filename=temp_file.name,
            )
        self.xcom_push(context, key="file_name", value=report_name)
Example #30
class GoogleCloudBucketHelper:
    """GoogleCloudStorageHook helper class to download GCS object."""
    GCS_PREFIX_LENGTH = 5

    def __init__(self,
                 gcp_conn_id: str = 'google_cloud_default',
                 delegate_to: Optional[str] = None) -> None:
        self._gcs_hook = GCSHook(gcp_conn_id, delegate_to)

    def google_cloud_to_local(self, file_name: str) -> str:
        """
        Checks whether the file specified by file_name is stored in Google Cloud
        Storage (GCS); if so, downloads the file and saves it locally. The full
        path of the saved file is returned; otherwise the local file_name is
        returned immediately.

        :param file_name: The full path of input file.
        :type file_name: str
        :return: The full path of local file.
        :rtype: str
        """
        if not file_name.startswith('gs://'):
            return file_name

        # Extract bucket_id and object_id by removing the 'gs://' prefix and
        # splitting the remainder on the path delimiter '/'.
        path_components = file_name[self.GCS_PREFIX_LENGTH:].split('/')
        if len(path_components) < 2:
            raise Exception(
                'Invalid Google Cloud Storage (GCS) object path: {}'
                .format(file_name))

        bucket_id = path_components[0]
        object_id = '/'.join(path_components[1:])
        local_file = os.path.join(
            tempfile.gettempdir(),
            'dataflow{}-{}'.format(str(uuid.uuid4())[:8], path_components[-1])
        )
        self._gcs_hook.download(bucket_id, object_id, local_file)

        if os.stat(local_file).st_size > 0:
            return local_file
        raise Exception(
            'Failed to download Google Cloud Storage (GCS) object: {}'
            .format(file_name))
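Usage is straightforward; a short sketch with a placeholder object path:

helper = GoogleCloudBucketHelper(gcp_conn_id='google_cloud_default')
# a gs:// path is downloaded to a temp file; a plain local path is returned unchanged
local_template = helper.google_cloud_to_local('gs://my-bucket/templates/job.py')  # placeholder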