def execute(self, context):
    """Assemble the Sentinel-1 product metadata package and zip it.

    Pulls the outputs of the upstream download / gdal-addo / upload /
    archive tasks from XCom, extracts product metadata from the
    downloaded SAFE package via ``S1GDALReader``, renders the HTML
    description and XML metadata documents, creates a thumbnail from the
    quicklook, and finally delegates zip creation to the shared
    ``create_procuct_zip`` callable through an inline ``PythonOperator``.

    :param context: Airflow task context; must contain ``task_instance``.
    :return: list with the path of the created product zip, or an empty
        list when any upstream task produced no output.
    """
    log.info('--------------------S1Metadata_PLUGIN running------------')
    task_instance = context['task_instance']
    log.info("Receiving from 'get_input_from':\n{}".format(
        self.get_inputs_from))

    download_task_id = self.get_inputs_from['download_task_id']
    addo_task_ids = self.get_inputs_from['addo_task_ids']
    upload_task_ids = self.get_inputs_from['upload_task_ids']
    archive_product_task_id = self.get_inputs_from[
        'archive_product_task_id']

    downloaded = task_instance.xcom_pull(
        task_ids=download_task_id, key=XCOM_RETURN_KEY)

    # Each addo task returns a list of granule paths; concatenate them.
    local_granules_paths = []
    for tid in addo_task_ids:
        local_granules_path = task_instance.xcom_pull(
            task_ids=tid, key=XCOM_RETURN_KEY)
        if local_granules_path:
            local_granules_paths += local_granules_path

    uploaded_granules_paths = task_instance.xcom_pull(
        task_ids=upload_task_ids, key=XCOM_RETURN_KEY)
    original_package_path = task_instance.xcom_pull(
        task_ids=archive_product_task_id, key=XCOM_RETURN_KEY)

    # Guard clauses: bail out before doing any metadata-collection work
    # if any upstream task produced nothing.
    if not downloaded:
        log.info("No products from Download task, Nothing to do.")
        return list()
    if not local_granules_paths:
        log.info("No local granules from processing, Nothing to do.")
        return list()
    if not uploaded_granules_paths:
        log.info("No uploaded granules from upload task, Nothing to do.")
        return list()
    if not original_package_path:
        log.info(
            "No original package path from original package upload task, Nothing to do."
        )
        return list()

    # Moved after the guards: only collect granule metadata when we are
    # actually going to build the package.
    granules_dict, bbox = collect_granules_metadata(
        local_granules_paths, self.granules_upload_dir, self.bands_dict)

    # BUG FIX: dict.keys() is a non-subscriptable view in Python 3, so
    # downloaded.keys()[0] raised TypeError; next(iter(...)) fetches the
    # first key portably. NOTE(review): 'downloaded' is assumed to map a
    # single SAFE package path to its product info — confirm with the
    # download task.
    safe_package_path = next(iter(downloaded))
    safe_package_filename = os.path.basename(safe_package_path)
    product_id = downloaded[safe_package_path].get('title')
    original_package_location = (
        self.original_package_download_base_url + safe_package_filename)

    processing_dir = os.path.join(self.processing_dir, product_id)
    if not os.path.exists(processing_dir):
        os.makedirs(processing_dir)

    log.info('safe_package_path: {}'.format(safe_package_path))
    log.info('local_granules_paths: {}'.format(local_granules_paths))

    s1reader = S1GDALReader(safe_package_path)
    product_metadata = s1reader.get_metadata()
    product_metadata['footprint'] = s1reader.get_footprint()
    log.info(pprint.pformat(product_metadata, indent=4))

    time_start = product_metadata['ACQUISITION_START_TIME']
    time_end = product_metadata['ACQUISITION_STOP_TIME']

    owslinks_dict = create_owslinks_dict(
        product_identifier=product_id,
        timestart=time_start,
        timeend=time_end,
        granule_bbox=bbox,
        gs_workspace=self.gs_workspace,
        gs_wms_layer=self.gs_wms_layer,
        gs_wms_width=self.gs_wms_width,
        gs_wms_height=self.gs_wms_height,
        gs_wms_format=self.gs_wms_format,
        gs_wms_version=self.gs_wms_version,
        gs_wfs_featuretype=self.gs_wfs_featuretype,
        gs_wfs_format=self.gs_wfs_format,
        gs_wfs_version=self.gs_wfs_version,
        gs_wcs_coverage_id=self.gs_wcs_coverage_id,
        gs_wcs_scale_i=self.gs_wcs_scale_i,
        gs_wcs_scale_j=self.gs_wcs_scale_j,
        gs_wcs_format=self.gs_wcs_format,
        gs_wcs_version=self.gs_wcs_version)

    # create thumbnail
    # TODO: create proper thumbnail from quicklook. Also remove temp file
    log.info("Creating thumbnail")
    thumbnail_path = os.path.join(processing_dir, "thumbnail.png")
    quicklook_path = s1reader.get_quicklook()
    log.info(pprint.pformat(quicklook_path))
    copyfile(quicklook_path, thumbnail_path)

    search_params_dict = create_search_dict(
        product_metadata, original_package_location)
    log.info(pprint.pformat(search_params_dict))

    metadata_dict = create_metadata_dict(product_metadata)
    log.info(pprint.pformat(metadata_dict))

    description_dict = create_description_dict(
        product_metadata, original_package_location)
    log.info(pprint.pformat(description_dict))

    # create description.html and dump it to file
    log.info("Creating description.html")
    html_description = create_product_description(description_dict)
    search_params_dict['htmlDescription'] = html_description

    # create metadata XML
    log.info("Creating metadata.xml")
    metadata_xml = create_product_metadata(metadata_dict)

    # Package everything by executing the shared zip-creation callable
    # inline through a PythonOperator.
    po = PythonOperator(
        task_id="s1_metadata_dictionary_creation",
        python_callable=create_procuct_zip,
        op_kwargs={
            'processing_dir': processing_dir,
            'search_params_dict': search_params_dict,
            'description_html': html_description,
            'metadata_xml': metadata_xml,
            'granules_dict': granules_dict,
            'owslinks_dict': owslinks_dict,
            'thumbnail_path': thumbnail_path
        })
    out = po.execute(context)

    zip_paths = list()
    if out:
        zip_paths.append(out)
    return zip_paths