def vgt_ndvi_linearx2(input_files, output_file):
    """Apply the time-series linear filter to an NDVI triplet.

    input_files is ordered [before, current, after]; the filtered 'current'
    composite is written to output_file.
    """
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_ts_linear_filter(
        input_file=input_files[1],
        before_file=input_files[0],
        after_file=input_files[2],
        output_file=target,
        output_format='GTIFF',
        options="compress = lzw",
        threshold=0.1)
def vgt_ndvi_baresoil_linearx2(input_file, output_file):
    """Derive the bare-soil product from an NDVI input file.

    output_file may arrive wrapped in a list (ruffus convention) and is
    unwrapped before use.
    """
    output_file = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(output_file))
    args = {"input_file": input_file, "output_file": output_file,
            "output_format": 'GTIFF', "options": "compress = lzw"}
    # Fix: removed leftover debug statement 'print args' that polluted the
    # service's stdout on every invocation.
    raster_image_math.do_make_baresoil(**args)
def modis_par_monavg(input_file, output_file):
    """Compute the monthly average of MODIS PAR daily files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    # NOTE(review): in_nodata is resolved from an enclosing scope - confirm it
    # is defined where this function is created.
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        input_nodata=in_nodata)
def processing_merge(pipeline_run_level=0, pipeline_printout_level=0, input_products='', output_product='', mapset=''): # Dummy return arguments proc_lists = functions.ProcLists() list_subprods = proc_lists.list_subprods list_subprod_groups = proc_lists.list_subprod_groups es2_data_dir = es_constants.processing_dir+os.path.sep # Do some checks on the integrity of the inputs # Manage output_product data out_product_code = output_product[0].productcode out_sub_product_code = output_product[0].subproductcode out_version = output_product[0].version out_mapset = output_product[0].mapsetcode out_subdir = functions.set_path_sub_directory(out_product_code, out_sub_product_code,'Ingest', out_version, out_mapset) out_prod_ident = functions.set_path_filename_no_date(out_product_code, out_sub_product_code, out_mapset, out_version, ext) out_dir = es2_data_dir + out_subdir # Check the output product directory functions.check_output_dir(out_dir) # Loop over the input products: for input in input_products: # Extract info from input product product_code = input.productcode sub_product_code = input.subproductcode version = input.version start_date = input.start_date end_date = input.end_date product_info = querydb.get_product_out_info_connect(productcode=product_code, subproductcode=sub_product_code, version=version) prod_type = product_info[0].product_type in_subdir = functions.set_path_sub_directory(product_code, sub_product_code, prod_type, version, out_mapset) in_prod_ident = functions.set_path_filename_no_date(out_product_code, out_sub_product_code, out_mapset, version, ext) # Create the list of dates -> returns empty if start==end==None list_dates = proc_functions.get_list_dates_for_dataset(product_code, sub_product_code, version, start_date=start_date, end_date=end_date) # If list_dates == None, look at all existing files if list_dates is None: print 'To be Done !!!' 
# Otherwise, build list of files from list of dates else: for my_date in list_dates: in_file_path = es2_data_dir + in_subdir + my_date + in_prod_ident out_file_path = out_dir+my_date+out_prod_ident # Create the link functions.create_sym_link(in_file_path, out_file_path, force=False) return list_subprods, list_subprod_groups
def modis_pp_1mon(input_file, output_file):
    """Compute the monthly Primary Production from chla/sst/kd/par rasters.

    input_file ordering is [kd, chla, par, sst].
    """
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    # NOTE(review): sst/kd/par nodata come from an enclosing scope; unlike
    # modis_pp_comp, no chla_nodata is passed here - confirm this is intended.
    raster_image_math.do_compute_primary_production(
        chla_file=input_file[1],
        sst_file=input_file[3],
        kd_file=input_file[0],
        par_file=input_file[2],
        sst_nodata=sst_nodata,
        kd_nodata=kd_nodata,
        par_nodata=par_nodata,
        output_file=target,
        output_nodata=-9999,
        output_format='GTIFF',
        output_type=None,
        options="compress=lzw")
def std_precip_1mondiff(input_file, output_file):
    """Compute the 1-month precipitation difference (subtraction of the inputs)."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_oper_subtraction(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def fewsnet_1monavg(input_file, output_file):
    """Compute the monthly average of FEWSNET rainfall files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def std_fire_10dcountmax(input_file, output_file):
    """Compute the pixel-wise maximum of the 10-day fire-count files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_max_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def modis_pp_comp(input_file, output_file):
    """Compute Primary Production for a composite period.

    input_file ordering is [kd, chla, par, sst]; nodata values are resolved
    from the enclosing scope.
    """
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_compute_primary_production(
        chla_file=input_file[1],
        sst_file=input_file[3],
        kd_file=input_file[0],
        par_file=input_file[2],
        sst_nodata=sst_nodata,
        kd_nodata=kd_nodata,
        chla_nodata=chla_nodata,
        par_nodata=par_nodata,
        output_file=target,
        output_nodata=output_nodata,
        output_format='GTIFF',
        output_type=None,
        options="compress=lzw")
def compute_minimum(input_file, output_file):
    """Compute the multi-annual minimum, excluding the current year's file."""
    target = functions.list_to_element(output_file)
    history_files = functions.exclude_current_year(input_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_min_image(
        input_file=history_files,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def vgt_ndvi_10dmin_no_filter(input_file, output_file):
    """Compute the 10-day NDVI minimum without any temporal filtering."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_min_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress = lzw")
def lsasaf_lst_10d15min(input_file, output_file):
    """Compute the 10-day maximum of the 15-min LSASAF LST slots, then clean old files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_max_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        input_nodata=-32768)
    # House-keeping: delete ingested files older than 6 months.
    # NOTE(review): prod/starting_sprod/version/native_mapset come from the
    # enclosing pipeline scope - confirm they are defined there.
    number_months_keep = 6
    remove_old_files(prod, starting_sprod, version, native_mapset, 'Ingest', number_months_keep)
def std_dmp_10dperc(input_file, output_file):
    """Compute the 10-day DMP percent difference vs. the multi-annual average.

    input_file is a pair: [current file, average file].
    """
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_compute_perc_diff_vs_avg(
        input_file=input_file[0],
        avg_file=input_file[1],
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def compute_normalized_anomaly(input_file, output_file):
    """Compute a normalized anomaly (VCI-style) from [current, min, max] files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_make_vci(
        input_file=input_file[0],
        min_file=input_file[1],
        max_file=input_file[2],
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def vgt_ndvi_baresoil_linearx2(input_file, output_file):
    """Derive the bare-soil product from an NDVI input file.

    NOTE(review): this is a duplicate definition of a function with the same
    name earlier in the file; the later definition wins at import time.
    Consider removing one of the two.
    """
    output_file = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(output_file))
    args = {"input_file": input_file, "output_file": output_file,
            "output_format": 'GTIFF', "options": "compress = lzw"}
    # Fix: removed leftover debug statement 'print args' that polluted the
    # service's stdout on every invocation.
    raster_image_math.do_make_baresoil(**args)
def std_dmp_10davg(input_file, output_file):
    """Compute the multi-annual 10-day DMP average, excluding the current year."""
    # Fix: call the helper through the 'functions' module, consistently with
    # compute_minimum(); the bare name 'exclude_current_year' would raise a
    # NameError unless it was separately imported at module level.
    reduced_list = functions.exclude_current_year(input_file)
    output_file = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(output_file))
    args = {"input_file": reduced_list, "output_file": output_file,
            "output_format": 'GTIFF', "options": "compress=lzw"}
    raster_image_math.do_avg_image(**args)
def lsasaf_etp_1moncum(input_file, output_file):
    """Cumulate the LSASAF ETP files over one month."""
    # NOTE: output_file is deliberately NOT unwrapped via list_to_element here
    # (the call was commented out in the original) - presumably it already
    # arrives as a plain path; verify against the pipeline definition.
    functions.check_output_dir(os.path.dirname(output_file))
    raster_image_math.do_cumulate(
        input_file=input_file,
        output_file=output_file,
        output_format='GTIFF',
        options="compress=lzw",
        input_nodata=-32768)
def std_dmp_10dratio(input_file, output_file):
    """Compute the 10-day DMP ratio (percent division of the inputs)."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_oper_division_perc(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress = lzw")
def compute_product_std_deviation(input_file, output_file):
    """Compute the pixel-wise standard deviation over a set of yearly files.

    Each element of input_file is a pair (current-year file, average file);
    all first elements form the time series, and a single average file is used.
    """
    series_files = [pair[0] for pair in input_file]
    avg_file = [pair[1] for pair in input_file][0]
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    # NOTE(review): the avg file is passed as "output_file" while the real target
    # goes into "output_stddev" - looks odd; confirm against the signature of
    # raster_image_math.do_stddev_image before changing.
    raster_image_math.do_stddev_image(
        input_file=series_files,
        output_file=avg_file,
        output_format='GTIFF',
        options="compress=lzw",
        output_stddev=target)
def olci_wrr_3dcum(input_file, output_file):
    """Compute the 3-day OLCI WRR composite (average with nodata=1000)."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="",
        input_nodata=1000,
        output_nodata=1000)
def std_fire_10dcountmin(input_file, output_file):
    """Compute the pixel-wise minimum of the 10-day fire-count files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    # The product's coded nodata (0) would bias the minimum, so -32768 is
    # forced as input nodata instead.
    raster_image_math.do_min_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        input_nodata=-32768)
def lsasaf_etp_10d30min(input_file, output_file):
    """Average the 30-min LSASAF ETP slots over 10 days, then clean old files."""
    # TODO(review): add a condition on the number of files (at least 8 out of 10).
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        input_nodata=-32768)
    # House-keeping: delete ingested files older than 6 months.
    # NOTE(review): prod/version/native_mapset come from the enclosing
    # pipeline scope - confirm they are defined there.
    number_months_keep = 6
    remove_old_files(prod, "lsasaf-et", version, native_mapset, 'Ingest', number_months_keep)
def std_precip_10davg(input_file, output_file):
    """Compute the multi-annual 10-day precipitation average (Float32 output)."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        output_type='Float32',
        input_nodata=-32767)
def create_archives_from_dir(req_directory):
    """Process all .req files in a directory, then move .req and .bsx files to:

        req_directory/Done        -> archive created successfully
        req_directory/NothingDone -> request ok but no .bsx produced
        req_directory/Faulty      -> in case of errors
    """
    # Guard clauses replace the original deep if/else nesting.
    if req_directory is None:
        logger.error('Req. directory must be defined')
        return
    logger.info('Working on a directory: %s' % req_directory)
    if not os.path.isdir(req_directory):
        logger.error('Req. directory does not exist: %s' % req_directory)
        return
    # NOTE(review): the glob assumes req_directory ends with a path separator.
    req_files = glob.glob(req_directory + '*.req')
    for my_req in req_files:
        logger.info('Working on file: %s' % my_req)
        try:
            # Create the .bsx archive for this request
            status = requests.create_archive_from_request(my_req)
            if status == 0:
                target_dir = '{}{}{}'.format(req_directory, os.path.sep, 'Done')
                # Also check at least one .bsx has been generated
                bsx_files = glob.glob(my_req.replace('.req', '*.bsx'))
                if len(bsx_files) == 0:
                    target_dir = '{}{}{}'.format(req_directory, os.path.sep, 'NothingDone')
        except Exception:
            # Fix: narrowed the bare 'except:' (which also caught SystemExit/
            # KeyboardInterrupt) and log the full traceback instead of hiding it.
            logger.exception('Error in processing request: %s' % my_req)
            status = 1
        if status:
            target_dir = '{}{}{}'.format(req_directory, os.path.sep, 'Faulty')
        # Move to the target dir (Done/NothingDone/Faulty)
        functions.check_output_dir(target_dir)
        files = glob.glob(my_req.replace('.req', '*.bsx'))
        # See ES2-64: 07.11.2017
        shutil.move(my_req, target_dir + os.path.sep + os.path.basename(my_req))
        for my_file in files:
            shutil.move(my_file, target_dir + os.path.sep + os.path.basename(my_file))
def compute_monthly_prod_from_10d(input_file, output_file):
    """Average three 10-day composites into a monthly product.

    ES2-235: skip incomplete months (fewer than 3 dekads, or the month still
    in progress) so temporary composites are never published.
    """
    first_input_date = functions.get_date_from_path_full(input_file[0])
    if len(input_file) != 3:
        return
    if functions.is_date_current_month(first_input_date):
        return
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress = lzw")
def sst_fronts_detection(input_file, output_file):
    """Detect SST fronts on the input raster."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    # NOTE(review): 'parameters' is resolved from the enclosing scope - confirm
    # it is defined where this function is created.
    raster_image_math.do_detect_sst_fronts(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress = lzw",
        parameters=parameters)
    print('Done with raster')
def sst_shapefile_conversion(self, input_file, output_file):
    """Convert a fronts raster to an ESRI shapefile via gdal_polygonize.

    Returns 0 unconditionally (the external tool's exit code is not checked).
    """
    output_file = functions.list_to_element(output_file)
    # If the output already exists, delete the whole shapefile set (.shp/.shx/.dbf/...)
    if os.path.isfile(output_file):
        files = glob.glob(output_file.replace('.shp', '.*'))
        for my_file in files:
            os.remove(my_file)
    functions.check_output_dir(os.path.dirname(output_file))
    # NOTE(review): the command is built by string concatenation and executed
    # through a shell; paths containing spaces or shell metacharacters would
    # break or be unsafe. Consider subprocess.call with an argument list.
    command = es_constants.es2globals['gdal_polygonize'] + ' ' + input_file + ' ' + output_file + ' -nomask -f "ESRI Shapefile"'
    # Fix: dropped the unused 'p' binding (the return code was never inspected).
    os.system(command)
    return 0
def vgt_ndvi_linearx2(input_files, output_file):
    """Apply the time-series linear filter to [before, current, after] NDVI files.

    NOTE(review): duplicate definition of a function with the same name earlier
    in the file; the later one wins at import time.
    """
    out_path = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(out_path))
    filter_args = {
        "input_file": input_files[1],
        "before_file": input_files[0],
        "after_file": input_files[2],
        "output_file": out_path,
        "output_format": 'GTIFF',
        "options": "compress = lzw",
        'threshold': 0.1,
    }
    raster_image_math.do_ts_linear_filter(**filter_args)
def modis_chla_monavg(input_file, output_file):
    """Compute the monthly chl-a average, only when the month is complete."""
    target = functions.list_to_element(output_file)
    # Output name starts with YYYYMM -> derive the expected number of days
    month_str = os.path.basename(target)[0:6]
    expected_ndays = functions.get_number_days_month(month_str)
    functions.check_output_dir(os.path.dirname(target))
    available_ndays = len(input_file)
    if expected_ndays != available_ndays:
        # Incomplete month: do not produce a partial average
        logger.info('Missing days for period: %s. Skip' % month_str)
        return
    # NOTE(review): in_nodata is resolved from an enclosing scope.
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        input_nodata=in_nodata)
def std_fire_10dcount10kmax(input_file, output_file):
    """Compute the maximum of 10-day 10km-aggregated fire counts (Int16 output)."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_max_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        output_type='Int16',
        input_nodata=-32768)
def modis_chla_monavg(input_file, output_file):
    """Compute the monthly chl-a average, only when the month is complete.

    NOTE(review): duplicate definition of a function with the same name earlier
    in the file; the later one wins at import time.
    """
    target = functions.list_to_element(output_file)
    month_str = os.path.basename(target)[0:6]
    expected_ndays = functions.get_number_days_month(month_str)
    functions.check_output_dir(os.path.dirname(target))
    if expected_ndays != len(input_file):
        # Incomplete month: skip rather than publish a partial average
        logger.info('Missing days for period: %s. Skip' % month_str)
    else:
        # NOTE(review): in_nodata is resolved from an enclosing scope.
        raster_image_math.do_avg_image(
            input_file=input_file,
            output_file=target,
            output_format='GTIFF',
            options="compress=lzw",
            input_nodata=in_nodata)
def compute_3dayavg(input_file, output_file):
    """Compute a 3-day average, propagating the inputs' nodata value."""
    # Read nodata from the first input's metadata and reuse it for the output.
    # NOTE(review): sds_meta is resolved from the enclosing scope.
    nodata_value = int(sds_meta.get_nodata_value(input_file[0]))
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="",
        input_nodata=nodata_value,
        output_nodata=nodata_value)
def std_fire_10dcount10kratio(input_file, output_file):
    """Compute the percent ratio of 10-day 10km fire counts (Float32 output)."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_oper_division_perc(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw",
        output_type='Float32',
        input_nodata=-32768,
        output_nodata=-32768)
def test_max_image(self):
    """Regression test: do_max_image output must match the reference raster."""
    output_filename = 'fewsnet-rfe/10dmax/1221_fewsnet-rfe_10dmax_FEWSNET-Africa-8km_2.0.tif'
    output_file = os.path.join(self.root_out_dir, output_filename)
    ref_file = os.path.join(self.ref_dir, output_filename)
    functions.check_output_dir(os.path.dirname(output_file))
    raster_image_math.do_max_image(
        input_file=self.input_files,
        output_file=output_file,
        output_format='GTIFF',
        options="compress = lzw")
    # Byte-compare the produced file against the stored reference
    equal = self.checkFile(ref_file, output_file)
    self.assertEqual(equal, 1)
def opfish_computation(input_file, output_file):
    """Compute the OpFish indicator, propagating the input's nodata value."""
    # NOTE(review): sds_meta and parameters are resolved from the enclosing scope.
    nodata_value = int(sds_meta.get_nodata_value(input_file))
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.compute_opFish_indicator(
        input_file=input_file,
        output_file=target,
        nodata=nodata_value,
        output_format='GTIFF',
        options="compress = lzw",
        parameters=parameters)
    print('Done with raster')
def test_stats_4_raster(self):
    """Regression test: stats-on-grid aggregation followed by reprojection."""
    # Mapsets involved in the aggregation/reprojection
    input_mapset_name = 'SPOTV-Africa-1km'
    grid_mapset_name = 'SPOTV-Africa-1km'
    target_mapset_name = 'SPOTV-Africa-10km'
    grid_file = '/eStation2/layers/Mask_Africa_SPOTV_10km.tif'
    input_file = self.ref_dir + 'modis-firms/10dcount/20200301_modis-firms_10dcount_SPOTV-Africa-1km_v6.0.tif'
    output_filename = 'modis-firms/10dcount10k/20200301_modis-firms_10dcount10k_SPOTV-Africa-1km_v6.0.tif'
    output_file = os.path.join(self.root_out_dir, output_filename)
    ref_file = os.path.join(self.ref_dir, output_filename)
    functions.check_output_dir(os.path.dirname(output_file))

    # Temporary (not yet reprojected) file lives in a scratch directory
    tmpdir = tempfile.mkdtemp(prefix=__name__,
                              suffix='_' + os.path.basename(output_file),
                              dir='/data/tmp/')
    output_file_temp = tmpdir + os.path.sep + os.path.basename(output_file)

    # Aggregate the 1km counts onto the 10km grid (sum per cell)
    operation = 'sum'
    raster_image_math.do_stats_4_raster(input_file, grid_file, output_file_temp, operation,
                                        input_mapset_name, grid_mapset_name,
                                        output_format=None, nodata=-32768,
                                        output_type='Int16', options=None)

    # Reproject to the target mapset
    # NOTE(review): do_reproject takes 'inputfile' (no underscore) - preserved as-is.
    raster_image_math.do_reproject(inputfile=output_file_temp,
                                   output_file=output_file,
                                   native_mapset_name=grid_mapset_name,
                                   target_mapset_name=target_mapset_name)

    equal = self.checkFile(ref_file, output_file)
    self.assertEqual(equal, 1)
def fewsnet_10davg(input_file, output_file):
    """Compute the multi-annual 10-day average of FEWSNET rainfall files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def loop_processing(dry_run=False, serialize=False):
    """Driver of the processing service.

    Reads the active processing chains from the database, builds the argument
    set for each chain and launches the corresponding 'processing_<algorithm>'
    module / 'processing_<derivation_method>' function.

    dry_run   -> if truthy, triggers pipeline_printout() rather than pipeline_run()
    serialize -> False (default): fork and work in parallel
                 True: do NOT detach processes; work in series (for debugging)
    """
    # Clean the directory holding the per-chain .lock files
    if os.path.isdir(es_constants.processing_tasks_dir):
        shutil.rmtree(es_constants.processing_tasks_dir)
    logger.info("Entering routine %s" % 'loop_processing')
    echo_query = False
    functions.check_output_dir(es_constants.processing_tasks_dir)

    while True:
        logger.debug("Entering infinite loop")
        # Get all active processing chains from the database.
        active_processing_chains = querydb.get_active_processing_chains()

        # Manage dry_run
        if dry_run:
            pipeline_run_level = 0
            pipeline_printout_level = 3
        else:
            pipeline_run_level = 3
            pipeline_printout_level = 0

        for chain in active_processing_chains:
            logger.debug("Processing Chain N.:%s" % str(chain.process_id))
            derivation_method = chain.derivation_method    # name of the function in the module
            algorithm = chain.algorithm                    # name of the .py module
            mapset = chain.output_mapsetcode
            process_id = chain.process_id

            # Get input products
            input_products = querydb.get_processing_chain_products(chain.process_id, type='input')
            product_code = input_products[0].productcode
            sub_product_code = input_products[0].subproductcode
            version = input_products[0].version

            # Get product metadata for output products (from first input)
            input_product_info = querydb.get_product_out_info(productcode=product_code,
                                                              subproductcode=sub_product_code,
                                                              version=version)

            if re.search('^std_.*', algorithm):
                # 'std_' case (ruffus with 1 input): all info from the 1st input + date handling
                start_date = input_products[0].start_date
                end_date = input_products[0].end_date
                list_dates = proc_functions.get_list_dates_for_dataset(product_code, sub_product_code, version,
                                                                       start_date=start_date, end_date=end_date)
                args = {'pipeline_run_level': pipeline_run_level,
                        'pipeline_printout_level': pipeline_printout_level,
                        'starting_sprod': sub_product_code,
                        'prod': product_code,
                        'mapset': mapset,
                        'starting_dates': list_dates,
                        'version': version}
            else:
                # Non-'std' case (e.g. merge): pass inputs and outputs to the function
                output_products = querydb.get_processing_chain_products(chain.process_id, type='output')
                args = {'pipeline_run_level': pipeline_run_level,
                        'pipeline_printout_level': pipeline_printout_level,
                        'input_products': input_products,
                        'output_product': output_products}

            # Build a unique lock-file name from the chain's identifying fields
            processing_unique_id = 'ID=' + str(process_id) + '_METHOD=' + derivation_method + '_ALGO=' + algorithm + '.lock'
            processing_unique_lock = es_constants.processing_tasks_dir + processing_unique_id

            if not os.path.isfile(processing_unique_lock):
                logger.debug("Launching processing for ID: %s" % processing_unique_id)
                open(processing_unique_lock, 'a').close()

                # Resolve 'processing_<algorithm>.processing_<derivation_method>'
                module_name = 'processing_' + algorithm
                function_name = 'processing_' + derivation_method
                proc_dir = __import__("apps.processing")
                proc_pck = getattr(proc_dir, "processing")
                proc_mod = getattr(proc_pck, module_name)
                proc_func = getattr(proc_mod, function_name)

                if serialize == False:
                    # Fork and run the pipeline in the child process
                    pid = os.fork()
                    if pid == 0:
                        # Child -> run the processing pipeline, then update the DB
                        proc_lists = proc_func(**args)
                        upsert_database(process_id, product_code, version, mapset, proc_lists, input_product_info)
                        # Simulate longer processing (TEMP)
                        logger.info("Going to sleep for a while - to be removed")
                        time.sleep(2)
                        logger.info("Waking-up now, and removing the .lock")
                        os.remove(processing_unique_lock)
                        sys.exit(0)
                    else:
                        # Parent -> just go on with the next chain
                        pass
                else:
                    # Serialized (debug) mode: run in this process
                    proc_lists = proc_func(**args)
                    logger.info("Going to sleep for a while - to be removed")
                    upsert_database(process_id, product_code, version, mapset, proc_lists, input_product_info)
                    time.sleep(2)
                    logger.info("Waking-up now, and removing the .lock")
                    os.remove(processing_unique_lock)
            else:
                logger.debug("Processing already running for ID: %s " % processing_unique_id)

        # Wait a while before polling the chains again
        time.sleep(5)
def std_precip_1monnp(input_file, output_file):
    """Compute the normalized precipitation index from [current, min, max] files."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_make_vci(
        input_file=input_file[0],
        min_file=input_file[1],
        max_file=input_file[2],
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def std_precip_1monperc(input_file, output_file):
    """Compute the 1-month precipitation percent difference vs. the average.

    input_file is a pair: [current file, average file].
    """
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_compute_perc_diff_vs_avg(
        input_file=input_file[0],
        avg_file=input_file[1],
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def std_precip_1mondiff(input_file, output_file):
    """Compute the 1-month precipitation difference.

    NOTE(review): duplicate definition of a function with the same name earlier
    in the file; the later one wins at import time.
    """
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_oper_subtraction(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress=lzw")
def vgt_ndvi_1monmax(input_file, output_file):
    """Compute the monthly NDVI maximum-value composite."""
    target = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(target))
    raster_image_math.do_max_image(
        input_file=input_file,
        output_file=target,
        output_format='GTIFF',
        options="compress = lzw")