def get_file_from_motu_command(motu_command, target_dir, userpwd=''):
    """Run a motu-client download command and move its output to target_dir.

    The motu command line is expected to end with the name of the file it
    produces; the motu client writes that file into the configured temporary
    directory, and on success it is moved into ``target_dir``.

    :param motu_command: full motu client command line; its last
        whitespace-separated token is taken as the output file name.
    :param target_dir: destination directory for the downloaded file.
    :param userpwd: unused; kept for backward compatibility with callers.
    :return: 0 on success, 1 on failure.
    """
    # Temporary directory where the motu client writes its output
    tmpdir = es_constants.es2globals['base_tmp_dir']
    result = 1
    # The last token of the command line is the output file name
    target_file = motu_command.split()[-1]
    target_fullpath = tmpdir + os.sep + target_file
    target_final = target_dir + os.sep
    try:
        # NOTE(review): shell-string execution. The command comes from
        # internal configuration; prefer subprocess.run([...], shell=False)
        # if it can ever carry untrusted input.
        os.system(motu_command)
        # Move the file to its destination only if the download produced it
        if functions.is_file_exists_in_path(target_fullpath):
            shutil.move(target_fullpath, target_final)
            result = 0
        else:
            result = 1
        return result
    except OSError:
        # Bug fix: the original format string had two placeholders
        # ('%s - error : %i') but was given only the command, so logging the
        # failure would itself raise a TypeError.
        logger.warning('Output NOT downloaded: %s', motu_command)
        return 1
def change_subProds_params(self, starting_sprod, frequency=None, product_type='Ingest'):
    """Re-point the processing chain at a different starting sub-product.

    Updates the starting sub-product, frequency and product type, then
    recomputes the input filename identifier and the list of starting files.

    :param starting_sprod: code of the new starting sub-product.
    :param frequency: optional frequency, forwarded to
        ``change_frequency_params``.
    :param product_type: product type used to build the input sub-directory
        (default 'Ingest').
    """
    self.starting_sprod = starting_sprod
    self.change_frequency_params(frequency)
    self.product_type = product_type
    self.starting_files = []

    # ---------------------------------------------------------------------
    # Rebuild the input filename trailer and input directory for the newly
    # selected sub-product.
    self.in_prod_ident = functions.set_path_filename_no_date(
        self.prod, self.starting_sprod, self.mapset, self.version, self.ext)
    input_dir = self.es2_data_dir + functions.set_path_sub_directory(
        self.prod, self.starting_sprod, self.product_type,
        self.version, self.mapset)

    if self.starting_dates is None:
        # No explicit dates: keep a glob pattern matching every date
        self.starting_files = input_dir + "*" + self.in_prod_ident
    else:
        # Explicit dates: keep only files that actually exist (ES2-450)
        self.starting_files = [
            input_dir + one_date + self.in_prod_ident
            for one_date in self.starting_dates
            if functions.is_file_exists_in_path(
                input_dir + one_date + self.in_prod_ident)
        ]
def __init__(self, prod, starting_sprod, mapset, version, starting_dates=None,
             proc_lists=None, frequency=None, product_type='Ingest'):
    """Set up the processing chain for one product/sub-product/mapset.

    Stores the identifying parameters, derives the input filename trailer
    and directory, and builds the list of starting/input files.

    :param prod: product code.
    :param starting_sprod: starting sub-product code.
    :param mapset: mapset code.
    :param version: product version.
    :param starting_dates: optional explicit list of dates; when None a
        glob pattern over all dates is used instead.
    :param proc_lists: optional pre-built ProcLists container.
    :param frequency: optional processing frequency.
    :param product_type: product type for the input sub-directory
        (default 'Ingest').
    """
    self.prod = prod
    self.starting_sprod = starting_sprod
    self.mapset = mapset
    self.version = version
    self.starting_dates = starting_dates
    self.proc_lists = proc_lists
    self.frequency = frequency
    self.product_type = product_type
    self.starting_files = []

    # ES2-410: the derived sub-product code is built dynamically from the
    # starting sub-product.
    self.subproduct_code = starting_sprod

    # General definitions for this processing chain
    self.ext = es_constants.ES2_OUTFILE_EXTENSION
    self.es2_data_dir = es_constants.es2globals['processing_dir'] + os.path.sep

    # ---------------------------------------------------------------------
    # Input filename trailer and input directory
    self.in_prod_ident = functions.set_path_filename_no_date(
        self.prod, self.starting_sprod, self.mapset, self.version, self.ext)
    input_dir = self.es2_data_dir + functions.set_path_sub_directory(
        self.prod, self.starting_sprod, self.product_type,
        self.version, self.mapset)

    if self.starting_dates is None:
        # Glob pattern covering every available date
        self.starting_files = input_dir + "*" + self.in_prod_ident
    else:
        # Keep only the dated files that actually exist (ES2-450)
        for one_date in self.starting_dates:
            candidate = input_dir + one_date + self.in_prod_ident
            if functions.is_file_exists_in_path(candidate):
                self.starting_files.append(candidate)

    # Explicit dates yield a ready-made list; a glob pattern must be expanded
    if self.starting_dates is not None:
        self.input_files = self.starting_files
    else:
        self.input_files = glob.glob(self.starting_files)

    # Initialize the monthly input product parameters
    self.intialize_month_parameters()
def generate_parameters_3davg():
    """Yield (file_list, output_file) pairs for 3-day average computation.

    For each distinct day found among the input files, checks that the two
    following days also exist on disk; if so, yields the sorted triple of
    daily file paths together with the output path of the 3-day average.

    NOTE(review): this generator reads free variables from an enclosing
    scope not visible here (``starting_dates``, ``input_dir``,
    ``in_prod_ident``, ``subdir_3davg``, ``prod_ident_3davg``) — presumably
    set by the surrounding pipeline-creation function; verify against the
    caller.
    """
    # Look for all input files in input_dir: either build the list from the
    # explicit starting dates, or expand a glob pattern over all dates.
    if starting_dates is not None:
        input_files = []
        for my_date in starting_dates:
            input_files.append(input_dir + my_date + in_prod_ident)
    else:
        starting_files = input_dir + "*" + in_prod_ident
        input_files = glob.glob(starting_files)

    logger.debug("starting_files %s" % input_files)

    day_list = []
    # Create a unique, sorted list of all days (YYYYMMDD) present in the inputs
    for input_file in input_files:
        basename = os.path.basename(input_file)
        mydate = functions.get_date_from_path_filename(basename)
        # First 8 characters of the date string are YYYYMMDD
        mydate_yyyymmdd = str(mydate)[0:8]
        if mydate_yyyymmdd not in day_list:
            day_list.append(mydate_yyyymmdd)
    day_list = sorted(day_list)

    # Compute today's/yesterday's date strings and the current 'julian' dekad.
    # NOTE(review): yesterday_str and dekad_now are computed but never used
    # below — leftovers from a removed current-day exclusion.
    today = datetime.today()
    yesterday = today - timedelta(1)
    today_str = today.strftime('%Y%m%d')
    yesterday_str = yesterday.strftime('%Y%m%d')
    dekad_now = functions.conv_date_2_dekad(today_str)

    for myday in day_list:
        # Select the input file(s) whose path contains this day
        input_file = [s for s in input_files if myday in s]
        file_list = []
        basename = os.path.basename(input_file[0])
        # Date is in format YYYYMMDD
        mydate_yyyymmdd = functions.get_date_from_path_filename(basename)
        yyyy = int(mydate_yyyymmdd[0:4])
        mm = int(mydate_yyyymmdd[4:6])
        dd = int(mydate_yyyymmdd[6:8])
        # The next day's file must exist, otherwise skip this window
        day2 = datetime(yyyy, mm, dd) + timedelta(1)
        day2_filepath = input_dir + day2.strftime('%Y%m%d') + in_prod_ident
        if not functions.is_file_exists_in_path(day2_filepath):
            continue
        # The day-after-next file must exist as well
        day3 = datetime(yyyy, mm, dd) + timedelta(2)
        day3_filepath = input_dir + day3.strftime('%Y%m%d') + in_prod_ident
        if not functions.is_file_exists_in_path(day3_filepath):
            continue
        file_list.append(input_file[0])
        file_list.append(day2_filepath)
        file_list.append(day3_filepath)
        output_file = es_constants.processing_dir + subdir_3davg + os.path.sep + mydate_yyyymmdd + prod_ident_3davg
        file_list = sorted(file_list)
        # Only yield complete 3-day windows
        if len(file_list) == 3:
            yield (file_list, output_file)
def create_pipeline(prod, starting_sprod, mapset, version, starting_dates=None, proc_lists=None):
    """Build the ruffus pipeline computing the OpFish (Ocean Productive
    index for Fish) gradient sub-product from a chlorophyll product.

    Registers the 'gradient' sub-product in ``proc_lists``, resolves input
    files, and defines the ruffus task that runs the OpFish computation.

    :param prod: product code (e.g. 'modis-chla').
    :param starting_sprod: starting sub-product code (chla).
    :param mapset: mapset code.
    :param version: product version.
    :param starting_dates: optional explicit list of dates; when None a
        glob pattern over all dates is used.
    :param proc_lists: optional ProcLists container; created if None.
    :return: the (possibly newly created) ProcLists with the registered
        sub-product.
    """
    my_date = None
    # ---------------------------------------------------------------------
    # Create lists
    if proc_lists is None:
        proc_lists = functions.ProcLists()

    # Switch to enable/disable the OpFish task (consumed by @active_if)
    activate_opfish_computation = 1
    sds_meta = metadata.SdsMetadata()
    es2_data_dir = es_constants.es2globals['processing_dir'] + os.path.sep

    # ---------------------------------------------------------------------
    # Define input files (chla)
    # NOTE(review): ``ext`` is a free variable — presumably the module-level
    # output-file extension; verify against the enclosing module.
    in_prod_ident = functions.set_path_filename_no_date(
        prod, starting_sprod, mapset, version, ext)
    input_dir = es2_data_dir + functions.set_path_sub_directory(
        prod, starting_sprod, 'Ingest', version, mapset)

    if starting_dates is not None:
        starting_files = []
        for my_date in starting_dates:
            # ES2-450: check the file exists before appending
            if functions.is_file_exists_in_path(input_dir + my_date + in_prod_ident):
                starting_files.append(input_dir + my_date + in_prod_ident)
    else:
        # No explicit dates: glob pattern matching every date
        starting_files = input_dir + "*" + in_prod_ident

    # ---------------------------------------------------------------------
    # 1. Define and customize parameters
    # ---------------------------------------------------------------------
    parameters = {
        'chl_grad_min': 0.00032131,  # smaller window detects more fronts
        'chl_grad_int': 0.021107,
        'chl_feed_min': 0.08,
        'chl_feed_max': 11.0,
        # Temperature: 0.45 deg (multiply by 100 !!)
        'dc': 0.91
    }
    # NOTE(review): the modis-chla branch currently assigns the exact same
    # values as the default above — placeholder for product-specific tuning?
    if prod == 'modis-chla':
        parameters = {
            'chl_grad_min': 0.00032131,  # smaller window detects more fronts
            'chl_grad_int': 0.021107,
            'chl_feed_min': 0.08,
            'chl_feed_max': 11.0,
            # Temperature: 0.45 deg (multiply by 100 !!)
            'dc': 0.91
        }

    # ---------------------------------------------------------------------
    # Chla Gradient (raster): register the derived sub-product and build the
    # input/output path formatters for the ruffus task.
    output_sprod_group = proc_lists.proc_add_subprod_group("gradient")
    output_sprod = proc_lists.proc_add_subprod(
        "opfish",
        "gradient",
        final=False,
        descriptive_name='Ocean Productive index for Fish',
        description='Ocean Productive index for Fish',
        frequency_id='',
        date_format='YYYYMMDD',
        masked=False,
        timeseries_role='',
        active_default=True)
    prod_ident_gradient = functions.set_path_filename_no_date(
        prod, output_sprod, mapset, version, ext)
    subdir_gradient = functions.set_path_sub_directory(
        prod, output_sprod, 'Derived', version, mapset)

    # Input regex captures the date; output is rebuilt under the derived dir
    formatter_in = "(?P<YYYYMMDD>[0-9]{8})" + in_prod_ident
    formatter_out = [
        "{subpath[0][5]}" + os.path.sep + subdir_gradient + "{YYYYMMDD[0]}" + prod_ident_gradient
    ]

    @active_if(activate_opfish_computation)
    @transform(starting_files, formatter(formatter_in), formatter_out)
    def opfish_computation(input_file, output_file):
        # Ruffus task: compute the OpFish indicator for one input raster.
        no_data = int(sds_meta.get_nodata_value(input_file))
        output_file = functions.list_to_element(output_file)
        functions.check_output_dir(os.path.dirname(output_file))
        args = {
            "input_file": input_file,
            "output_file": output_file,
            "nodata": no_data,
            "output_format": 'GTIFF',
            "options": "compress = lzw",
            "parameters": parameters
        }
        raster_image_math.compute_opFish_indicator(**args)
        print('Done with raster')

    return proc_lists
def test_is_file_exists_in_path(self):
    """is_file_exists_in_path returns True for an existing fixture file."""
    sample_path = (
        '/data/test_data/tamsat-rfe/3.0/TAMSAT-Africa-4km/tif/10d/'
        '20190101_tamsat-rfe_10d_TAMSAT-Africa-4km_3.0.tif'
    )
    self.assertTrue(functions.is_file_exists_in_path(sample_path))