def func():
    """Compute the TG indice for a single MIROC5 daily-tasmax file via icclim."""
    input_paths = [
        '/media/proy/HDD500GB/DATA/CMIP5/tasmax_day_MIROC5_historical_r1i1p1_19500101-19591231.nc',
        # '/media/proy/HDD500GB/DATA/CMIP5/tasmax_day_MIROC5_historical_r1i1p1_19600101-19691231.nc',
    ]
    icclim.indice(input_paths, 'tasmax', indice_name='TG', out_file='temp.nc')
def indices(indice_pp):
    """Run icclim for the given precipitation indice name and return its result.

    Relies on module-level inputs (files_pr_12km, dt1/dt2, base_dt1/base_dt2,
    calc_indice_pp, callback) defined elsewhere in this script.
    """
    indice_kwargs = dict(
        indice_name=indice_pp,
        in_files=files_pr_12km,
        var_name='prAdjust',
        # slice_mode='AMJJAS',
        time_range=[dt1, dt2],
        base_period_time_range=[base_dt1, base_dt2],
        out_file=calc_indice_pp,
        callback=callback.defaultCallback2,
    )
    return icclim.indice(**indice_kwargs)
def execute(self):
    """WPS entry point: compute a two-variable (tasmax/tasmin) icclim indice."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    #homedir = os.environ['HOME']
    #os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    # Collect the two input file lists and their variable names.
    files_tasmax = list(self.filesTasmaxIn.getValue())
    var_tasmax = self.varTasmaxIn.getValue()
    files_tasmin = list(self.filesTasminIn.getValue())
    var_tasmin = self.varTasminIn.getValue()

    indice_name = self.indiceNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    #out_file_name = self.outputFileNameIn.getValue()
    out_file_name = "out.nc"
    level = self.NLevelIn.getValue()

    # Input is a "start/stop" string; turn it into a pair of datetimes.
    if time_range:
        pieces = time_range.split("/")
        time_range = [dateutil.parser.parse(pieces[0]),
                      dateutil.parser.parse(pieces[1])]

    self.status.set("Preparing....", 0)
    # Output directory / OPeNDAP URL handling is intentionally disabled here;
    # the result is written to the local working directory.
    self.status.set("Processing input lists: " + str(files_tasmax) +
                    " " + str(files_tasmin), 0)

    icclim.indice(indice_name=indice_name,
                  in_files=[files_tasmax, files_tasmin],
                  var_name=[var_tasmax, var_tasmin],
                  slice_mode=slice_mode,
                  time_range=time_range,
                  out_file=out_file_name,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=None,
                  window_width=5,
                  only_leap_years=False,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  netcdf_version='NETCDF4_CLASSIC',
                  out_unit='days')

    self.output.setValue(out_file_name)
    self.status.set("ready", 100)
def _process(self, parameters):
    """Run icclim with this node's own parameter dict, then pass the inputs on."""
    own_params = parameters['input'][self.name]
    icclim.indice(**own_params)
    self.write('output', parameters['input'])
def execute(self):
    """WPS entry point: compute a TIMEAVG user indice, selecting the 'median'
    ensemble member when the input file carries a 'member' variable."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    # NOTE(review): the previous cwd is saved but never restored — confirm
    # whether callers depend on the working directory afterwards.
    tmpFolderPath = os.getcwd()
    os.chdir(home)

    def callback(b):
        self.callback("Processing", b)

    files = []
    files.extend(self.filesIn.getValue())
    var = self.varNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    out_file_name = self.outputFileNameIn.getValue()
    level = self.NLevelIn.getValue()

    # WPS passes the literal string "None" for unset optional inputs.
    if level == "None":
        level = None
    if slice_mode == "None":
        slice_mode = None
    if time_range == "None":
        time_range = None
    else:
        # "start/stop" ISO dates -> [datetime, datetime]
        startdate = dateutil.parser.parse(time_range.split("/")[0])
        stopdate = dateutil.parser.parse(time_range.split("/")[1])
        time_range = [startdate, stopdate]

    self.status.set("Preparing....", 0)
    pathToAppendToOutputDirectory = "/WPS_" + self.identifier + "_" + \
        datetime.now().strftime("%Y%m%dT%H%M%SZ")
    # URL output path
    fileOutURL = os.environ['POF_OUTPUT_URL'] + pathToAppendToOutputDirectory + "/"
    # Internal output path
    fileOutPath = os.environ['POF_OUTPUT_PATH'] + pathToAppendToOutputDirectory + "/"
    # Create output directory
    mkdir_p(fileOutPath)

    self.status.set("Processing input list: " + str(files), 0)

    my_indice_params = {'indice_name': 'TIMEAVG',
                        'calc_operation': 'mean'}

    from netCDF4 import Dataset
    dataset = Dataset(files[0])

    # Scan for a "member" ensemble variable; pick the member labelled
    # "median" (index 12 is the fallback default when none is labelled so).
    isMember = False
    memberIndex = 12
    try:
        for a in range(0, dataset.variables["member"].shape[0]):
            isMember = True
            logging.debug("Checking index " + str(a))
            logging.debug("Has value " + str(dataset.variables["member"][a]))
            memberValue = str("".join(dataset.variables["member"][a]))
            logging.debug(memberValue)
            if memberValue == "median":
                memberIndex = a
    except Exception:
        # BUG FIX: was a bare "except: pass", which also swallowed
        # KeyboardInterrupt/SystemExit and hid real errors; narrowed to
        # Exception and logged for diagnosis. Absence of a "member"
        # variable is the expected (non-member data) case.
        logging.debug("No usable 'member' variable found", exc_info=True)

    if isMember:
        logging.debug("IS Member data")
        logging.debug("Using memberIndex " + str(memberIndex))
        icclim.indice(user_indice=my_indice_params,
                      in_files=files,
                      var_name=var,
                      slice_mode=slice_mode,
                      time_range=time_range,
                      out_file=fileOutPath + out_file_name,
                      threshold=None,
                      N_lev=memberIndex,
                      lev_dim_pos=0,
                      #transfer_limit_Mbytes=transfer_limit_Mb,
                      callback=callback,
                      callback_percentage_start_value=0,
                      callback_percentage_total=100,
                      base_period_time_range=None,
                      window_width=5,
                      only_leap_years=False,
                      ignore_Feb29th=True,
                      interpolation='hyndman_fan',
                      out_unit='days')
    else:
        icclim.indice(user_indice=my_indice_params,
                      in_files=files,
                      var_name=var,
                      slice_mode=slice_mode,
                      time_range=time_range,
                      out_file=fileOutPath + out_file_name,
                      threshold=None,
                      N_lev=level,
                      transfer_limit_Mbytes=transfer_limit_Mb,
                      callback=callback,
                      callback_percentage_start_value=0,
                      callback_percentage_total=100,
                      base_period_time_range=None,
                      window_width=5,
                      only_leap_years=False,
                      ignore_Feb29th=True,
                      interpolation='hyndman_fan',
                      out_unit='days')

    # Set output
    url = fileOutURL + "/" + out_file_name
    self.opendapURL.setValue(url)
    self.status.set("ready", 100)
# NOTE(review): incomplete fragment of a Python 2 script — it begins in the
# middle of an out_indice_pp filename expression whose start is outside this
# view, so the code is left byte-identical rather than reformatted.
# It runs icclim for precipitation indices on 50km HadGEM-driven data.
"-"+\ year_dt2+month_dt2+day_dt2+\ '.nc' #"_historical"+\ #"_rcp45"+\ #"_rcp85"+\ print 'Going into output file:', out_indice_pp print icclim.indice( indice_name=indice_pp, in_files=files_pr_50km, var_name='prAdjust', slice_mode='year', time_range=[dt1_HadGEM, dt2_HadGEM], base_period_time_range=[base_dt1_HadGEM, base_dt2_HadGEM], out_file=out_indice_pp, callback=callback.defaultCallback2) # #================================================================================================= # # # EC Earth model (r12i1pi in file name!) # models_list_50km_EC_EARTH = ['ICHEC-EC-EARTH'] # # ========================================================================= # # ========================================================================= # # Indices you want to calculate using lists # indice_list_pp = ['PRCPTOT','RX1day','CWD','CDD','R10mm','R20mm','R95p','RR1'] # # Indices : 'PRCPTOT','RX1day','CWD','CDD','R10mm','R20mm','R95p','RR1' # # =========================================================================
# NOTE(review): incomplete fragment of a Python 2 script — it begins in the
# middle of an out_indice_pp filename expression whose start is outside this
# view, so the code is left byte-identical rather than reformatted.
# It runs icclim for a temperature indice (tasAdjust) on 50km data.
year_dt1+month_dt1+\ day_dt1+\ "-"+\ year_dt2+month_dt2+day_dt2+\ '.nc' # #"_historical"+\ # #"_rcp45"+\ # #"_rcp85"+\ print 'Going into output file:', out_indice_pp print icclim.indice(indice_name=indice_pp, in_files=files_tas_50km, var_name='tasAdjust', slice_mode='year', time_range=[dt1,dt2], base_period_time_range=[base_dt1, base_dt2], out_file=out_indice_pp, callback=callback.defaultCallback2) #================================================================================================= # HadGEM mode (period ends with yyyy1230!) # models_list_50km_HadGEM = ['MOHC-HadGEM2-ES'] # # ========================================================================= # indice_list_pp = ['HD17'] # # Counting : 'HD17' # #indice_pp = ' # # =========================================================================
# Create output folder
if not os.path.exists(args.test_output_dir):
    os.mkdir(args.test_output_dir)

# Loop sections in test config file: each section describes one icclim run.
for section in sections:
    # BUG FIX (minor): the local `ind = section.split("_")[0]` was assigned
    # but never used, so it has been removed.
    logging.info("======> " + section)

    # Process some of the entries from the test config file, building up the
    # keyword arguments for icclim.indice step by step.
    section_dict = ConfigSectionMap(Config, section)
    section_dict = get_callback(section_dict)
    section_dict = get_input_file_path(section_dict, input_test_data_dir)
    section_dict = get_varnames_from_filenames(section_dict)
    section_dict = get_time_ranges(section_dict)
    section_dict = try_literal_interpretation(section_dict, 'slice_mode')
    section_dict = try_literal_interpretation(section_dict, 'threshold')
    # user_indice arrives as a dict literal in the config file.
    if 'user_indice' in section_dict:
        section_dict['user_indice'] = ast.literal_eval(section_dict['user_indice'])
    testid = get_test_md5hash(section_dict, section, test_output_dir)
    section_dict = get_output_file_path(test_output_dir, section, section_dict)
    logging.info("Test ID: " + testid)

    # Run test (unless only listing the available tests was requested).
    if not args.list_only:
        icclim.indice(**section_dict)
    logging.info("<====== " + section + "\n")
def execute(self):
    """WPS entry point: compute a single-variable icclim indice with optional
    thresholds, writing the result to out.nc in the working directory."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    # homedir = os.environ['HOME']
    # os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    files = self.getInputValues(identifier='files')
    var = self.varNameIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    # out_file_name = self.outputFileNameIn.getValue()
    out_file_name = 'out.nc'
    level = self.NLevelIn.getValue()
    thresholdlist = self.getInputValues(identifier='threshold')

    # "start/stop" ISO dates -> [datetime, datetime]
    if time_range:
        startdate = dateutil.parser.parse(time_range.split("/")[0])
        stopdate = dateutil.parser.parse(time_range.split("/")[1])
        time_range = [startdate, stopdate]
    LOGGER.debug("time_range: {}".format(time_range))

    thresh = None
    if thresholdlist:
        thresh = [float(threshold) for threshold in thresholdlist]
    # BUG FIX: the format string was "thresh: " with no {} placeholder, so
    # the threshold values were silently dropped from the log message.
    LOGGER.debug("thresh: {}".format(thresh))

    self.status.set("Preparing....", 0)
    # Output directory / OPeNDAP URL handling is intentionally disabled here.
    self.status.set("Processing input list: " + str(files), 0)

    icclim.indice(indice_name=indice_name,
                  in_files=files,
                  var_name=var,
                  slice_mode=slice_mode,
                  time_range=time_range,
                  out_file=out_file_name,
                  threshold=thresh,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=None,
                  window_width=5,
                  only_leap_years=False,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  netcdf_version='NETCDF4_CLASSIC',
                  out_unit='days')

    # Set output
    self.output.setValue(out_file_name)
    self.status.set("ready", 100)
def execute(self):
    """WPS entry point: run a tasmax/tasmin two-variable icclim indice."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    # homedir = os.environ['HOME']
    # os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    files_tasmax = []
    files_tasmax.extend(self.filesTasmaxIn.getValue())
    var_tasmax = self.varTasmaxIn.getValue()
    files_tasmin = []
    files_tasmin.extend(self.filesTasminIn.getValue())
    var_tasmin = self.varTasminIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    # out_file_name = self.outputFileNameIn.getValue()
    out_file_name = "out.nc"
    level = self.NLevelIn.getValue()

    # Convert a "start/stop" string into a pair of datetimes.
    if time_range:
        bounds = time_range.split("/")
        time_range = [dateutil.parser.parse(bounds[0]),
                      dateutil.parser.parse(bounds[1])]

    self.status.set("Preparing....", 0)
    # Output directory / OPeNDAP URL handling is intentionally disabled here.
    self.status.set("Processing input lists: " + str(files_tasmax) +
                    " " + str(files_tasmin), 0)

    indice_kwargs = dict(
        indice_name=indice_name,
        in_files=[files_tasmax, files_tasmin],
        var_name=[var_tasmax, var_tasmin],
        slice_mode=slice_mode,
        time_range=time_range,
        out_file=out_file_name,
        N_lev=level,
        transfer_limit_Mbytes=transfer_limit_Mb,
        callback=callback,
        callback_percentage_start_value=0,
        callback_percentage_total=100,
        base_period_time_range=None,
        window_width=5,
        only_leap_years=False,
        ignore_Feb29th=True,
        interpolation='hyndman_fan',
        netcdf_version='NETCDF4_CLASSIC',
        out_unit='days',
    )
    icclim.indice(**indice_kwargs)

    # Set output
    self.output.setValue(out_file_name)
    self.status.set("ready", 100)
def execute(self):
    """WPS entry point: compute a single-variable icclim indice with optional
    thresholds, writing the result to out.nc in the working directory."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    #homedir = os.environ['HOME']
    #os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    files = self.getInputValues(identifier='files')
    var = self.varNameIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    #out_file_name = self.outputFileNameIn.getValue()
    out_file_name = 'out.nc'
    level = self.NLevelIn.getValue()
    thresholdlist = self.getInputValues(identifier='threshold')

    # "start/stop" ISO dates -> [datetime, datetime]
    if time_range:
        startdate = dateutil.parser.parse(time_range.split("/")[0])
        stopdate = dateutil.parser.parse(time_range.split("/")[1])
        time_range = [startdate, stopdate]
    logger.debug("time_range: %s", time_range)

    thresh = None
    if thresholdlist:
        # BUG FIX: the comprehension referenced the misspelled name
        # 'threshholdList', which raised NameError whenever thresholds
        # were supplied.
        thresh = [float(threshold) for threshold in thresholdlist]
    logger.debug("thresh: %s", thresh)

    self.status.set("Preparing....", 0)
    # Kept from the original: imported for the (currently disabled) output
    # path/URL handling below. NOTE(review): unused while that code is
    # commented out — confirm before removing.
    from flyingpigeon import config
    # Output directory / OPeNDAP URL handling is intentionally disabled here.
    self.status.set("Processing input list: " + str(files), 0)

    icclim.indice(indice_name=indice_name,
                  in_files=files,
                  var_name=var,
                  slice_mode=slice_mode,
                  time_range=time_range,
                  out_file=out_file_name,
                  threshold=thresh,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=None,
                  window_width=5,
                  only_leap_years=False,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  netcdf_version='NETCDF4_CLASSIC',
                  out_unit='days')

    # Set output
    self.output.setValue(out_file_name)
    self.status.set("ready", 100)
# NOTE(review): incomplete fragment of a Python 2 script — it begins in the
# middle of an out_indice_pp filename expression whose start is outside this
# view, so the code is left byte-identical rather than reformatted.
# It runs icclim for precipitation indices on 50km HadGEM-driven data.
"-"+\ year_dt2+month_dt2+day_dt2+\ '.nc' #"_historical"+\ #"_rcp45"+\ #"_rcp85"+\ print 'Going into output file:', out_indice_pp print icclim.indice(indice_name=indice_pp, in_files=files_pr_50km, var_name='prAdjust', slice_mode='year', time_range=[dt1_HadGEM,dt2_HadGEM], base_period_time_range=[base_dt1_HadGEM, base_dt2_HadGEM], out_file=out_indice_pp, callback=callback.defaultCallback2) # #================================================================================================= # # # EC Earth model (r12i1pi in file name!) # models_list_50km_EC_EARTH = ['ICHEC-EC-EARTH'] # # ========================================================================= # # ========================================================================= # # Indices you want to calculate using lists # indice_list_pp = ['PRCPTOT','RX1day','CWD','CDD','R10mm','R20mm','R95p','RR1'] # # Indices : 'PRCPTOT','RX1day','CWD','CDD','R10mm','R20mm','R95p','RR1'
def execute(self):
    """WPS entry point with result caching: return a cached OPeNDAP URL when
    available, otherwise compute the indice and register it in the cache."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    homedir = os.environ['HOME']
    os.chdir(homedir)

    # Added for supporting cache.
    # Change the db connection parameter (if needed)
    cache = _clipccache.clipccache(username='******', password='******',
                                   server='127.0.0.1', port=5432,
                                   homedir=homedir)
    result = cache.cache_search(self, None)
    if result is not None:
        # Return already computed results.
        self.opendapURL.setValue(result)
        logging.debug("Found Result in cache " + self.opendapURL.value)
        self.status.set("ready", 100)
        return

    def callback(b):
        self.callback("Processing", b)

    files = []
    files.extend(self.filesIn.getValue())
    var = self.varNameIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    out_file_name = self.outputFileNameIn.getValue()
    level = self.NLevelIn.getValue()
    thresholdlist = self.thresholdIn.getValue()

    # WPS passes the literal string "None" for unset optional inputs.
    thresh = None
    if level == "None":
        level = None
    if time_range == "None":
        time_range = None
    else:
        bounds = time_range.split("/")
        time_range = [dateutil.parser.parse(bounds[0]),
                      dateutil.parser.parse(bounds[1])]
    if thresholdlist != "None" and thresholdlist[0] != "None":
        thresh = [float(threshold) for threshold in thresholdlist]

    self.status.set("Preparing....", 0)
    pathToAppendToOutputDirectory = ("/WPS_" + self.identifier + "_" +
                                     datetime.now().strftime("%Y%m%dT%H%M%SZ"))
    # URL output path
    fileOutURL = os.environ['POF_OUTPUT_URL'] + pathToAppendToOutputDirectory + "/"
    # Internal output path
    fileOutPath = os.environ['POF_OUTPUT_PATH'] + pathToAppendToOutputDirectory + "/"
    # Create output directory
    mkdir_p(fileOutPath)

    self.status.set("Processing input list: " + str(files), 0)
    icclim.indice(indice_name=indice_name,
                  in_files=files,
                  var_name=var,
                  slice_mode=slice_mode,
                  time_range=time_range,
                  out_file=fileOutPath + out_file_name,
                  threshold=thresh,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=None,
                  window_width=5,
                  only_leap_years=False,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  netcdf_version='NETCDF4_CLASSIC',
                  out_unit='days')

    # Set output
    url = fileOutURL + "/" + out_file_name
    self.opendapURL.setValue(url)

    # Added for supporting cache: record this result for future requests.
    resinsert = cache.insert_new(self, None)
    if resinsert is None:
        logging.error("Error updating the cache catalog")

    self.status.set("ready", 100)
def execute(self):
    """WPS entry point: compute a percentile-based indice for a study period
    against a reference (base) period."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    #homedir = os.environ['HOME']
    #os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    indice_name = self.indiceNameIn.getValue()
    in_files = self.getInputValues(identifier='filesBasePeriod')
    time_range_base_period = self.timeRangeBasePeriodIn.getValue()
    var_name = self.varNameIn.getValue()
    leap_nonleap_years = self.leapNonLeapYearsIn.getValue()
    in_files.extend(self.getInputValues(identifier='filesStudyPeriod'))
    time_range_study_period = self.timeRangeStudyPeriodIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    #out_file_name = self.outputFileNameIn.getValue()
    out_file_name = 'out.nc'
    level = self.NLevelIn.getValue()

    def _parse_range(raw):
        # "start/stop" ISO dates -> [datetime, datetime]
        pieces = raw.split("/")
        return [dateutil.parser.parse(pieces[0]),
                dateutil.parser.parse(pieces[1])]

    if time_range_base_period:
        time_range_base_period = _parse_range(time_range_base_period)
    if time_range_study_period:
        time_range_study_period = _parse_range(time_range_study_period)

    self.status.set("Preparing....", 0)
    # Output directory / OPeNDAP URL handling is intentionally disabled here.
    self.status.set("Processing input list: " + str(in_files), 0)

    icclim.indice(indice_name=indice_name,
                  in_files=in_files,
                  var_name=var_name,
                  slice_mode=slice_mode,
                  time_range=time_range_study_period,
                  out_file=out_file_name,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=time_range_base_period,
                  window_width=5,
                  only_leap_years=leap_nonleap_years,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  netcdf_version='NETCDF4_CLASSIC',
                  out_unit='days')

    # Set output
    self.output.setValue(out_file_name)
    self.status.set("ready", 100)
# NOTE(review): incomplete fragment of a Python 2 script — it begins in the
# middle of an indice_out_name filename expression whose start is outside
# this view, so the code is left byte-identical rather than reformatted.
# It runs the final icclim call of the script and then exits via quit().
'.nc' #"_historical"+\ # #"_rcp45"+\ # #"_rcp85"+\ print 'Going into output file:', indice_out_name print # # ========================================================================= icclim.indice(indice_name=indice_pp, in_files=files_tas_nbc_50km, var_name='tas', slice_mode='year', time_range=[dt1,dt2], base_period_time_range=[base_dt1, base_dt2], out_file=indice_out_name, callback=callback.defaultCallback2) #Done with calculations for all 9/9 models print print 'Done!' print quit()
def execute(self):
    """WPS entry point: compute a compound temperature/precipitation indice
    (CW, CD, WD or WW) as an icclim user indice."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    homedir = os.environ['HOME']
    os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    in_files_t = []
    in_files_t.extend(self.filesBasePeriodTemperatureIn.getValue())
    in_files_p = []
    in_files_p.extend(self.filesBasePeriodPrecipitationIn.getValue())
    time_range_base_period = self.timeRangeBasePeriodIn.getValue()
    time_range_study_period = self.timeRangeStudyPeriodIn.getValue()
    var_name_t = self.varNameTemperatureIn.getValue()
    var_name_p = self.varNamePrecipitationIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    in_files_t.extend(self.filesStudyPeriodTemperatureIn.getValue())
    in_files_p.extend(self.filesStudyPeriodPrecipitationIn.getValue())
    leap_nonleap_years = self.leapNonLeapYearsIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    out_file_name = self.outputFileNameIn.getValue()
    level = self.NLevelIn.getValue()

    # WPS passes the literal string "None" for unset optional inputs.
    if level == "None":
        level = None
    if time_range_base_period == "None":
        time_range_base_period = None
    else:
        startdate = dateutil.parser.parse(time_range_base_period.split("/")[0])
        stopdate = dateutil.parser.parse(time_range_base_period.split("/")[1])
        time_range_base_period = [startdate, stopdate]
    if time_range_study_period == "None":
        time_range_study_period = None
    else:
        startdate = dateutil.parser.parse(time_range_study_period.split("/")[0])
        stopdate = dateutil.parser.parse(time_range_study_period.split("/")[1])
        time_range_study_period = [startdate, stopdate]
    if leap_nonleap_years == "take all years (leap and non-leap)":
        leap_nonleap_years = False
    else:
        leap_nonleap_years = True
    # Removed: "home = expanduser('~')" was assigned but never used.

    self.status.set("Preparing....", 0)
    pathToAppendToOutputDirectory = "/WPS_" + self.identifier + "_" + \
        datetime.now().strftime("%Y%m%dT%H%M%SZ")
    # URL output path
    fileOutURL = os.environ['POF_OUTPUT_URL'] + pathToAppendToOutputDirectory + "/"
    # Internal output path
    fileOutPath = os.environ['POF_OUTPUT_PATH'] + pathToAppendToOutputDirectory + "/"
    # Create output directory
    mkdir_p(fileOutPath)

    self.status.set("Processing input lists: " + str(in_files_t) +
                    " " + str(in_files_p), 0)

    # CW (cold/wet days): (TG < 25th pctl) and (RR > 75th pctl)
    # CD (cold/dry days): (TG < 25th pctl) and (RR < 25th pctl)
    # WD (warm/dry days): (TG > 75th pctl) and (RR < 25th pctl)
    # WW (warm/wet days): (TG > 75th pctl) and (RR > 75th pctl)
    compound_defs = {
        'CW': (['lt', 'gt'], ['p25', 'p75']),
        'CD': (['lt', 'lt'], ['p25', 'p25']),
        'WD': (['gt', 'lt'], ['p75', 'p25']),
        'WW': (['gt', 'gt'], ['p75', 'p75']),
    }
    try:
        logical_operation, thresh = compound_defs[indice_name]
    except KeyError:
        # ROBUSTNESS FIX: an unrecognised indice previously fell through the
        # if/elif chain and caused a confusing NameError on
        # 'logical_operation' further down; fail fast instead.
        raise ValueError("Unsupported compound indice: %r" % (indice_name,))

    my_indice_params = {'indice_name': indice_name,
                        'calc_operation': 'nb_events',
                        ### 'calc_operation': 'max_nb_consecutive_events'
                        'logical_operation': logical_operation,
                        'thresh': thresh,
                        'var_type': ['t', 'p'],
                        'link_logical_operations': 'and'
                        }

    icclim.indice(user_indice=my_indice_params,
                  in_files=[in_files_t, in_files_p],
                  var_name=[var_name_t, var_name_p],
                  slice_mode=slice_mode,
                  time_range=time_range_study_period,
                  out_file=fileOutPath + out_file_name,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=time_range_base_period,
                  window_width=5,
                  only_leap_years=leap_nonleap_years,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  out_unit='days')

    # Set output
    url = fileOutURL + "/" + out_file_name
    self.opendapURL.setValue(url)
    self.status.set("ready", 100)
def execute(self):
    """WPS entry point: compute a compound temperature/precipitation indice
    (CW, CD, WD or WW) as an icclim user indice, written to out.nc."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    #homedir = os.environ['HOME']
    #os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    in_files_t = []
    in_files_t.extend(self.filesBasePeriodTemperatureIn.getValue())
    in_files_p = []
    in_files_p.extend(self.filesBasePeriodPrecipitationIn.getValue())
    time_range_base_period = self.timeRangeBasePeriodIn.getValue()
    time_range_study_period = self.timeRangeStudyPeriodIn.getValue()
    var_name_t = self.varNameTemperatureIn.getValue()
    var_name_p = self.varNamePrecipitationIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    in_files_t.extend(self.filesStudyPeriodTemperatureIn.getValue())
    in_files_p.extend(self.filesStudyPeriodPrecipitationIn.getValue())
    leap_nonleap_years = self.leapNonLeapYearsIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    #out_file_name = self.outputFileNameIn.getValue()
    out_file_name = 'out.nc'
    level = self.NLevelIn.getValue()

    # "start/stop" ISO dates -> [datetime, datetime]
    if time_range_base_period:
        startdate = dateutil.parser.parse(
            time_range_base_period.split("/")[0])
        stopdate = dateutil.parser.parse(
            time_range_base_period.split("/")[1])
        time_range_base_period = [startdate, stopdate]
    if time_range_study_period:
        startdate = dateutil.parser.parse(
            time_range_study_period.split("/")[0])
        stopdate = dateutil.parser.parse(
            time_range_study_period.split("/")[1])
        time_range_study_period = [startdate, stopdate]

    self.status.set("Preparing....", 0)
    # Output directory / OPeNDAP URL handling is intentionally disabled here.
    self.status.set(
        "Processing input lists: " + str(in_files_t) + " " + str(in_files_p),
        0)

    # CW (cold/wet days): (TG < 25th pctl) and (RR > 75th pctl)
    # CD (cold/dry days): (TG < 25th pctl) and (RR < 25th pctl)
    # WD (warm/dry days): (TG > 75th pctl) and (RR < 25th pctl)
    # WW (warm/wet days): (TG > 75th pctl) and (RR > 75th pctl)
    if indice_name == 'CW':
        logical_operation = ['lt', 'gt']
        thresh = ['p25', 'p75']
    elif indice_name == 'CD':
        logical_operation = ['lt', 'lt']
        thresh = ['p25', 'p25']
    elif indice_name == 'WD':
        logical_operation = ['gt', 'lt']
        thresh = ['p75', 'p25']
    elif indice_name == 'WW':
        logical_operation = ['gt', 'gt']
        thresh = ['p75', 'p75']
    else:
        # ROBUSTNESS FIX: previously an unknown indice fell through and
        # raised NameError on 'logical_operation' below; fail fast with a
        # clear message instead.
        raise ValueError("Unsupported compound indice: %r" % (indice_name,))

    my_indice_params = {
        'indice_name': indice_name,
        'calc_operation': 'nb_events',
        ### 'calc_operation': 'max_nb_consecutive_events'
        'logical_operation': logical_operation,
        'thresh': thresh,
        'var_type': ['t', 'p'],
        'link_logical_operations': 'and'
    }

    icclim.indice(user_indice=my_indice_params,
                  in_files=[in_files_t, in_files_p],
                  var_name=[var_name_t, var_name_p],
                  slice_mode=slice_mode,
                  time_range=time_range_study_period,
                  out_file=out_file_name,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=time_range_base_period,
                  window_width=5,
                  only_leap_years=leap_nonleap_years,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  out_unit='days')

    # Set output
    self.output.setValue(out_file_name)
    self.status.set("ready", 100)
def execute(self):
    """WPS entry point: percentile-based indice over a study period, using a
    separate base (reference) period for the percentile climatology."""
    # Very important: This allows the NetCDF library to find the users
    # credentials (X509 cert)
    #homedir = os.environ['HOME']
    #os.chdir(homedir)

    def callback(b):
        self.callback("Processing", b)

    indice_name = self.indiceNameIn.getValue()
    in_files = self.getInputValues(identifier='filesBasePeriod')
    time_range_base_period = self.timeRangeBasePeriodIn.getValue()
    var_name = self.varNameIn.getValue()
    leap_nonleap_years = self.leapNonLeapYearsIn.getValue()
    in_files.extend(self.getInputValues(identifier='filesStudyPeriod'))
    time_range_study_period = self.timeRangeStudyPeriodIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    #out_file_name = self.outputFileNameIn.getValue()
    out_file_name = 'out.nc'
    level = self.NLevelIn.getValue()

    # Each range arrives as "start/stop"; convert to [datetime, datetime].
    if time_range_base_period:
        pieces = time_range_base_period.split("/")
        time_range_base_period = [dateutil.parser.parse(pieces[0]),
                                  dateutil.parser.parse(pieces[1])]
    if time_range_study_period:
        pieces = time_range_study_period.split("/")
        time_range_study_period = [dateutil.parser.parse(pieces[0]),
                                   dateutil.parser.parse(pieces[1])]

    self.status.set("Preparing....", 0)
    # Output directory / OPeNDAP URL handling is intentionally disabled here.
    self.status.set("Processing input list: " + str(in_files), 0)

    indice_kwargs = dict(
        indice_name=indice_name,
        in_files=in_files,
        var_name=var_name,
        slice_mode=slice_mode,
        time_range=time_range_study_period,
        out_file=out_file_name,
        N_lev=level,
        transfer_limit_Mbytes=transfer_limit_Mb,
        callback=callback,
        callback_percentage_start_value=0,
        callback_percentage_total=100,
        base_period_time_range=time_range_base_period,
        window_width=5,
        only_leap_years=leap_nonleap_years,
        ignore_Feb29th=True,
        interpolation='hyndman_fan',
        netcdf_version='NETCDF4_CLASSIC',
        out_unit='days',
    )
    icclim.indice(**indice_kwargs)

    # Set output
    self.output.setValue(out_file_name)
    self.status.set("ready", 100)
from datetime import datetime from time import time from glob import glob #in_file_OpenDAP = 'http://opendap.nmdc.eu/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20060101-20251231.nc' #in_file_OpenDAP2 = 'http://opendap.nmdc.eu/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20260101-20501231.nc' tas_path = '/data/tatarinova/CMIP5/tas_day/' tas_files = glob(tas_path + '*.nc') dt1 = datetime(1980,01,01) dt2 = datetime(2000,12,31) ofile = 'huuuuj.nc' start = time() icclim.indice(in_files = tas_files, out_file = ofile, var = 'tas', indice_name = 'TX', time_range=[dt1, dt2], slice_mode='month', project='CMIP5') stop = time() time1 = stop - start print 'time: ', time1
def execute(self):
    """Run the WPS process: collect the inputs, compute the requested
    climate indice with icclim, and expose the result via an OPeNDAP URL.
    """
    # Very important: this lets the NetCDF library locate the user's
    # credentials (X509 cert) by working from the home directory.
    home_dir = os.environ['HOME']
    os.chdir(home_dir)

    def progress(pct):
        # Relay icclim's progress percentage to the WPS status line.
        self.callback("Processing", pct)

    input_files = list(self.filesIn.getValue())
    variable = self.varNameIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    out_file_name = self.outputFileNameIn.getValue()
    level = self.NLevelIn.getValue()
    threshold_list = self.thresholdIn.getValue()

    # Literal string "None" means the optional input was not supplied.
    if level == "None":
        level = None

    if time_range == "None":
        time_range = None
    else:
        # "start/stop" string -> [datetime, datetime] pair for icclim.
        bounds = time_range.split("/")
        time_range = [dateutil.parser.parse(bounds[0]),
                      dateutil.parser.parse(bounds[1])]

    thresholds = None
    if threshold_list != "None" and threshold_list[0] != "None":
        thresholds = [float(t) for t in threshold_list]

    self.status.set("Preparing....", 0)

    # Per-run output directory, unique via a UTC-style timestamp suffix.
    run_dir = "/WPS_" + self.identifier + "_" + datetime.now().strftime("%Y%m%dT%H%M%SZ")
    # URL output path
    out_url_base = os.environ['POF_OUTPUT_URL'] + run_dir + "/"
    # Internal output path
    out_path = os.environ['POF_OUTPUT_PATH'] + run_dir + "/"
    # Create output directory
    mkdir_p(out_path)

    self.status.set("Processing input list: " + str(input_files), 0)
    icclim.indice(indice_name=indice_name,
                  in_files=input_files,
                  var_name=variable,
                  slice_mode=slice_mode,
                  time_range=time_range,
                  out_file=out_path + out_file_name,
                  threshold=thresholds,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=progress,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=None,
                  window_width=5,
                  only_leap_years=False,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  netcdf_version='NETCDF4_CLASSIC',
                  out_unit='days')

    # Set output: publish the OPeNDAP URL of the produced file.
    self.opendapURL.setValue(out_url_base + "/" + out_file_name)
    self.status.set("ready", 100)
# test icclim import icclim from datetime import datetime from time import time in_file_OpenDAP = 'http://opendap.nmdc.eu/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20060101-20251231.nc' dt1 = datetime(2010,01,01) dt2 = datetime(2010,12,31) ofile = 'huuuuj.nc' start = time() icclim.indice([in_file_OpenDAP], ofile, 'tas', 'GSL', time_range=[dt1, dt2], slice_mode='year', project='CMIP5', N_lev=None) stop = time() time1 = stop - start print 'time: ', time1
index_period) fname_out = fname_out.replace(index_var, index_name, 1) f_out = fileout_format.format(run['institute'], run['model'], run['label'], index_period, index_name, run['grid'], 'latest', fname_out) # test if directory path exists, if not create if not os.path.exists(os.path.dirname(f_out)): os.makedirs(os.path.dirname(f_out)) # these are user-defined indices if 'params' in index.keys(): icclim.indice(user_indice=index['params'], in_files=files, var_name=index_var, slice_mode=index_period, out_file=f_out) # need to edit the long_name to make it consistent with this user variable cmd = 'ncatted -O -h -a long_name,' + index_name + ',o,c,"' + index[ 'long_name'] + '" ' + f_out os.system(cmd) if index_name == 'CDD' and run['institute'] == 'CMCC': # need to scale 6 hour period back to days with Dataset(f_out, 'r+') as fh: cdd = fh.variables[index_name] cdd_rescale = cdd[:] cdd_rescale = cdd[:] // 4 cdd[:] = cdd_rescale elif run['institute'] == 'CMCC' and ( index_name == 'RX1day' or index_name == 'RX5day'):
"-"+\ year_dt2+month_dt2+day_dt2+\ '.nc' #"_historical"+\ #"_rcp45"+\ #"_rcp85"+\ print 'Going into output file:', indice_out_name print icclim.indice(indice_name=indice_pp, in_files=files_pr_nbc_50km, var_name='pr', slice_mode='year', time_range=[dt1,dt2], base_period_time_range=[base_dt1, base_dt2], out_file=indice_out_name, callback=callback.defaultCallback2) #================================================================================================= # # HadGEM mode (period ends iwth yyyy1230!) # models_list_50km_HadGEM = ['MOHC-HadGEM2-ES'] # # Important! # # ========================================================================= # # Declare which indices you want to calculate using lists # indice_list_pp = ['PRCPTOT','RX1day','RR1','CWD','CDD','R10mm','R20mm','R95p']
def execute(self):
    """Run the WPS process: collect the inputs, compute the requested
    climate indice with icclim, and publish the result as an OPeNDAP URL.
    """
    # Very important: This allows the NetCDF library to find the users credentials (X509 cert)
    homedir = os.environ['HOME']
    os.chdir(homedir)

    # Relay icclim's progress percentage to the WPS status callback.
    def callback(b):
        self.callback("Processing", b)

    # Gather process inputs.
    files = []
    files.extend(self.filesIn.getValue())
    var = self.varNameIn.getValue()
    indice_name = self.indiceNameIn.getValue()
    slice_mode = self.sliceModeIn.getValue()
    time_range = self.timeRangeIn.getValue()
    out_file_name = self.outputFileNameIn.getValue()
    level = self.NLevelIn.getValue()
    thresholdlist = self.thresholdIn.getValue()
    thresh = None
    # The literal string "None" marks an optional input that was not supplied.
    if (level == "None"):
        level = None
    if (time_range == "None"):
        time_range = None
    else:
        # "start/stop" string -> [datetime, datetime] pair for icclim.
        startdate = dateutil.parser.parse(time_range.split("/")[0])
        stopdate = dateutil.parser.parse(time_range.split("/")[1])
        time_range = [startdate, stopdate]
    if (thresholdlist != "None"):
        if (thresholdlist[0] != "None"):
            # Thresholds arrive as strings; icclim expects floats.
            thresh = []
            for threshold in thresholdlist:
                thresh.append(float(threshold))
    self.status.set("Preparing....", 0)
    # Per-run output directory, made unique by a timestamp suffix.
    pathToAppendToOutputDirectory = "/WPS_" + self.identifier + "_" + datetime.now(
    ).strftime("%Y%m%dT%H%M%SZ")
    """ URL output path """
    fileOutURL = os.environ[
        'POF_OUTPUT_URL'] + pathToAppendToOutputDirectory + "/"
    """ Internal output path"""
    fileOutPath = os.environ[
        'POF_OUTPUT_PATH'] + pathToAppendToOutputDirectory + "/"
    """ Create output directory """
    mkdir_p(fileOutPath)
    self.status.set("Processing input list: " + str(files), 0)
    # NOTE(review): transfer_limit_Mb is not defined in this method;
    # presumably a module-level constant -- confirm it exists at runtime.
    icclim.indice(indice_name=indice_name,
                  in_files=files,
                  var_name=var,
                  slice_mode=slice_mode,
                  time_range=time_range,
                  out_file=fileOutPath + out_file_name,
                  threshold=thresh,
                  N_lev=level,
                  transfer_limit_Mbytes=transfer_limit_Mb,
                  callback=callback,
                  callback_percentage_start_value=0,
                  callback_percentage_total=100,
                  base_period_time_range=None,
                  window_width=5,
                  only_leap_years=False,
                  ignore_Feb29th=True,
                  interpolation='hyndman_fan',
                  netcdf_version='NETCDF4_CLASSIC',
                  out_unit='days')
    """ Set output """
    # Publish the OPeNDAP URL of the produced file.
    url = fileOutURL + "/" + out_file_name
    self.opendapURL.setValue(url)
    self.status.set("ready", 100)