def execute(self):
    """Run the analogs-detection process on a reanalyses dataset.

    Reads the WPS inputs, fetches and subsets the reanalyses data,
    prepares the archive/simulation netCDF files, writes the CASTf90
    config file, executes ``analogue.out`` and sets the process outputs.

    Raises:
        Exception: on any failed stage (environment setup, data fetch,
            subsetting, config generation or the CASTf90 call).
    """
    import time  # performance test
    process_start_time = time.time()  # measure process execution time ...

    from os import path
    from tempfile import mkstemp
    from flyingpigeon import analogs
    from datetime import datetime as dt
    from flyingpigeon.ocgis_module import call
    from flyingpigeon.datafetch import reanalyses
    from flyingpigeon.utils import get_variable, rename_variable

    self.status.set('execution started at : %s ' % dt.now(), 5)
    start_time = time.time()  # measure init ...

    #######################
    # read input parameters
    #######################
    resource = self.getInputValues(identifier='resource')
    bbox_obj = self.BBox.getValue()
    refSt = self.getInputValues(identifier='refSt')
    refEn = self.getInputValues(identifier='refEn')
    dateSt = self.getInputValues(identifier='dateSt')
    dateEn = self.getInputValues(identifier='dateEn')
    normalize = self.getInputValues(identifier='normalize')[0]
    distance = self.getInputValues(identifier='dist')[0]
    outformat = self.getInputValues(identifier='outformat')[0]
    timewin = int(self.getInputValues(identifier='timewin')[0])
    # FIX: seasonwin and nanalog are passed to analogs.get_configfile()
    # below but were never read from the inputs (NameError at runtime).
    seasonwin = int(self.getInputValues(identifier='seasonwin')[0])
    nanalog = int(self.getInputValues(identifier='nanalog')[0])
    experiment = self.getInputValues(identifier='experiment')[0]
    dataset, var = experiment.split('_')

    refSt = dt.strptime(refSt[0], '%Y-%m-%d')
    refEn = dt.strptime(refEn[0], '%Y-%m-%d')
    dateSt = dt.strptime(dateSt[0], '%Y-%m-%d')
    dateEn = dt.strptime(dateEn[0], '%Y-%m-%d')

    # The literal string 'None' means: do not normalize the seasonal cycle.
    if normalize == 'None':
        seacyc = False
    else:
        seacyc = True

    if outformat == 'ascii':
        outformat = '.txt'
    elif outformat == 'netCDF':
        outformat = '.nc'
    else:
        logger.error('output format not valid')

    if bbox_obj is not None:
        logger.info("bbox_obj={0}".format(bbox_obj.coords))
        bbox = [bbox_obj.coords[0][0],
                bbox_obj.coords[0][1],
                bbox_obj.coords[1][0],
                bbox_obj.coords[1][1]]
        logger.info("bbox={0}".format(bbox))
    else:
        bbox = None

    # start = min( refSt, dateSt )
    # end = max( refEn, dateEn )
    # region = self.getInputValues(identifier='region')[0]
    # bbox = [float(b) for b in region.split(',')]

    try:
        if dataset == 'NCEP':
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'slp'
                level = None
                # conform_units_to='hPa'
        # NOTE(review): this matches '20CRV2' against var, not dataset —
        # looks inconsistent with the NCEP test above; confirm intent.
        elif '20CRV2' in var:
            # FIX: the original tested "'z' in level" although level is not
            # defined yet in this branch (NameError); test the variable
            # name instead, consistent with the NCEP branch.
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'prmsl'
                level = None
                # conform_units_to='hPa'
        else:
            logger.error('Reanalyses dataset not known')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("init took %s seconds.", time.time() - start_time)
    self.status.set('Read in the arguments', 5)

    #################
    # get input data
    #################
    start_time = time.time()  # measure get_input_data ...
    self.status.set('fetching input data', 7)
    try:
        input = reanalyses(start=dateSt.year, end=dateEn.year,
                           variable=var, dataset=dataset)
        nc_subset = call(resource=input, variable=var, geom=bbox)
    except Exception as e:
        msg = 'failed to fetch or subset input files %s' % e
        logger.error(msg)
        raise Exception(msg)
    logger.debug("get_input_subset_dataset took %s seconds.",
                 time.time() - start_time)
    self.status.set('**** Input data fetched', 10)

    ########################
    # input data preperation
    ########################
    self.status.set('Start preparing input data', 12)
    start_time = time.time()  # mesure data preperation ...
    try:
        self.status.set('Preparing simulation data', 15)
        simulation = call(resource=nc_subset, time_range=[dateSt, dateEn])
    except Exception:
        # best-effort: failure is logged; the archive step below will
        # fail hard if simulation is actually missing
        msg = 'failed to prepare simulation period'
        logger.debug(msg)

    try:
        self.status.set('Preparing target data', 17)
        var_target = get_variable(resource)
        # var_simulation = get_variable(simulation)
        archive = call(resource=resource,
                       variable=var_target,
                       time_range=[refSt, refEn],
                       geom=bbox,
                       t_calendar='standard',
                       # conform_units_to=conform_units_to,
                       spatial_wrapping='wrap',
                       regrid_destination=simulation,
                       regrid_options='bil')
    except Exception as e:
        msg = 'failed subset archive dataset %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        if var != var_target:
            rename_variable(archive, oldname=var_target, newname=var)
            logger.info('varname %s in netCDF renamed to %s'
                        % (var_target, var))
    except Exception as e:
        msg = 'failed to rename variable in target files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            # FIX: the original did "seasoncyc_base, seasoncyc_sim = None",
            # which raises TypeError (cannot unpack None) whenever the
            # seasonal-cycle normalization is switched off.
            seasoncyc_base = None
            seasoncyc_sim = None
    except Exception as e:
        msg = 'failed to prepare seasonal cycle reference files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    ip, output = mkstemp(dir='.', suffix='.txt')
    output_file = path.abspath(output)
    files = [path.abspath(archive), path.abspath(simulation), output_file]

    logger.debug("data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generating the config file
    ############################
    self.status.set('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception as e:
        msg = 'failed to generate config file %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    logger.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    self.status.set('Start CASTf90 call', 20)
    try:
        # self.status.set('execution of CASTf90', 50)
        cmd = 'analogue.out %s' % path.relpath(config_file)
        # system(cmd)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE).communicate()
        logger.info('analogue.out info:\n %s ' % output)
        logger.debug('analogue.out errors:\n %s ' % error)
        self.status.set('**** CASTf90 suceeded', 90)
    except Exception as e:
        msg = 'CASTf90 failed %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("castf90 took %s seconds.", time.time() - start_time)
    self.status.set('preparting output', 99)

    self.config.setValue(config_file)
    self.analogs.setValue(output_file)
    self.simulation_netcdf.setValue(simulation)
    self.target_netcdf.setValue(archive)

    self.status.set('execution ended', 100)
    logger.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
def execute(self):
    """Run the analogs-detection process on a reanalyses dataset.

    Duplicate variant of the process runner: reads the WPS inputs,
    fetches/subsets the reanalyses data, prepares archive and simulation
    files, writes the CASTf90 config file, runs ``analogue.out`` and
    sets the process outputs.

    Raises:
        Exception: on any failed stage.
    """
    import time  # performance test
    process_start_time = time.time()  # measure process execution time ...

    from os import path
    from tempfile import mkstemp
    from flyingpigeon import analogs
    from datetime import datetime as dt
    from flyingpigeon.ocgis_module import call
    from flyingpigeon.datafetch import reanalyses
    from flyingpigeon.utils import get_variable, rename_variable

    self.status.set('execution started at : %s ' % dt.now(), 5)
    start_time = time.time()  # measure init ...

    #######################
    # read input parameters
    #######################
    resource = self.getInputValues(identifier='resource')
    bbox_obj = self.BBox.getValue()
    refSt = self.getInputValues(identifier='refSt')
    refEn = self.getInputValues(identifier='refEn')
    dateSt = self.getInputValues(identifier='dateSt')
    dateEn = self.getInputValues(identifier='dateEn')
    normalize = self.getInputValues(identifier='normalize')[0]
    distance = self.getInputValues(identifier='dist')[0]
    outformat = self.getInputValues(identifier='outformat')[0]
    timewin = int(self.getInputValues(identifier='timewin')[0])
    # FIX: seasonwin and nanalog are required by analogs.get_configfile()
    # below but were never read from the inputs (NameError at runtime).
    seasonwin = int(self.getInputValues(identifier='seasonwin')[0])
    nanalog = int(self.getInputValues(identifier='nanalog')[0])
    experiment = self.getInputValues(identifier='experiment')[0]
    dataset, var = experiment.split('_')

    refSt = dt.strptime(refSt[0], '%Y-%m-%d')
    refEn = dt.strptime(refEn[0], '%Y-%m-%d')
    dateSt = dt.strptime(dateSt[0], '%Y-%m-%d')
    dateEn = dt.strptime(dateEn[0], '%Y-%m-%d')

    # The literal string 'None' disables seasonal-cycle normalization.
    if normalize == 'None':
        seacyc = False
    else:
        seacyc = True

    if outformat == 'ascii':
        outformat = '.txt'
    elif outformat == 'netCDF':
        outformat = '.nc'
    else:
        logger.error('output format not valid')

    if bbox_obj is not None:
        logger.info("bbox_obj={0}".format(bbox_obj.coords))
        bbox = [bbox_obj.coords[0][0],
                bbox_obj.coords[0][1],
                bbox_obj.coords[1][0],
                bbox_obj.coords[1][1]]
        logger.info("bbox={0}".format(bbox))
    else:
        bbox = None

    # start = min( refSt, dateSt )
    # end = max( refEn, dateEn )
    # region = self.getInputValues(identifier='region')[0]
    # bbox = [float(b) for b in region.split(',')]

    try:
        if dataset == 'NCEP':
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'slp'
                level = None
                # conform_units_to='hPa'
        # NOTE(review): matches '20CRV2' against var, not dataset —
        # looks inconsistent with the NCEP test above; confirm intent.
        elif '20CRV2' in var:
            # FIX: the original tested "'z' in level" although level is
            # undefined in this branch (NameError); test the variable
            # name, consistent with the NCEP branch.
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'prmsl'
                level = None
                # conform_units_to='hPa'
        else:
            logger.error('Reanalyses dataset not known')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("init took %s seconds.", time.time() - start_time)
    self.status.set('Read in the arguments', 5)

    #################
    # get input data
    #################
    start_time = time.time()  # measure get_input_data ...
    self.status.set('fetching input data', 7)
    try:
        input = reanalyses(start=dateSt.year, end=dateEn.year,
                           variable=var, dataset=dataset)
        nc_subset = call(resource=input, variable=var, geom=bbox)
    except Exception as e:
        msg = 'failed to fetch or subset input files %s' % e
        logger.error(msg)
        raise Exception(msg)
    logger.debug("get_input_subset_dataset took %s seconds.",
                 time.time() - start_time)
    self.status.set('**** Input data fetched', 10)

    ########################
    # input data preperation
    ########################
    self.status.set('Start preparing input data', 12)
    start_time = time.time()  # mesure data preperation ...
    try:
        self.status.set('Preparing simulation data', 15)
        simulation = call(resource=nc_subset, time_range=[dateSt, dateEn])
    except Exception:
        # best-effort: the archive step below fails hard if simulation
        # is actually missing
        msg = 'failed to prepare simulation period'
        logger.debug(msg)

    try:
        self.status.set('Preparing target data', 17)
        var_target = get_variable(resource)
        # var_simulation = get_variable(simulation)
        archive = call(resource=resource,
                       variable=var_target,
                       time_range=[refSt, refEn],
                       geom=bbox,
                       t_calendar='standard',
                       # conform_units_to=conform_units_to,
                       spatial_wrapping='wrap',
                       regrid_destination=simulation,
                       regrid_options='bil')
    except Exception as e:
        msg = 'failed subset archive dataset %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        if var != var_target:
            rename_variable(archive, oldname=var_target, newname=var)
            logger.info('varname %s in netCDF renamed to %s'
                        % (var_target, var))
    except Exception as e:
        msg = 'failed to rename variable in target files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            # FIX: the original did "seasoncyc_base, seasoncyc_sim = None",
            # which raises TypeError (cannot unpack None) whenever
            # normalization is switched off.
            seasoncyc_base = None
            seasoncyc_sim = None
    except Exception as e:
        msg = 'failed to prepare seasonal cycle reference files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    ip, output = mkstemp(dir='.', suffix='.txt')
    output_file = path.abspath(output)
    files = [path.abspath(archive), path.abspath(simulation), output_file]

    logger.debug("data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generating the config file
    ############################
    self.status.set('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception as e:
        msg = 'failed to generate config file %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    logger.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    self.status.set('Start CASTf90 call', 20)
    try:
        # self.status.set('execution of CASTf90', 50)
        cmd = 'analogue.out %s' % path.relpath(config_file)
        # system(cmd)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE).communicate()
        logger.info('analogue.out info:\n %s ' % output)
        logger.debug('analogue.out errors:\n %s ' % error)
        self.status.set('**** CASTf90 suceeded', 90)
    except Exception as e:
        msg = 'CASTf90 failed %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("castf90 took %s seconds.", time.time() - start_time)
    self.status.set('preparting output', 99)

    self.config.setValue(config_file)
    self.analogs.setValue(output_file)
    self.simulation_netcdf.setValue(simulation)
    self.target_netcdf.setValue(archive)

    self.status.set('execution ended', 100)
    logger.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
def execute(self):
    """Run the analogs-detection process and build the analogs viewer.

    Reads the WPS inputs, fetches/subsets the reanalyses data, prepares
    archive and simulation files with descriptive names, writes the
    CASTf90 config file, runs ``analogue.out``, reformats the analogs
    output and renders the HTML viewer.

    Raises:
        Exception: on any failed stage.
    """
    init_process_logger('log.txt')
    self.output_log.setValue('log.txt')

    import time  # performance test
    process_start_time = time.time()  # measure process execution time ...

    from os import path
    from tempfile import mkstemp
    from datetime import datetime as dt
    from flyingpigeon.ocgis_module import call
    from flyingpigeon import analogs
    from flyingpigeon.datafetch import reanalyses

    self.status.set('execution started at : %s ' % dt.now(), 5)
    start_time = time.time()  # measure init ...

    #######################
    # read input parameters
    #######################
    try:
        self.status.set('read input parameter : %s ' % dt.now(), 5)
        refSt = self.getInputValues(identifier='refSt')
        refEn = self.getInputValues(identifier='refEn')
        dateSt = self.getInputValues(identifier='dateSt')
        dateEn = self.getInputValues(identifier='dateEn')
        seasonwin = int(self.getInputValues(identifier='seasonwin')[0])
        nanalog = int(self.getInputValues(identifier='nanalog')[0])
        bbox_obj = self.BBox.getValue()
        normalize = self.getInputValues(identifier='normalize')[0]
        distance = self.getInputValues(identifier='dist')[0]
        outformat = self.getInputValues(identifier='outformat')[0]
        timewin = int(self.getInputValues(identifier='timewin')[0])
        experiment = self.getInputValues(identifier='experiment')[0]
        logger.info('input parameters set')
        self.status.set('Read in and convert the arguments', 5)
    except Exception as e:
        msg = 'failed to read input prameter %s ' % e
        logger.error(msg)
        raise Exception(msg)

    ######################################
    # convert types and set environment
    ######################################
    try:
        self.status.set('Preparing enviroment converting arguments', 7)
        refSt = dt.strptime(refSt[0], '%Y-%m-%d')
        refEn = dt.strptime(refEn[0], '%Y-%m-%d')
        dateSt = dt.strptime(dateSt[0], '%Y-%m-%d')
        dateEn = dt.strptime(dateEn[0], '%Y-%m-%d')

        # The literal string 'None' disables seasonal-cycle normalization.
        if normalize == 'None':
            seacyc = False
        else:
            seacyc = True

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            logger.error('output format not valid')

        # overall period needed to fetch the reanalyses files
        start = min(refSt, dateSt)
        end = max(refEn, dateEn)

        if bbox_obj is not None:
            logger.info("bbox_obj={0}".format(bbox_obj.coords))
            bbox = [bbox_obj.coords[0][0],
                    bbox_obj.coords[0][1],
                    bbox_obj.coords[1][0],
                    bbox_obj.coords[1][1]]
            logger.info("bbox={0}".format(bbox))
        else:
            bbox = None

        # region = self.getInputValues(identifier='region')[0]
        # bbox = [float(b) for b in region.split(',')]
        dataset, var = experiment.split('_')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    try:
        if dataset == 'NCEP':
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'slp'
                level = None
                # conform_units_to='hPa'
        # NOTE(review): matches '20CRV2' against var, not dataset —
        # looks inconsistent with the NCEP test above; confirm intent.
        elif '20CRV2' in var:
            # FIX: the original tested "'z' in level" although level is
            # undefined in this branch (NameError); test the variable
            # name, consistent with the NCEP branch.
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'prmsl'
                level = None
                # conform_units_to='hPa'
        else:
            logger.error('Reanalyses dataset not known')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("init took %s seconds.", time.time() - start_time)
    self.status.set('Read in and convert the arguments done', 8)

    #################
    # get input data
    #################
    start_time = time.time()  # measure get_input_data ...
    self.status.set('fetching input data', 7)
    try:
        input = reanalyses(start=start.year, end=end.year,
                           variable=var, dataset=dataset)
        logger.info('input files %s' % input)
        nc_subset = call(resource=input, variable=var,
                         geom=bbox, spatial_wrapping='wrap')
    except Exception as e:
        msg = 'failed to fetch or subset input files %s' % e
        logger.error(msg)
        raise Exception(msg)
    logger.debug("get_input_subset_dataset took %s seconds.",
                 time.time() - start_time)
    self.status.set('**** Input data fetched', 10)

    ########################
    # input data preperation
    ########################
    self.status.set('Start preparing input data', 12)
    start_time = time.time()  # measure data preperation ...
    try:
        # Construct descriptive filenames for the three files
        # listed in config file
        refDatesString = dt.strftime(refSt, '%Y-%m-%d') + \
            "_" + dt.strftime(refEn, '%Y-%m-%d')
        simDatesString = dt.strftime(dateSt, '%Y-%m-%d') + \
            "_" + dt.strftime(dateEn, '%Y-%m-%d')
        archiveNameString = "base_" + var + "_" + refDatesString + \
            '_%.1f_%.1f_%.1f_%.1f' % (bbox[0], bbox[2], bbox[1], bbox[3])
        simNameString = "sim_" + var + "_" + simDatesString + \
            '_%.1f_%.1f_%.1f_%.1f' % (bbox[0], bbox[2], bbox[1], bbox[3])
        archive = call(resource=nc_subset,
                       time_range=[refSt, refEn],
                       prefix=archiveNameString)
        simulation = call(resource=nc_subset,
                          time_range=[dateSt, dateEn],
                          prefix=simNameString)
        logger.info('archive and simulation files generated: %s, %s'
                    % (archive, simulation))
    except Exception as e:
        msg = 'failed to prepare archive and simulation files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            logger.info('normalization function with method: %s '
                        % normalize)
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            seasoncyc_base = seasoncyc_sim = None
    except Exception as e:
        msg = 'failed to generate normalization files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    # NOTE(review): the file descriptor ip is never closed — confirm
    # whether it should be closed here.
    ip, output_file = mkstemp(dir='.', suffix='.txt')
    files = [path.abspath(archive), path.abspath(simulation), output_file]
    logger.debug("Data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generate the config file
    ############################
    self.status.set('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception as e:
        msg = 'failed to generate config file %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    logger.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    self.status.set('Start CASTf90 call', 20)
    try:
        # self.status.set('execution of CASTf90', 50)
        cmd = 'analogue.out %s' % path.relpath(config_file)
        # system(cmd)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE).communicate()
        logger.info('analogue.out info:\n %s ' % output)
        logger.debug('analogue.out errors:\n %s ' % error)
        self.status.set('**** CASTf90 suceeded', 90)
    except Exception as e:
        msg = 'CASTf90 failed %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("castf90 took %s seconds.", time.time() - start_time)

    ########################
    # generate analog viewer
    ########################
    try:
        f = analogs.reformat_analogs(output_file)
        logger.info('analogs reformated')
        self.status.set('Successfully reformatted analog file', 50)
        # put config file into output folder
        config_output_path, config_output_url = analogs.copy_configfile(
            config_file)
        output_av = analogs.get_viewer(
            f, path.basename(config_output_path))
        logger.info('Viewer generated')
        self.status.set('Successfully generated analogs viewer', 90)
        logger.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer%s ' % e
        logger.debug(msg)
        # FIX: the original fell through here, after which
        # config_output_url and output_av were undefined and the output
        # assignments below crashed with a misleading NameError.
        raise Exception(msg)

    self.status.set('preparting output', 99)
    self.config.setValue(config_output_url)  # config_file )
    self.analogs.setValue(output_file)
    self.output_netcdf.setValue(simulation)
    self.output_html.setValue(output_av)

    self.status.set('execution ended', 100)
    logger.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
def _handler(self, request, response):
    """Compare a model dataset against reanalyses with CASTf90 analogs.

    Depending on ``direction`` ('re2mo' or 'mo2re'), either the
    reanalyses or the model data serves as the simulation to search
    analogs for, while the other serves as the archive. Both datasets
    are subset to the bbox/level/period, the model is regridded onto
    the reanalyses grid, the CASTf90 config is written, ``analogue.out``
    is executed, and the analogs output plus an HTML viewer are set as
    process outputs.
    """
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    process_start_time = time.time()  # measure process execution time ...
    response.update_status('execution started at : %s ' % dt.now(), 5)

    start_time = time.time()  # measure init ...

    # read the request inputs
    resource = archiveextract(
        resource=rename_complexinputs(request.inputs['resource']))
    refSt = request.inputs['refSt'][0].data
    refEn = request.inputs['refEn'][0].data
    dateSt = request.inputs['dateSt'][0].data
    dateEn = request.inputs['dateEn'][0].data
    regrset = request.inputs['regrset'][0].data

    # fix 31 December issue
    # refSt = dt.combine(refSt,dt_time(12,0))
    # refEn = dt.combine(refEn,dt_time(12,0))
    # dateSt = dt.combine(dateSt,dt_time(12,0))
    # dateEn = dt.combine(dateEn,dt_time(12,0))

    seasonwin = request.inputs['seasonwin'][0].data
    nanalog = request.inputs['nanalog'][0].data
    # bbox = [-80, 20, 50, 70]
    # TODO: Add checking for wrong cordinates and apply default if nesessary
    # bbox is stored as [minlon, maxlon, minlat, maxlat] — note the
    # swapped element order relative to the comma-separated input string
    bbox = []
    bboxStr = request.inputs['BBox'][0].data
    bboxStr = bboxStr.split(',')
    bbox.append(float(bboxStr[0]))
    bbox.append(float(bboxStr[2]))
    bbox.append(float(bboxStr[1]))
    bbox.append(float(bboxStr[3]))
    direction = request.inputs['direction'][0].data
    normalize = request.inputs['normalize'][0].data
    distance = request.inputs['dist'][0].data
    outformat = request.inputs['outformat'][0].data
    timewin = request.inputs['timewin'][0].data
    model_var = request.inputs['reanalyses'][0].data
    model, var = model_var.split('_')

    # set the reanalyses (r_*) and model (m_*) time ranges according to
    # the comparison direction; reanalyses timestamps get 00:00, model
    # timestamps 12:00 in 're2mo' and vice versa in 'mo2re'
    try:
        if direction == 're2mo':
            anaSt = dt.combine(dateSt, dt_time(
                0, 0))  # dt.strptime(dateSt[0], '%Y-%m-%d')
            anaEn = dt.combine(dateEn, dt_time(
                0, 0))  # dt.strptime(dateEn[0], '%Y-%m-%d')
            refSt = dt.combine(refSt, dt_time(
                12, 0))  # dt.strptime(refSt[0], '%Y-%m-%d')
            refEn = dt.combine(refEn, dt_time(
                12, 0))  # dt.strptime(refEn[0], '%Y-%m-%d')
            r_time_range = [anaSt, anaEn]
            m_time_range = [refSt, refEn]
        elif direction == 'mo2re':
            anaSt = dt.combine(dateSt, dt_time(
                12, 0))  # dt.strptime(refSt[0], '%Y-%m-%d')
            anaEn = dt.combine(dateEn, dt_time(
                12, 0))  # dt.strptime(refEn[0], '%Y-%m-%d')
            refSt = dt.combine(refSt, dt_time(
                0, 0))  # dt.strptime(dateSt[0], '%Y-%m-%d')
            refEn = dt.combine(refEn, dt_time(
                0, 0))  # dt.strptime(dateEn[0], '%Y-%m-%d')
            r_time_range = [refSt, refEn]
            m_time_range = [anaSt, anaEn]
        else:
            # NOTE(review): logged but not raised — execution continues
            # with r_time_range/m_time_range undefined; confirm intent
            LOGGER.exception(
                'failed to find time periods for comparison direction')
    except:
        msg = 'failed to put simulation and reference time in order'
        LOGGER.exception(msg)
        raise Exception(msg)

    # The literal string 'None' disables seasonal-cycle normalization.
    if normalize == 'None':
        seacyc = False
    else:
        seacyc = True

    if outformat == 'ascii':
        outformat = '.txt'
    elif outformat == 'netCDF':
        outformat = '.nc'
    else:
        LOGGER.exception('output format not valid')

    # map the reanalyses model/variable pair to the dataset's native
    # variable name and (optional) pressure level
    try:
        if model == 'NCEP':
            getlevel = True
            if 'z' in var:
                level = var.strip('z')
                variable = 'hgt'
                # conform_units_to='hPa'
            else:
                variable = 'slp'
                level = None
                # conform_units_to='hPa'
        elif '20CRV2' in model:
            getlevel = False
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'prmsl'
                level = None
                # conform_units_to='hPa'
        else:
            LOGGER.exception('Reanalyses model not known')
        LOGGER.info('environment set')
    except:
        msg = 'failed to set environment'
        LOGGER.exception(msg)
        raise Exception(msg)

    # LOGGER.exception("init took %s seconds.", time.time() - start_time)
    response.update_status('Read in the arguments', 6)

    #################
    # get input data
    #################
    # TODO: do not forget to select years
    start_time = time.time()  # measure get_input_data ...
    response.update_status('fetching input data', 7)
    try:
        # fetch the reanalyses files covering the relevant years
        if direction == 're2mo':
            nc_reanalyses = reanalyses(start=anaSt.year, end=anaEn.year,
                                       variable=var, dataset=model,
                                       getlevel=getlevel)
        else:
            nc_reanalyses = reanalyses(start=refSt.year, end=refEn.year,
                                       variable=var, dataset=model,
                                       getlevel=getlevel)

        if type(nc_reanalyses) == list:
            nc_reanalyses = sorted(
                nc_reanalyses,
                key=lambda i: path.splitext(path.basename(i))[0])
        else:
            nc_reanalyses = [nc_reanalyses]

        # For 20CRV2 geopotential height, daily dataset for 100 years is about 50 Gb
        # So it makes sense, to operate it step-by-step
        # TODO: need to create dictionary for such datasets (for models as well)
        # TODO: benchmark the method bellow for NCEP z500 for 60 years, may be use the same (!)
        # TODO Now everything regrid to the reanalysis
        if ('20CRV2' in model) and ('z' in var):
            # per-file level selection/subsetting, then merge, then
            # collapse the level axis into a new 3D variable z<level>
            tmp_total = []
            origvar = get_variable(nc_reanalyses)

            for z in nc_reanalyses:
                tmp_n = 'tmp_%s' % (uuid.uuid1())
                b0 = call(resource=z, variable=origvar,
                          level_range=[int(level), int(level)], geom=bbox,
                          spatial_wrapping='wrap',
                          prefix='levdom_' + path.basename(z)[0:-3])
                tmp_total.append(b0)

            tmp_total = sorted(
                tmp_total,
                key=lambda i: path.splitext(path.basename(i))[0])
            inter_subset_tmp = call(resource=tmp_total, variable=origvar,
                                    time_range=r_time_range)

            # Clean
            # NOTE(review): the removal command is built but never
            # executed (system call commented out) — temp files remain
            for i in tmp_total:
                tbr = 'rm -f %s' % (i)
                # system(tbr)

            # Create new variable
            ds = Dataset(inter_subset_tmp, mode='a')
            z_var = ds.variables.pop(origvar)
            dims = z_var.dimensions
            # drop the level dimension (dims[1]); keep time, lat, lon
            new_var = ds.createVariable('z%s' % level, z_var.dtype,
                                        dimensions=(dims[0], dims[2],
                                                    dims[3]))
            new_var[:, :, :] = squeeze(z_var[:, 0, :, :])
            # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
            ds.close()
            nc_subset = call(inter_subset_tmp, variable='z%s' % level)
        else:
            nc_subset = call(
                resource=nc_reanalyses,
                variable=var,
                geom=bbox,
                spatial_wrapping='wrap',
                time_range=r_time_range,
                # conform_units_to=conform_units_to
            )
            # nc_subset = call(resource=nc_reanalyses, variable=var,
            # geom=bbox, spatial_wrapping='wrap')  # XXXXXX wrap

        # LOGGER.exception("get_input_subset_model took %s seconds.", time.time() - start_time)
        response.update_status('**** Input reanalyses data fetched', 10)
    except:
        msg = 'failed to fetch or subset input files'
        LOGGER.exception(msg)
        raise Exception(msg)

    ########################
    # input data preperation
    ########################
    response.update_status('Start preparing input data', 12)

    # Filter resource: keep only model files whose time range overlaps
    # the model comparison period
    if type(resource) == list:
        resource = sorted(resource,
                          key=lambda i: path.splitext(path.basename(i))[0])
    else:
        resource = [resource]

    tmp_resource = []
    m_start = m_time_range[0]
    m_end = m_time_range[1]

    for re in resource:
        s, e = get_timerange(re)
        tmpSt = dt.strptime(s, '%Y%m%d')
        tmpEn = dt.strptime(e, '%Y%m%d')
        if ((tmpSt <= m_end) and (tmpEn >= m_start)):
            tmp_resource.append(re)
            LOGGER.debug('Selected file: %s ' % (re))
    resource = tmp_resource

    start_time = time.time()  # mesure data preperation ...
    # TODO: Check the callendars ! for model vs reanalyses.
    # TODO: Check the units! model vs reanalyses.
    try:
        m_total = []
        modvar = get_variable(resource)
        # resource properties
        ds = Dataset(resource[0])
        m_var = ds.variables[modvar]
        dims = list(m_var.dimensions)
        dimlen = len(dims)

        try:
            model_id = ds.getncattr('model_id')
        except AttributeError:
            model_id = 'Unknown model'

        LOGGER.debug('MODEL: %s ' % (model_id))

        lev_units = 'hPa'

        if (dimlen > 3):
            lev = ds.variables[dims[1]]
            # actually index [1] need to be detected... assuming zg(time, plev, lat, lon)
            lev_units = lev.units

            if (lev_units == 'Pa'):
                # convert the requested hPa level to Pa for this model
                m_level = str(int(level) * 100)
            else:
                m_level = level
        else:
            m_level = None

        if level == None:
            level_range = None
        else:
            level_range = [int(m_level), int(m_level)]

        for z in resource:
            tmp_n = 'tmp_%s' % (uuid.uuid1())
            # select level and regrid
            b0 = call(resource=z, variable=modvar,
                      level_range=level_range,
                      spatial_wrapping='wrap',  # cdover='system',
                      regrid_destination=nc_reanalyses[0],
                      regrid_options='bil',
                      prefix=tmp_n)
            # select domain
            b01 = call(resource=b0, geom=bbox, spatial_wrapping='wrap',
                       prefix='levregr_' + path.basename(z)[0:-3])
            # NOTE(review): cleanup commands built but never executed
            tbr = 'rm -f %s' % (b0)
            # system(tbr)
            tbr = 'rm -f %s' % (tmp_n)
            # system(tbr)
            # get full resource
            m_total.append(b01)

        ds.close()
        model_subset = call(m_total, time_range=m_time_range)

        for i in m_total:
            tbr = 'rm -f %s' % (i)
            # system(tbr)

        if m_level is not None:
            # Create new variable in model set
            ds = Dataset(model_subset, mode='a')
            mod_var = ds.variables.pop(modvar)
            dims = mod_var.dimensions
            new_modvar = ds.createVariable('z%s' % level, mod_var.dtype,
                                           dimensions=(dims[0], dims[2],
                                                       dims[3]))
            new_modvar[:, :, :] = squeeze(mod_var[:, 0, :, :])
            # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
            ds.close()
            mod_subset = call(model_subset, variable='z%s' % level)
        else:
            mod_subset = model_subset

        # if direction == 're2mo':
        #     try:
        #         response.update_status('Preparing simulation data', 15)
        #         reanalyses_subset = call(resource=nc_subset, time_range=[anaSt, anaEn])
        #     except:
        #         msg = 'failed to prepare simulation period'
        #         LOGGER.exception(msg)
        #     try:
        #         response.update_status('Preparing target data', 17)
        #         var_target = get_variable(resource)
        #         # var_simulation = get_variable(simulation)
        #         model_subset_tmp = call(resource=resource, variable=var_target,
        #                                 time_range=[refSt, refEn],
        #                                 t_calendar='standard',
        #                                 spatial_wrapping='wrap',
        #                                 regrid_destination=nc_reanalyses[0],
        #                                 regrid_options='bil')
        #         # model_subset = call(resource=resource, variable=var_target,
        #         #                     time_range=[refSt, refEn],
        #         #                     geom=bbox,
        #         #                     t_calendar='standard',
        #         #                     # conform_units_to=conform_units_to,
        #         #                     spatial_wrapping='wrap',
        #         #                     regrid_destination=reanalyses_subset,
        #         #                     regrid_options='bil')  # XXXXXXXXXXXX ADD WRAP rem calendar
        #         model_subset = call(resource=model_subset_tmp,variable=var_target, geom=bbox, spatial_wrapping='wrap', t_calendar='standard')
        #         # ISSUE: the regrided model has white border with null! Check it.
        #         # check t_calendar!
        #     except:
        #         msg = 'failed subset archive model'
        #         LOGGER.exception(msg)
        #         raise Exception(msg)
        # else:
        #     try:
        #         response.update_status('Preparing target data', 15)
        #         var_target = get_variable(resource)
        #         # var_simulation = get_variable(simulation)
        #         model_subset = call(resource=resource, variable=var_target,
        #                             time_range=[refSt, refEn],
        #                             geom=bbox,
        #                             t_calendar='standard',
        #                             # conform_units_to=conform_units_to,
        #                             # spatial_wrapping='wrap',
        #                             )
        #     except:
        #         msg = 'failed subset archive model'
        #         LOGGER.exception(msg)
        #         raise Exception(msg)
        #     try:
        #         response.update_status('Preparing simulation data', 17)
        #         reanalyses_subset = call(resource=nc_subset,
        #                                  time_range=[anaSt, anaEn],
        #                                  regrid_destination=model_subset,
        #                                  regrid_options='bil')
        #     except:
        #         msg = 'failed to prepare simulation period'
        #         LOGGER.exception(msg)
    except:
        msg = 'failed to subset simulation or reference data'
        LOGGER.exception(msg)
        raise Exception(msg)

    # --------------------------------------------
    # assign archive/simulation roles by direction
    try:
        if direction == 'mo2re':
            simulation = mod_subset
            archive = nc_subset
            base_id = model
            sim_id = model_id
        elif direction == 're2mo':
            simulation = nc_subset
            archive = mod_subset
            base_id = model_id
            sim_id = model
        else:
            LOGGER.exception('direction not valid: %s ' % direction)
    except:
        msg = 'failed to find comparison direction'
        LOGGER.exception(msg)
        raise Exception(msg)

    # make sure archive and simulation use the same variable name
    try:
        if level is not None:
            out_var = 'z%s' % level
        else:
            var_archive = get_variable(archive)
            var_simulation = get_variable(simulation)
            if var_archive != var_simulation:
                rename_variable(archive, oldname=var_archive,
                                newname=var_simulation)
                # NOTE(review): out_var is only set inside this branch —
                # undefined when the names already match; confirm
                out_var = var_simulation
                LOGGER.info('varname %s in netCDF renamed to %s'
                            % (var_archive, var_simulation))
    except:
        msg = 'failed to rename variable in target files'
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            seasoncyc_base = None
            seasoncyc_sim = None
    except:
        msg = 'failed to prepare seasonal cycle reference files'
        LOGGER.exception(msg)
        raise Exception(msg)

    ip, output = mkstemp(dir='.', suffix='.txt')
    output_file = path.abspath(output)
    files = [path.abspath(archive), path.abspath(simulation), output_file]

    # LOGGER.exception("data preperation took %s seconds.", time.time() - start_time)

    ############################
    # generating the config file
    ############################
    response.update_status('writing config file', 18)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            base_id=base_id,
            sim_id=sim_id,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[
                dt.strftime(refSt, '%Y-%m-%d'),
                dt.strftime(refEn, '%Y-%m-%d')
            ],
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except:
        msg = 'failed to generate config file'
        LOGGER.exception(msg)
        raise Exception(msg)

    # LOGGER.exception("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    response.update_status('Start CASTf90 call', 20)
    try:
        # response.update_status('execution of CASTf90', 50)
        cmd = 'analogue.out %s' % path.relpath(config_file)
        # system(cmd)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE).communicate()
        LOGGER.info('analogue.out info:\n %s ' % output)
        # NOTE(review): LOGGER.exception outside an except block —
        # presumably meant as debug-level logging of stderr; confirm
        LOGGER.exception('analogue.out errors:\n %s ' % error)
        response.update_status('**** CASTf90 suceeded', 90)
    except:
        msg = 'CASTf90 failed'
        LOGGER.exception(msg)
        raise Exception(msg)

    LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)
    response.update_status('preparting output', 91)

    # Stopper to keep twitcher results, for debug
    # dummy=dummy

    response.outputs[
        'config'].file = config_file  # config_output_url  # config_file )
    response.outputs['analogs'].file = output_file
    response.outputs['output_netcdf'].file = simulation
    response.outputs['target_netcdf'].file = archive

    ########################
    # generate analog viewer
    ########################
    formated_analogs_file = analogs.reformat_analogs(output_file)
    # response.outputs['formated_analogs'].storage = FileStorage()
    response.outputs['formated_analogs'].file = formated_analogs_file
    LOGGER.info('analogs reformated')
    response.update_status('reformatted analog file', 95)

    viewer_html = analogs.render_viewer(
        # configfile=response.outputs['config'].get_url(),
        configfile=config_file,
        # datafile=response.outputs['formated_analogs'].get_url())
        datafile=formated_analogs_file)
    response.outputs['output'].file = viewer_html
    response.update_status('Successfully generated analogs viewer', 99)
    LOGGER.info('rendered pages: %s ', viewer_html)

    response.update_status('execution ended', 100)
    LOGGER.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
    return response
def _handler(self, request, response):
    """Compare a reanalyses dataset with model data via CASTf90 analogues.

    Depending on ``direction``, either the model data is regridded onto the
    reanalyses grid ('re2mo') or the reanalyses subset onto the model grid
    ('mo2re').  The prepared netCDF files are fed to the external CASTf90
    executable (``analogue.out``) and the resulting files are attached to
    the WPS response.

    :param request: PyWPS request carrying the process inputs
    :param response: PyWPS response; outputs and status are set on it
    :return: the updated ``response``
    :raises Exception: on any failed processing step (logged first)
    """
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    process_start_time = time.time()  # measure process execution time ...
    response.update_status('execution started at : %s ' % dt.now(), 5)

    start_time = time.time()  # measure init ...

    resource = archiveextract(
        resource=rename_complexinputs(request.inputs['resource']))

    refSt = request.inputs['refSt'][0].data
    refEn = request.inputs['refEn'][0].data
    # NOTE(review): the identifiers 'dataSt'/'dataEn' look like typos for
    # 'dateSt'/'dateEn' -- confirm against the declared process inputs.
    dateSt = request.inputs['dataSt'][0].data
    dateEn = request.inputs['dataEn'][0].data
    seasonwin = request.inputs['seasonwin'][0].data
    nanalog = request.inputs['nanalog'][0].data

    # hard-coded domain; consumed further down as [W, E, S, N] when the
    # config-file bbox string is assembled
    bbox = [-80, 20, 50, 70]

    # BUG FIX: self.getInputValues() is the old PyWPS-3 API and does not
    # exist on a PyWPS-4 Process; read the value like the other inputs.
    direction = request.inputs['direction'][0].data

    normalize = request.inputs['normalize'][0].data
    distance = request.inputs['dist'][0].data
    outformat = request.inputs['outformat'][0].data
    timewin = request.inputs['timewin'][0].data

    model_var = request.inputs['reanalyses'][0].data
    model, var = model_var.split('_')

    # Decide which period is analysed ('ana*') and which is the reference,
    # depending on the comparison direction.
    try:
        # NOTE(review): if ``.data`` already yields a 'YYYY-MM-DD' string,
        # indexing [0] takes only its first character -- verify upstream
        # input types before trusting these conversions.
        if direction == 're2mo':
            anaSt = dt.strptime(dateSt[0], '%Y-%m-%d')
            anaEn = dt.strptime(dateEn[0], '%Y-%m-%d')
            refSt = dt.strptime(refSt[0], '%Y-%m-%d')
            refEn = dt.strptime(refEn[0], '%Y-%m-%d')
        elif direction == 'mo2re':
            anaSt = dt.strptime(refSt[0], '%Y-%m-%d')
            anaEn = dt.strptime(refEn[0], '%Y-%m-%d')
            refSt = dt.strptime(dateSt[0], '%Y-%m-%d')
            refEn = dt.strptime(dateEn[0], '%Y-%m-%d')
        else:
            LOGGER.exception(
                'failed to find time periods for comparison direction')
    except Exception:
        msg = 'failed to put simulation and reference time in order'
        LOGGER.exception(msg)
        raise Exception(msg)

    # the literal string 'None' disables the seasonal-cycle normalisation
    seacyc = normalize != 'None'

    if outformat == 'ascii':
        outformat = '.txt'
    elif outformat == 'netCDF':
        outformat = '.nc'
    else:
        LOGGER.exception('output format not valid')

    try:
        # Map the reanalyses shortcut (e.g. 'NCEP_slp', 'NCEP_z700') to the
        # variable name and pressure level used in the source files.
        if model == 'NCEP':
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
            else:
                variable = 'slp'
                level = None
        # BUG FIX: was `'20CRV2' in var` -- after model_var.split('_') the
        # dataset name lives in ``model``, never in ``var``.
        elif '20CRV2' in model:
            # BUG FIX: was `'z' in level`; ``level`` is unbound at this
            # point -- test the variable shortcut as in the NCEP branch.
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
            else:
                variable = 'prmsl'
                level = None
        else:
            LOGGER.exception('Reanalyses model not known')
        LOGGER.info('environment set')
    except Exception:
        msg = 'failed to set environment'
        LOGGER.exception(msg)
        raise Exception(msg)

    # timing information, not an error (was LOGGER.exception)
    LOGGER.debug("init took %s seconds.", time.time() - start_time)
    response.update_status('Read in the arguments', 5)

    #################
    # get input data
    #################
    start_time = time.time()  # measure get_input_data ...
    response.update_status('fetching input data', 7)
    try:
        nc_reanalyses = reanalyses(start=anaSt.year, end=anaEn.year,
                                   variable=var, dataset=model)
        nc_subset = call(resource=nc_reanalyses, variable=var, geom=bbox)
        LOGGER.debug("get_input_subset_model took %s seconds.",
                     time.time() - start_time)
        response.update_status('**** Input data fetched', 10)
    except Exception:
        msg = 'failed to fetch or subset input files'
        LOGGER.exception(msg)
        raise Exception(msg)

    ########################
    # input data preperation
    ########################
    response.update_status('Start preparing input data', 12)
    start_time = time.time()  # mesure data preperation ...
    try:
        if direction == 're2mo':
            try:
                response.update_status('Preparing simulation data', 15)
                reanalyses_subset = call(resource=nc_subset,
                                         time_range=[anaSt, anaEn])
            except Exception:
                msg = 'failed to prepare simulation period'
                LOGGER.exception(msg)
                # BUG FIX: continuing here left ``reanalyses_subset``
                # undefined and crashed later with a NameError
                raise Exception(msg)
            try:
                response.update_status('Preparing target data', 17)
                var_target = get_variable(resource)
                # regrid the model data onto the reanalyses grid
                model_subset = call(
                    resource=resource,
                    variable=var_target,
                    time_range=[refSt, refEn],
                    geom=bbox,
                    t_calendar='standard',
                    regrid_destination=reanalyses_subset,
                    regrid_options='bil')
            except Exception:
                msg = 'failed subset archive model'
                LOGGER.exception(msg)
                raise Exception(msg)
        else:
            try:
                response.update_status('Preparing target data', 17)
                var_target = get_variable(resource)
                model_subset = call(
                    resource=resource,
                    variable=var_target,
                    time_range=[refSt, refEn],
                    geom=bbox,
                    t_calendar='standard')
            except Exception:
                msg = 'failed subset archive model'
                LOGGER.exception(msg)
                raise Exception(msg)
            try:
                response.update_status('Preparing simulation data', 15)
                # regrid the reanalyses subset onto the model grid
                reanalyses_subset = call(resource=nc_subset,
                                         time_range=[anaSt, anaEn],
                                         regrid_destination=model_subset,
                                         regrid_options='bil')
            except Exception:
                msg = 'failed to prepare simulation period'
                LOGGER.exception(msg)
                # BUG FIX: see matching handler above -- raise instead of
                # silently continuing with an undefined name
                raise Exception(msg)
    except Exception:
        msg = 'failed to subset simulation or reference data'
        LOGGER.exception(msg)
        raise Exception(msg)

    # assign the archive (searched) and simulation (searched-for) roles
    try:
        if direction == 'mo2re':
            simulation = model_subset
            archive = reanalyses_subset
        elif direction == 're2mo':
            simulation = reanalyses_subset
            archive = model_subset
        else:
            LOGGER.exception('direction not valid: %s ' % direction)
    except Exception:
        msg = 'failed to find comparison direction'
        LOGGER.exception(msg)
        raise Exception(msg)

    # CASTf90 needs identical variable names in both netCDF files
    try:
        var_archive = get_variable(archive)
        var_simulation = get_variable(simulation)
        if var_archive != var_simulation:
            rename_variable(archive, oldname=var_archive,
                            newname=var_simulation)
            LOGGER.info('varname %s in netCDF renamed to %s' %
                        (var_archive, var_simulation))
    except Exception:
        msg = 'failed to rename variable in target files'
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            # BUG FIX: was `seasoncyc_base, seasoncyc_sim = None`, which
            # raises "TypeError: cannot unpack non-sequence NoneType"
            seasoncyc_base = None
            seasoncyc_sim = None
    except Exception:
        msg = 'failed to prepare seasonal cycle reference files'
        LOGGER.exception(msg)
        raise Exception(msg)

    ip, output = mkstemp(dir='.', suffix='.txt')
    output_file = path.abspath(output)
    files = [path.abspath(archive), path.abspath(simulation), output_file]

    LOGGER.debug("data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generating the config file
    ############################
    response.update_status('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            # CASTf90 expects the bbox string as "W,E,S,N"
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception:
        msg = 'failed to generate config file'
        LOGGER.exception(msg)
        raise Exception(msg)

    LOGGER.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    response.update_status('Start CASTf90 call', 20)
    try:
        cmd = 'analogue.out %s' % path.relpath(config_file)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE).communicate()
        LOGGER.info('analogue.out info:\n %s ' % output)
        # stderr content is diagnostic, not necessarily an error
        LOGGER.debug('analogue.out errors:\n %s ' % error)
        response.update_status('**** CASTf90 suceeded', 90)
    except Exception:
        msg = 'CASTf90 failed'
        LOGGER.exception(msg)
        raise Exception(msg)

    LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)

    response.update_status('preparting output', 99)
    # BUG FIX: outputs must be assigned via the ``.file`` attribute, and
    # ``config_output_url`` was never defined -- attach the config file
    # (consistent with the sibling handler in this module).
    response.outputs['config'].file = config_file
    response.outputs['analogs'].file = output_file
    response.outputs['output_netcdf'].file = simulation
    response.outputs['target_netcdf'].file = archive

    response.update_status('execution ended', 100)
    LOGGER.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
    return response
def execute(self):
    """Run the analogue detection on reanalyses data (PyWPS-3 process).

    Fetches the requested reanalyses dataset (NCEP or 20CRV2), subsets it
    to the bounding box, splits it into a reference ('archive') and an
    analysis ('simulation') period, runs the external CASTf90 executable
    (``analogue.out``) and generates the analogs viewer from its output.

    Outputs are attached to the process via ``setValue`` at the end.

    :raises Exception: on any failed processing step (logged first)
    """
    import os
    import time  # performance test
    process_start_time = time.time()  # measure process execution time ...

    from os import path
    from tempfile import mkstemp
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon import analogs
    from flyingpigeon.datafetch import reanalyses

    self.status.set('execution started at : %s ' % dt.now(), 5)
    start_time = time.time()  # measure init ...

    #######################
    # read input parameters
    #######################
    try:
        self.status.set('read input parameter : %s ' % dt.now(), 5)
        refSt = self.getInputValues(identifier='refSt')
        refEn = self.getInputValues(identifier='refEn')
        dateSt = self.getInputValues(identifier='dateSt')
        dateEn = self.getInputValues(identifier='dateEn')
        seasonwin = int(self.getInputValues(identifier='seasonwin')[0])
        nanalog = int(self.getInputValues(identifier='nanalog')[0])
        bbox_obj = self.BBox.getValue()
        normalize = self.getInputValues(identifier='normalize')[0]
        distance = self.getInputValues(identifier='dist')[0]
        outformat = self.getInputValues(identifier='outformat')[0]
        timewin = int(self.getInputValues(identifier='timewin')[0])
        experiment = self.getInputValues(identifier='experiment')[0]
        logger.info('input parameters set')
        self.status.set('Read in and convert the arguments', 5)
    except Exception as e:
        msg = 'failed to read input parameter %s ' % e
        logger.error(msg)
        raise Exception(msg)

    ######################################
    # convert types and set environment
    ######################################
    try:
        self.status.set('Start preparing enviroment converting arguments', 7)
        refSt = dt.strptime(refSt[0], '%Y-%m-%d')
        refEn = dt.strptime(refEn[0], '%Y-%m-%d')
        dateSt = dt.strptime(dateSt[0], '%Y-%m-%d')
        dateEn = dt.strptime(dateEn[0], '%Y-%m-%d')

        # the literal string 'None' disables seasonal-cycle normalisation
        seacyc = normalize != 'None'

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            logger.error('output format not valid')

        # the fetch period must cover both reference and analysis dates
        start = min(refSt, dateSt)
        end = max(refEn, dateEn)

        if bbox_obj is not None:
            logger.info("bbox_obj={0}".format(bbox_obj.coords))
            bbox = [bbox_obj.coords[0][0], bbox_obj.coords[0][1],
                    bbox_obj.coords[1][0], bbox_obj.coords[1][1]]
            logger.info("bbox={0}".format(bbox))
        else:
            bbox = None

        dataset, var = experiment.split('_')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    try:
        # Map the experiment shortcut (e.g. 'NCEP_slp', 'NCEP_z700') to the
        # variable name and pressure level used in the source files.
        if dataset == 'NCEP':
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
            else:
                variable = 'slp'
                level = None
        # BUG FIX: was `'20CRV2' in var` -- after experiment.split('_') the
        # dataset name lives in ``dataset``, never in ``var``.
        elif '20CRV2' in dataset:
            # BUG FIX: was `'z' in level`; ``level`` is unbound at this
            # point -- test the variable shortcut as in the NCEP branch.
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
            else:
                variable = 'prmsl'
                level = None
        else:
            logger.error('Reanalyses dataset not known')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("init took %s seconds.", time.time() - start_time)
    self.status.set('Read in and convert the arguments done', 8)

    #################
    # get input data
    #################
    start_time = time.time()  # measure get_input_data ...
    self.status.set('fetching input data', 7)
    try:
        # renamed from ``input`` to avoid shadowing the builtin
        nc_reanalyses = reanalyses(start=start.year, end=end.year,
                                   variable=var, dataset=dataset)
        logger.info('input files %s' % nc_reanalyses)
        nc_subset = call(resource=nc_reanalyses, variable=var,
                         geom=bbox, spatial_wrapping='wrap')
    except Exception as e:
        msg = 'failed to fetch or subset input files %s' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("get_input_subset_dataset took %s seconds.",
                 time.time() - start_time)
    self.status.set('**** Input data fetched', 10)

    ########################
    # input data preperation
    ########################
    self.status.set('Start preparing input data', 12)
    start_time = time.time()  # measure data preperation ...
    try:
        # Construct descriptive filenames for the files listed in the
        # config file: <role>_<var>_<period>_<bbox>.
        refDatesString = (dt.strftime(refSt, '%Y-%m-%d') + "_" +
                          dt.strftime(refEn, '%Y-%m-%d'))
        simDatesString = (dt.strftime(dateSt, '%Y-%m-%d') + "_" +
                          dt.strftime(dateEn, '%Y-%m-%d'))
        archiveNameString = "base_" + var + "_" + refDatesString + \
            '_%.1f_%.1f_%.1f_%.1f' % (bbox[0], bbox[2], bbox[1], bbox[3])
        simNameString = "sim_" + var + "_" + simDatesString + \
            '_%.1f_%.1f_%.1f_%.1f' % (bbox[0], bbox[2], bbox[1], bbox[3])
        archive = call(resource=nc_subset, time_range=[refSt, refEn],
                       prefix=archiveNameString)
        simulation = call(resource=nc_subset, time_range=[dateSt, dateEn],
                          prefix=simNameString)
        logger.info('archive and simulation files generated: %s, %s' %
                    (archive, simulation))
    except Exception as e:
        msg = 'failed to prepare archive and simulation files %s ' % e
        logger.error(msg)
        raise Exception(msg)

    try:
        if seacyc:
            logger.info('normalization function with method: %s ' %
                        normalize)
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            seasoncyc_base = seasoncyc_sim = None
    except Exception as e:
        msg = 'failed to generate normalization files %s ' % e
        logger.error(msg)
        raise Exception(msg)

    ip, output_file = mkstemp(dir='.', suffix='.txt')
    # BUG FIX: close the file descriptor returned by mkstemp -- it was
    # leaked; only the path is needed below.
    os.close(ip)

    files = [path.abspath(archive), path.abspath(simulation), output_file]

    logger.debug("Data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generate the config file
    ############################
    self.status.set('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            # CASTf90 expects the bbox string as "W,E,S,N"
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception as e:
        msg = 'failed to generate config file %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    self.status.set('Start CASTf90 call', 20)
    try:
        cmd = 'analogue.out %s' % path.relpath(config_file)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE).communicate()
        logger.info('analogue.out info:\n %s ' % output)
        # stderr content is diagnostic, not necessarily an error
        logger.debug('analogue.out errors:\n %s ' % error)
        self.status.set('**** CASTf90 suceeded', 90)
    except Exception as e:
        msg = 'CASTf90 failed %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("castf90 took %s seconds.", time.time() - start_time)

    ########################
    # generate analog viewer
    ########################
    try:
        f = analogs.reformat_analogs(output_file)
        logger.info('analogs reformated')
        self.status.set('Successfully reformatted analog file', 50)
        # put config file into output folder
        config_output_path, config_output_url = analogs.copy_configfile(
            config_file)
        output_av = analogs.get_viewer(f, path.basename(config_output_path))
        logger.info('Viewer generated')
        self.status.set('Successfully generated analogs viewer', 90)
        logger.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer %s ' % e
        logger.error(msg)
        # BUG FIX: swallowing this error used to crash below with a
        # NameError (``config_output_url`` / ``output_av`` undefined);
        # fail explicitly with the real cause instead.
        raise Exception(msg)

    self.status.set('preparting output', 99)
    self.config.setValue(config_output_url)
    self.analogs.setValue(output_file)
    self.output_netcdf.setValue(simulation)
    self.output_html.setValue(output_av)

    self.status.set('execution ended', 100)
    logger.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
from flyingpigeon import datafetch ncs = datafetch.reanalyses(start=2000, end=2017) from ocgis import RequestDataset, OcgOperations, env from ocgis.util.large_array import compute from datetime import datetime as dt import uuid # years = range(1948,2018) # ncs = [] # for year in years: # url = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.dailyavgs/pressure/slp.%s.nc' % (year) # ncs.extend([utils.download_file(url)]) # print ncs level_range = [700, 700] time_range = [ dt.strptime('20100315', '%Y%m%d'), dt.strptime('20111210', '%Y%m%d') ] bbox = [-80, 20, 20, 70] # TODO: BUG: ocg compute is not running if calc == None calc = '%s=%s*1' % (variable, variable) # rd = RequestDataset(ncs) ops = OcgOperations(