def execute(self):
    """Reformat an analogs result file and build the analogues viewer page.

    Reads the analogs CSV (WPS input 'resource'), reformats it for the
    javascript viewer template, renders the viewer HTML page and publishes
    the data-file URL and the page through the process outputs.
    """
    ######################
    # start execution
    ######################
    init_process_logger('log.txt')
    self.output_log.setValue('log.txt')

    from flyingpigeon import analogs as anlg
    from flyingpigeon import config
    from os.path import basename

    ###########################################
    # reorganize analog txt file for javascript
    # and find associated config file
    ###########################################

    # Reformat data file output by the analogs detection process so that
    # it can be read by the analogues viewer template.
    try:
        # Get the output csv file of analogs process (input by user in
        # text box)
        analogs = self.getInputValues(identifier='resource')[0]
        configfile = anlg.get_viewer_configfile(analogs)
        f = anlg.reformat_analogs(analogs)
        logger.info('Analog file reformatted')
        self.status.set('Successfully reformatted analog file', 50)
        output_av = anlg.get_viewer(f, configfile)
        logger.info('Viewer html page generated')
        self.status.set(
            'Successfully generated analogs viewer html page', 90)
        outputUrl_path = config.outputUrl_path()
        # NOTE(review): the original block was truncated at this point;
        # the tail below is restored from the sibling implementations of
        # this method found later in the file.
        output_data = outputUrl_path + '/' + basename(f)
        logger.info('Data url: %s ' % output_data)
        logger.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer%s ' % e
        logger.debug(msg)
        # BUGFIX: re-raise instead of falling through; otherwise
        # output_data/output_av are unbound below and the setValue calls
        # fail with a NameError that masks the real problem.
        raise Exception(msg)

    ################################
    # set the outputs
    ################################
    self.output_txt.setValue(output_data)
    self.output_html.setValue(output_av)
def execute(self):
    """Reformat an analogs result file and build the analogues viewer page.

    Reads the analogs CSV (WPS input 'resource'), reformats it for the
    javascript viewer template, renders the viewer HTML page and publishes
    the data-file URL and the page through the process outputs.
    """
    ######################
    # start execution
    ######################
    from flyingpigeon import analogs as anlg
    from flyingpigeon import config
    from os.path import basename

    ###########################################
    # reorganize analog txt file for javascript
    # and find associated config file
    ###########################################

    # Reformat data file output by the analogs detection process so that
    # it can be read by the analogues viewer template.
    try:
        # Get the output csv file of analogs process (input by user in
        # text box)
        analogs = self.getInputValues(identifier='resource')[0]
        configfile = anlg.get_viewer_configfile(analogs)
        f = anlg.reformat_analogs(analogs)
        logger.info('Analog file reformatted')
        self.status.set('Successfully reformatted analog file', 50)
        output_av = anlg.get_viewer(f, configfile)
        logger.info('Viewer html page generated')
        self.status.set('Successfully generated analogs viewer html page', 90)
        outputUrl_path = config.outputUrl_path()
        output_data = outputUrl_path + '/' + basename(f)
        logger.info('Data url: %s ' % output_data)
        logger.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer%s ' % e
        logger.debug(msg)
        # BUGFIX: re-raise instead of silently continuing; otherwise
        # output_data/output_av are unbound below and the setValue calls
        # fail with a NameError that masks the real problem.
        raise Exception(msg)

    ################################
    # set the outputs
    ################################
    self.output_txt.setValue(output_data)
    self.output_html.setValue(output_av)
def execute(self):
    """Reformat an analogs result file and build the analogues viewer page.

    Reads the analogs CSV (WPS input 'resource'), reformats it for the
    javascript viewer template, renders the viewer HTML page and publishes
    the data-file URL and the page through the process outputs.
    """
    ######################
    # start execution
    ######################
    from flyingpigeon import analogs as anlg
    from flyingpigeon import config
    from os.path import basename

    ###########################################
    # reorganize analog txt file for javascript
    # and find associated config file
    ###########################################

    # Reformat data file output by the analogs detection process so that
    # it can be read by the analogues viewer template.
    try:
        # Get the output csv file of analogs process (input by user in
        # text box)
        analogs = self.getInputValues(identifier='resource')[0]
        configfile = anlg.get_viewer_configfile(analogs)
        f = anlg.reformat_analogs(analogs)
        logger.info('Analog file reformatted')
        self.status.set('Successfully reformatted analog file', 50)
        output_av = anlg.get_viewer(f, configfile)
        logger.info('Viewer html page generated')
        self.status.set(
            'Successfully generated analogs viewer html page', 90)
        outputUrl_path = config.outputUrl_path()
        output_data = outputUrl_path + '/' + basename(f)
        logger.info('Data url: %s ' % output_data)
        logger.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer%s ' % e
        logger.debug(msg)
        # BUGFIX: re-raise instead of silently continuing; otherwise
        # output_data/output_av are unbound below and the setValue calls
        # fail with a NameError that masks the real problem.
        raise Exception(msg)

    ################################
    # set the outputs
    ################################
    self.output_txt.setValue(output_data)
    self.output_html.setValue(output_av)
def _handler(self, request, response):
    """Prepare an analogs result file for the analogues viewer.

    Reformats the analogs detection output so the viewer template can
    read it, then renders the viewer HTML page, attaching both files to
    the process outputs. Each stage raises on failure.
    """
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    # Stage 1: reorganize the analog txt file for javascript.
    # The data file produced by the analogs detection process is rewritten
    # so the analogues viewer template can read it.
    try:
        # Output csv file of the analogs process (input by user in text box).
        result_file = rename_complexinputs(request.inputs['analog_result'])[0]
        LOGGER.info("analogs file path %s ", result_file)

        # Placeholder name; the real config lookup is currently disabled.
        config_name = "dummy.txt"
        viewer_data = anlg.reformat_analogs(result_file)
        response.outputs['output_txt'].file = viewer_data
        LOGGER.info("analogs for visualisation prepared")
    except Exception:
        msg = 'Failed to reformat analogs file'
        LOGGER.exception(msg)
        raise Exception(msg)

    # Stage 2: render the analogues viewer html page.
    try:
        viewer_page = anlg.get_viewer(
            configfile=basename(config_name),
            datafile=basename(viewer_data),
        )
        LOGGER.info('Viewer html page generated')
        response.update_status(
            'Successfully generated analogs viewer html page', 90)
        response.outputs['output_html'].file = viewer_page
        LOGGER.info('output_av: %s ', viewer_page)
    except Exception:
        msg = 'Failed to generate viewer'
        LOGGER.exception(msg)
        raise Exception(msg)

    return response
def _handler(self, request, response):
    """Reformat an analogs result file and generate the viewer html page.

    Takes the analogs detection result (input 'analog_result'), reformats
    it for the analogues viewer template, renders the viewer page and
    publishes both through the process outputs.
    """
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    ###########################################
    # reorganize analog txt file for javascript
    # and find associated config file
    ###########################################

    # Reformat data file output by the analogs detection process so that
    # it can be read by the analogues viewer template.
    try:
        # Get the output csv file of analogs process (input by user in
        # text box)
        analogs = request.inputs['analog_result'][0].data
        configfile = anlg.get_viewer_configfile(analogs)
        f = anlg.reformat_analogs(analogs)
        LOGGER.info('Analog file reformatted')
        response.update_status('Successfully reformatted analog file', 50)
        output_av = anlg.get_viewer(f, configfile)
        LOGGER.info('Viewer html page generated')
        response.update_status(
            'Successfully generated analogs viewer html page', 90)
        output_url = config.output_url()
        output_data = output_url + '/' + basename(f)
        LOGGER.info('Data url: %s ' % output_data)
        LOGGER.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer%s ' % e
        LOGGER.debug(msg)
        # BUGFIX: re-raise instead of silently continuing; otherwise
        # output_data/output_av are unbound below and the output
        # assignments fail with a NameError that masks the real problem.
        raise Exception(msg)

    ################################
    # set the outputs
    ################################
    # NOTE(review): these assignments replace the pywps output objects
    # with plain strings; other handler versions in this file set `.file`
    # on the outputs instead — confirm which is intended. Also
    # 'output_htm' looks like a typo for 'output_html'; verify against
    # the process output definitions before changing it.
    response.outputs['output_txt'] = output_data
    response.outputs['output_htm'] = output_av
    return response
def execute(self):
    """Run the full analogs detection workflow (old-style WPS entry point).

    Fetches reanalyses data, subsets it to the requested bbox and period,
    prepares archive/simulation files, writes the CASTf90 config file,
    runs the CASTf90 executable and publishes the analogs viewer.
    Raises Exception on any stage failure.
    """
    init_process_logger('log.txt')
    self.output_log.setValue('log.txt')

    import time  # performance test
    process_start_time = time.time()  # measure process execution time ...

    from os import path
    from tempfile import mkstemp
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon import analogs
    from flyingpigeon.datafetch import reanalyses

    self.status.set('execution started at : %s ' % dt.now(), 5)
    start_time = time.time()  # measure init ...

    #######################
    # read input parameters
    #######################
    try:
        self.status.set('read input parameter : %s ' % dt.now(), 5)
        refSt = self.getInputValues(identifier='refSt')
        refEn = self.getInputValues(identifier='refEn')
        dateSt = self.getInputValues(identifier='dateSt')
        dateEn = self.getInputValues(identifier='dateEn')
        seasonwin = int(self.getInputValues(identifier='seasonwin')[0])
        nanalog = int(self.getInputValues(identifier='nanalog')[0])
        bbox_obj = self.BBox.getValue()
        normalize = self.getInputValues(identifier='normalize')[0]
        distance = self.getInputValues(identifier='dist')[0]
        outformat = self.getInputValues(identifier='outformat')[0]
        timewin = int(self.getInputValues(identifier='timewin')[0])
        experiment = self.getInputValues(identifier='experiment')[0]
        logger.info('input parameters set')
        self.status.set('Read in and convert the arguments', 5)
    except Exception as e:
        msg = 'failed to read input prameter %s ' % e
        logger.error(msg)
        raise Exception(msg)

    ######################################
    # convert types and set environment
    ######################################
    try:
        self.status.set('Preparing enviroment converting arguments', 7)
        refSt = dt.strptime(refSt[0], '%Y-%m-%d')
        refEn = dt.strptime(refEn[0], '%Y-%m-%d')
        dateSt = dt.strptime(dateSt[0], '%Y-%m-%d')
        dateEn = dt.strptime(dateEn[0], '%Y-%m-%d')

        # any method other than the literal string 'None' enables the
        # seasonal-cycle normalization
        if normalize == 'None':
            seacyc = False
        else:
            seacyc = True

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            logger.error('output format not valid')

        start = min(refSt, dateSt)
        end = max(refEn, dateEn)

        if bbox_obj is not None:
            logger.info("bbox_obj={0}".format(bbox_obj.coords))
            bbox = [bbox_obj.coords[0][0],
                    bbox_obj.coords[0][1],
                    bbox_obj.coords[1][0],
                    bbox_obj.coords[1][1]]
            logger.info("bbox={0}".format(bbox))
        else:
            bbox = None
        # region = self.getInputValues(identifier='region')[0]
        # bbox = [float(b) for b in region.split(',')]

        dataset, var = experiment.split('_')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    try:
        if dataset == 'NCEP':
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'slp'
                level = None
                # conform_units_to='hPa'
        elif '20CRV2' in dataset:
            # BUGFIX: the original tested `'20CRV2' in var` and `'z' in
            # level` — but the dataset name lives in `dataset`, and `level`
            # is unbound on this branch (NameError). The corrected
            # _handler version of this process tests model/var instead.
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'prmsl'
                level = None
                # conform_units_to='hPa'
        else:
            logger.error('Reanalyses dataset not known')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("init took %s seconds.", time.time() - start_time)
    self.status.set('Read in and convert the arguments done', 8)

    #################
    # get input data
    #################
    start_time = time.time()  # measure get_input_data ...
    self.status.set('fetching input data', 7)
    try:
        input = reanalyses(start=start.year, end=end.year,
                           variable=var, dataset=dataset)
        logger.info('input files %s' % input)
        nc_subset = call(resource=input, variable=var,
                         geom=bbox, spatial_wrapping='wrap')
    except Exception as e:
        msg = 'failed to fetch or subset input files %s' % e
        logger.error(msg)
        raise Exception(msg)
    logger.debug("get_input_subset_dataset took %s seconds.",
                 time.time() - start_time)
    self.status.set('**** Input data fetched', 10)

    ########################
    # input data preperation
    ########################
    self.status.set('Start preparing input data', 12)
    start_time = time.time()  # measure data preperation ...
    try:
        # Construct descriptive filenames for the three files
        # listed in config file
        refDatesString = dt.strftime(refSt, '%Y-%m-%d') + "_" + dt.strftime(refEn, '%Y-%m-%d')
        simDatesString = dt.strftime(dateSt, '%Y-%m-%d') + "_" + dt.strftime(dateEn, '%Y-%m-%d')
        archiveNameString = "base_" + var + "_" + refDatesString + '_%.1f_%.1f_%.1f_%.1f' \
            % (bbox[0], bbox[2], bbox[1], bbox[3])
        simNameString = "sim_" + var + "_" + simDatesString + '_%.1f_%.1f_%.1f_%.1f' \
            % (bbox[0], bbox[2], bbox[1], bbox[3])
        archive = call(resource=nc_subset,
                       time_range=[refSt, refEn],
                       prefix=archiveNameString)
        simulation = call(resource=nc_subset, time_range=[dateSt, dateEn],
                          prefix=simNameString)
        logger.info('archive and simulation files generated: %s, %s'
                    % (archive, simulation))
    except Exception as e:
        msg = 'failed to prepare archive and simulation files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            logger.info('normalization function with method: %s '
                        % normalize)
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            seasoncyc_base = seasoncyc_sim = None
    except Exception as e:
        msg = 'failed to generate normalization files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    ip, output_file = mkstemp(dir='.', suffix='.txt')
    files = [path.abspath(archive),
             path.abspath(simulation),
             output_file]

    logger.debug("Data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generate the config file
    ############################
    self.status.set('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception as e:
        msg = 'failed to generate config file %s ' % e
        logger.debug(msg)
        raise Exception(msg)
    logger.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    self.status.set('Start CASTf90 call', 20)
    try:
        # self.status.set('execution of CASTf90', 50)
        cmd = 'analogue.out %s' % path.relpath(config_file)
        # system(cmd)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        ).communicate()
        logger.info('analogue.out info:\n %s ' % output)
        logger.debug('analogue.out errors:\n %s ' % error)
        self.status.set('**** CASTf90 suceeded', 90)
    except Exception as e:
        msg = 'CASTf90 failed %s ' % e
        logger.error(msg)
        raise Exception(msg)
    logger.debug("castf90 took %s seconds.", time.time() - start_time)

    ########################
    # generate analog viewer
    ########################
    try:
        f = analogs.reformat_analogs(output_file)
        logger.info('analogs reformated')
        self.status.set('Successfully reformatted analog file', 50)

        # put config file into output folder
        config_output_path, config_output_url = analogs.copy_configfile(
            config_file
        )
        output_av = analogs.get_viewer(
            f,
            path.basename(config_output_path))
        logger.info('Viewer generated')
        self.status.set('Successfully generated analogs viewer', 90)
        logger.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer%s ' % e
        logger.debug(msg)
        # BUGFIX: re-raise instead of falling through; otherwise
        # config_output_url/output_av below are unbound and the setValue
        # calls fail with a NameError that masks the real error.
        raise Exception(msg)

    self.status.set('preparting output', 99)
    self.config.setValue(config_output_url)  # config_file )
    self.analogs.setValue(output_file)
    self.output_netcdf.setValue(simulation)
    self.output_html.setValue(output_av)

    self.status.set('execution ended', 100)
    logger.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
def _handler(self, request, response):
    """Run the full analogs detection workflow (PyWPS 4 entry point).

    Fetches reanalyses data for the requested model/variable, subsets it
    to bbox and period, prepares archive/simulation files, writes the
    CASTf90 config, runs the CASTf90 executable and publishes the
    reformatted analogs plus the viewer html page. Raises Exception on
    any stage failure.
    """
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    LOGGER.info('Start process')
    response.update_status('execution started at : {}'.format(dt.now()), 5)
    process_start_time = time.time()  # measure process execution time ...
    start_time = time.time()  # measure init ...

    ################################
    # reading in the input arguments
    ################################
    try:
        response.update_status('read input parameter : %s ' % dt.now(), 5)
        refSt = request.inputs['refSt'][0].data
        refEn = request.inputs['refEn'][0].data
        dateSt = request.inputs['dateSt'][0].data
        dateEn = request.inputs['dateEn'][0].data
        seasonwin = request.inputs['seasonwin'][0].data
        nanalog = request.inputs['nanalog'][0].data

        # NOTE(review): bbox is hard-coded; the bounding-box input is
        # commented out below — confirm before re-enabling.
        bbox = [-80, 20, 50, 70]
        # if bbox_obj is not None:
        #     LOGGER.info("bbox_obj={0}".format(bbox_obj.coords))
        #     bbox = [bbox_obj.coords[0][0],
        #             bbox_obj.coords[0][1],
        #             bbox_obj.coords[1][0],
        #             bbox_obj.coords[1][1]]
        #     LOGGER.info("bbox={0}".format(bbox))
        # else:
        #     bbox = None

        normalize = request.inputs['normalize'][0].data
        distance = request.inputs['dist'][0].data
        outformat = request.inputs['outformat'][0].data
        timewin = request.inputs['timewin'][0].data
        model_var = request.inputs['reanalyses'][0].data
        model, var = model_var.split('_')

        LOGGER.info('input parameters set')
        response.update_status('Read in and convert the arguments', 5)
    except Exception as e:
        msg = 'failed to read input prameter %s ' % e
        LOGGER.error(msg)
        raise Exception(msg)

    ######################################
    # convert types and set environment
    ######################################
    try:
        response.update_status('Preparing enviroment converting arguments', 7)
        # BUGFIX: the original logged dateSt twice and never dateEn.
        LOGGER.debug('date: %s %s %s %s ' % (type(refSt), refEn, dateSt, dateEn))

        start = min(refSt, dateSt)
        end = max(refEn, dateEn)

        # any method other than the literal string 'None' enables the
        # seasonal-cycle normalization
        if normalize == 'None':
            seacyc = False
        else:
            seacyc = True

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            LOGGER.error('output format not valid')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        LOGGER.error(msg)
        raise Exception(msg)

    ###########################
    # set the environment
    ###########################
    response.update_status('fetching data from archive', 10)
    try:
        if model == 'NCEP':
            if 'z' in var:
                level = var.strip('z')
                conform_units_to = None
            else:
                level = None
                conform_units_to = 'hPa'
        elif '20CRV2' in model:
            if 'z' in var:
                level = var.strip('z')
                conform_units_to = None
            else:
                level = None
                conform_units_to = 'hPa'
        else:
            LOGGER.error('Reanalyses dataset not known')
        LOGGER.info('environment set for model: %s' % model)
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed
        msg = 'failed to set environment'
        LOGGER.exception(msg)
        raise Exception(msg)

    ##########################################
    # fetch Data from original data archive
    ##########################################
    try:
        model_nc = rl(start=start.year,
                      end=end.year,
                      dataset=model,
                      variable=var)
        LOGGER.info('reanalyses data fetched')
    except Exception:
        msg = 'failed to get reanalyses data'
        LOGGER.exception(msg)
        raise Exception(msg)

    response.update_status('subsetting region of interest', 17)
    LOGGER.debug("start and end time: %s - %s" % (start, end))
    time_range = [start, end]
    model_subset = call(resource=model_nc, variable=var,
                        geom=bbox, spatial_wrapping='wrap',
                        time_range=time_range,
                        # conform_units_to=conform_units_to
                        )
    LOGGER.info('Dataset subset done: %s ', model_subset)
    response.update_status('dataset subsetted', 19)

    LOGGER.debug("get_input_subset_dataset took %s seconds.",
                 time.time() - start_time)
    response.update_status('**** Input data fetched', 10)

    ########################
    # input data preperation
    ########################
    response.update_status('Start preparing input data', 12)
    start_time = time.time()  # measure data preperation ...
    try:
        # Construct descriptive filenames for the three files
        # listed in config file
        refDatesString = dt.strftime(refSt, '%Y-%m-%d') + "_" + dt.strftime(refEn, '%Y-%m-%d')
        simDatesString = dt.strftime(dateSt, '%Y-%m-%d') + "_" + dt.strftime(dateEn, '%Y-%m-%d')
        archiveNameString = "base_" + var + "_" + refDatesString + '_%.1f_%.1f_%.1f_%.1f' \
            % (bbox[0], bbox[2], bbox[1], bbox[3])
        simNameString = "sim_" + var + "_" + simDatesString + '_%.1f_%.1f_%.1f_%.1f' \
            % (bbox[0], bbox[2], bbox[1], bbox[3])
        archive = call(resource=model_subset,
                       time_range=[refSt, refEn],
                       prefix=archiveNameString)
        simulation = call(resource=model_subset,
                          time_range=[dateSt, dateEn],
                          prefix=simNameString)
        LOGGER.info('archive and simulation files generated: %s, %s'
                    % (archive, simulation))
    except Exception as e:
        msg = 'failed to prepare archive and simulation files %s ' % e
        LOGGER.debug(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            LOGGER.info('normalization function with method: %s '
                        % normalize)
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            seasoncyc_base = seasoncyc_sim = None
    except Exception as e:
        msg = 'failed to generate normalization files %s ' % e
        LOGGER.debug(msg)
        raise Exception(msg)

    ip, output_file = mkstemp(dir='.', suffix='.txt')
    files = [path.abspath(archive),
             path.abspath(simulation),
             output_file]

    LOGGER.debug("Data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generate the config file
    ############################
    response.update_status('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception as e:
        msg = 'failed to generate config file %s ' % e
        LOGGER.debug(msg)
        raise Exception(msg)
    LOGGER.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    start_time = time.time()  # measure call castf90
    response.update_status('Start CASTf90 call', 20)
    try:
        # argument-list form: no shell involved
        cmd = ['analogue.out', path.relpath(config_file)]
        LOGGER.debug("castf90 command: %s", cmd)
        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        LOGGER.info('analogue output:\n %s', output)
        response.update_status('**** CASTf90 suceeded', 90)
    except CalledProcessError as e:
        msg = 'CASTf90 failed:\n{0}'.format(e.output)
        LOGGER.error(msg)
        raise Exception(msg)
    LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)

    ########################
    # generate analog viewer
    ########################
    response.update_status('preparting output', 50)
    response.outputs['config'].file = config_file
    response.outputs['analogs'].file = output_file
    response.outputs['output_netcdf'].file = simulation

    try:
        formated_analogs_file = analogs.reformat_analogs(output_file)
        response.outputs['formated_analogs'].file = formated_analogs_file
        LOGGER.info('analogs reformated')
        response.update_status('Successfully reformatted analog file', 60)
    except Exception as e:
        # BUGFIX: the original built the message as
        # 'Failed to reformat analogs file.' % e — the string has no
        # placeholder, so the `%` raises TypeError inside this handler
        # and masks the real error.
        msg = 'Failed to reformat analogs file: %s' % e
        LOGGER.error(msg)
        raise Exception(msg)

    try:
        output_av = analogs.get_viewer(
            formated_analogs_file,
            path.basename(config_file))
        response.outputs['output_html'].file = output_av.name
        response.update_status('Successfully generated analogs viewer', 90)
        LOGGER.info('output_av: %s ', output_av)
    except Exception as e:
        msg = 'Failed to generate viewer: %s' % e
        LOGGER.error(msg)
        raise Exception(msg)

    response.update_status('execution ended', 100)
    LOGGER.debug("total execution took %s seconds.",
                 time.time() - process_start_time)
    return response
def execute(self):
    """Run the full analogs detection workflow (oldest WPS variant).

    Fetches reanalyses data, subsets it to the requested bbox and period,
    prepares archive/simulation files, writes the CASTf90 config file,
    runs the CASTf90 executable and publishes the analogs viewer.
    Raises Exception on any stage failure.
    """
    import time  # performance test
    process_start_time = time.time()  # measure process execution time ...

    from os import path
    from tempfile import mkstemp
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon import analogs
    from flyingpigeon.datafetch import reanalyses

    self.status.set('execution started at : %s ' % dt.now(), 5)
    start_time = time.time()  # measure init ...

    #######################
    # read input parameters
    #######################
    try:
        self.status.set('read input parameter : %s ' % dt.now(), 5)
        refSt = self.getInputValues(identifier='refSt')
        refEn = self.getInputValues(identifier='refEn')
        dateSt = self.getInputValues(identifier='dateSt')
        dateEn = self.getInputValues(identifier='dateEn')
        seasonwin = int(self.getInputValues(identifier='seasonwin')[0])
        nanalog = int(self.getInputValues(identifier='nanalog')[0])
        bbox_obj = self.BBox.getValue()
        normalize = self.getInputValues(identifier='normalize')[0]
        distance = self.getInputValues(identifier='dist')[0]
        outformat = self.getInputValues(identifier='outformat')[0]
        timewin = int(self.getInputValues(identifier='timewin')[0])
        experiment = self.getInputValues(identifier='experiment')[0]
        logger.info('input parameters set')
        self.status.set('Read in and convert the arguments', 5)
    except Exception as e:
        msg = 'failed to read input prameter %s ' % e
        logger.error(msg)
        raise Exception(msg)

    ######################################
    # convert types and set environment
    ######################################
    try:
        self.status.set('Start preparing enviroment converting arguments', 7)
        refSt = dt.strptime(refSt[0], '%Y-%m-%d')
        refEn = dt.strptime(refEn[0], '%Y-%m-%d')
        dateSt = dt.strptime(dateSt[0], '%Y-%m-%d')
        dateEn = dt.strptime(dateEn[0], '%Y-%m-%d')

        # any method other than the literal string 'None' enables the
        # seasonal-cycle normalization
        if normalize == 'None':
            seacyc = False
        else:
            seacyc = True

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            logger.error('output format not valid')

        start = min(refSt, dateSt)
        end = max(refEn, dateEn)

        if bbox_obj is not None:
            logger.info("bbox_obj={0}".format(bbox_obj.coords))
            bbox = [bbox_obj.coords[0][0],
                    bbox_obj.coords[0][1],
                    bbox_obj.coords[1][0],
                    bbox_obj.coords[1][1]]
            logger.info("bbox={0}".format(bbox))
        else:
            bbox = None
        # region = self.getInputValues(identifier='region')[0]
        # bbox = [float(b) for b in region.split(',')]

        dataset, var = experiment.split('_')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    try:
        if dataset == 'NCEP':
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'slp'
                level = None
                # conform_units_to='hPa'
        elif '20CRV2' in dataset:
            # BUGFIX: the original tested `'20CRV2' in var` and `'z' in
            # level` — but the dataset name lives in `dataset`, and `level`
            # is unbound on this branch (NameError). The corrected
            # _handler version of this process tests model/var instead.
            if 'z' in var:
                variable = 'hgt'
                level = var.strip('z')
                # conform_units_to=None
            else:
                variable = 'prmsl'
                level = None
                # conform_units_to='hPa'
        else:
            logger.error('Reanalyses dataset not known')
        logger.info('environment set')
    except Exception as e:
        msg = 'failed to set environment %s ' % e
        logger.error(msg)
        raise Exception(msg)

    logger.debug("init took %s seconds.", time.time() - start_time)
    self.status.set('Read in and convert the arguments done', 8)

    #################
    # get input data
    #################
    start_time = time.time()  # measure get_input_data ...
    self.status.set('fetching input data', 7)
    try:
        input = reanalyses(start=start.year, end=end.year,
                           variable=var, dataset=dataset)
        logger.info('input files %s' % input)
        nc_subset = call(resource=input, variable=var,
                         geom=bbox, spatial_wrapping='wrap')
    except Exception as e:
        msg = 'failed to fetch or subset input files %s' % e
        logger.error(msg)
        raise Exception(msg)
    logger.debug("get_input_subset_dataset took %s seconds.",
                 time.time() - start_time)
    self.status.set('**** Input data fetched', 10)

    ########################
    # input data preperation
    ########################
    self.status.set('Start preparing input data', 12)
    start_time = time.time()  # measure data preperation ...
    try:
        # Construct descriptive filenames for the three files
        # listed in config file
        refDatesString = dt.strftime(refSt, '%Y-%m-%d') + "_" + dt.strftime(refEn, '%Y-%m-%d')
        simDatesString = dt.strftime(dateSt, '%Y-%m-%d') + "_" + dt.strftime(dateEn, '%Y-%m-%d')
        archiveNameString = "base_" + var + "_" + refDatesString + '_%.1f_%.1f_%.1f_%.1f' \
            % (bbox[0], bbox[2], bbox[1], bbox[3])
        simNameString = "sim_" + var + "_" + simDatesString + '_%.1f_%.1f_%.1f_%.1f' \
            % (bbox[0], bbox[2], bbox[1], bbox[3])
        archive = call(resource=nc_subset,
                       time_range=[refSt, refEn],
                       prefix=archiveNameString)
        simulation = call(resource=nc_subset,
                          time_range=[dateSt, dateEn],
                          prefix=simNameString)
        logger.info('archive and simulation files generated: %s, %s'
                    % (archive, simulation))
    except Exception as e:
        msg = 'failed to prepare archive and simulation files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    try:
        if seacyc is True:
            logger.info('normalization function with method: %s '
                        % normalize)
            seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                archive, simulation, method=normalize)
        else:
            seasoncyc_base = seasoncyc_sim = None
    except Exception as e:
        msg = 'failed to generate normalization files %s ' % e
        logger.debug(msg)
        raise Exception(msg)

    ip, output_file = mkstemp(dir='.', suffix='.txt')
    files = [path.abspath(archive),
             path.abspath(simulation),
             output_file]

    logger.debug("Data preperation took %s seconds.",
                 time.time() - start_time)

    ############################
    # generate the config file
    ############################
    self.status.set('writing config file', 15)
    start_time = time.time()  # measure write config ...
    try:
        config_file = analogs.get_configfile(
            files=files,
            seasoncyc_base=seasoncyc_base,
            seasoncyc_sim=seasoncyc_sim,
            timewin=timewin,
            varname=var,
            seacyc=seacyc,
            cycsmooth=91,
            nanalog=nanalog,
            seasonwin=seasonwin,
            distfun=distance,
            outformat=outformat,
            calccor=True,
            silent=False,
            period=[dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')],
            bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
    except Exception as e:
        msg = 'failed to generate config file %s ' % e
        logger.debug(msg)
        raise Exception(msg)
    logger.debug("write_config took %s seconds.", time.time() - start_time)

    #######################
    # CASTf90 call
    #######################
    import subprocess
    import shlex

    start_time = time.time()  # measure call castf90
    self.status.set('Start CASTf90 call', 20)
    try:
        # self.status.set('execution of CASTf90', 50)
        cmd = 'analogue.out %s' % path.relpath(config_file)
        # system(cmd)
        args = shlex.split(cmd)
        output, error = subprocess.Popen(
            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        ).communicate()
        logger.info('analogue.out info:\n %s ' % output)
        logger.debug('analogue.out errors:\n %s ' % error)
        self.status.set('**** CASTf90 suceeded', 90)
    except Exception as e:
        msg = 'CASTf90 failed %s ' % e
        logger.error(msg)
        raise Exception(msg)
    logger.debug("castf90 took %s seconds.", time.time() - start_time)

    ########################
    # generate analog viewer
    ########################
    try:
        f = analogs.reformat_analogs(output_file)
        logger.info('analogs reformated')
        self.status.set('Successfully reformatted analog file', 50)

        # put config file into output folder
        config_output_path, config_output_url = analogs.copy_configfile(
            config_file)
        output_av = analogs.get_viewer(
            f,
            path.basename(config_output_path))
        logger.info('Viewer generated')
        self.status.set('Successfully generated analogs viewer', 90)
        logger.info('output_av: %s ' % output_av)
    except Exception as e:
        msg = 'Failed to reformat analogs file or generate viewer%s ' % e
        logger.debug(msg)
        # BUGFIX: re-raise instead of falling through; otherwise
        # config_output_url/output_av below are unbound and the setValue
        # calls fail with a NameError that masks the real error.
        raise Exception(msg)

    self.status.set('preparting output', 99)
    self.config.setValue(config_output_url)  # config_file )
    self.analogs.setValue(output_file)
    self.output_netcdf.setValue(simulation)
    self.output_html.setValue(output_av)

    self.status.set('execution ended', 100)
    logger.debug("total execution took %s seconds.",
                 time.time() - process_start_time)