def parseImageParametersBackground(request, session):
    """Resolve which figures to enable in the display parameters, persist
    them, and record what data the session run will need.

    Reads session['figstocapture'] — a dict mapping instrument name to a
    list of figure names, or None meaning "capture everything".  Within a
    list, '#'+name also counts as a request, a leading '-' (or '-#')
    explicitly disables a figure, and None anywhere means "all figures".
    Writes the resulting display_parameters.json and fills in
    session['data_request'] / session['lib_filetype'] when absent.
    """
    picnicsession.updateSessionComment(session, 'setup')
    if 'custom_display' in request.params and \
            request.params.getone('custom_display') != 'default':
        # A custom display config was uploaded earlier; reload it from the
        # session store instead of the named defaults file.
        import hsrl.utils.json_config as jc
        disp = jc.json_config(
            picnicsession.loadjson(session, 'display_parameters.json'))
    else:
        import hsrl.data_stream.display_utilities as du
        (disp, conf) = du.get_display_defaults(
            session['display_defaults_file'])
        #else:#fixme should this be enable_all()?
        #    (disp,conf)=du.get_display_defaults('web_plots.json')
    # None (as the whole structure, or inside any instrument's list)
    # means all figures should be captured.
    allfigs = session['figstocapture'] is None
    if not allfigs:
        for k, v in session['figstocapture'].items():
            if None in v:
                allfigs = True
                break
    if not allfigs:
        # Only selected figures: start with every figure disabled, then
        # enable just the requested ones.
        data_req = 'images'
        lib_filetype = 'data'
        for fi in disp.get_attrs():  # for each figure
            if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                disp.set_value(fi, 'enable', 0)
        for inst, figset in session['figstocapture'].items():
            for fi in disp.get_attrs():  # for each figure
                if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                    if fi in figset or ('#' + fi) in figset:
                        # requested, so enable it
                        disp.set_value(fi, 'enable', 1)
                        if not fi.endswith('_image') and inst == 'hsrl':
                            # a non-image hsrl figure needs housekeeping data
                            data_req = 'images housekeeping'
                            lib_filetype = None
                    elif ('-' + fi) in figset or ('-#' + fi) in figset:
                        # explicitly disabled, so disable it
                        disp.set_value(fi, 'enable', 0)
    else:
        # Capture everything, minus any explicitly-disabled figures.
        if session['figstocapture'] is not None:
            for inst, figset in session['figstocapture'].items():
                for fi in disp.get_attrs():  # for each figure
                    if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                        if ('-' + fi) in figset or ('-#' + fi) in figset:
                            # explicitly disabled, so disable it
                            disp.set_value(fi, 'enable', 0)
        data_req = 'images housekeeping'
        lib_filetype = None
    picnicsession.storejson(session, disp.json_representation(),
                            'display_parameters.json')
    # Don't clobber values another step may already have chosen.
    if 'data_request' not in session:
        session['data_request'] = data_req
    if 'lib_filetype' not in session:
        session['lib_filetype'] = lib_filetype
    picnicsession.storesession(session)
def reimagerequest(request):
    """Clone an existing session under a fresh CSRF session id and relaunch
    the "createimages" task with the prior parameters.

    The display/process parameter JSON files of the old session are copied
    into the new one so the rerun reproduces the same configuration.
    Returns whatever picnicsession.newSessionProcess returns (the response
    for the relaunched job).
    """
    sessionid = request.matchdict['session']
    session = picnicsession.loadsession(sessionid)
    oldsessionid = sessionid
    pysession = request.session
    # Issue a fresh id so the rerun gets its own working set and result page.
    sessionid = pysession.new_csrf_token()
    session['sessionid'] = sessionid
    session['finalpage'] = request.route_path('imageresult', session=sessionid)
    for f in ('display_parameters.json', 'process_parameters.json'):
        j = picnicsession.loadjson(oldsessionid, f, failvalue=None)
        if j is not None:
            picnicsession.storejson(sessionid, j, f)
    return picnicsession.newSessionProcess("createimages", request, session)
def reimagerequest(request):
    """Re-run image generation for an existing session.

    Loads the session named in the URL, assigns it a brand-new CSRF session
    id, copies its display/process parameter files across to the new id, and
    kicks off the "createimages" background task under the new id.
    """
    sessionid = request.matchdict['session']
    session = picnicsession.loadsession(sessionid)
    oldsessionid = sessionid
    pysession = request.session
    # New id == new working folder and new result URL for the rerun.
    sessionid = pysession.new_csrf_token()
    session['sessionid'] = sessionid
    session['finalpage'] = request.route_path('imageresult', session=sessionid)
    for f in ('display_parameters.json', 'process_parameters.json'):
        j = picnicsession.loadjson(oldsessionid, f, failvalue=None)
        if j is not None:
            picnicsession.storejson(sessionid, j, f)
    return picnicsession.newSessionProcess("createimages", request, session)
def parseImageParameters(request, session): methods = ['site', 'dataset', 'instrument'] for m in methods: if m not in request.params: continue method = m session['method'] = method try: session[method] = int(request.params.getone(method)) session['methodkey'] = int(request.params.getone(method)) except: session[method] = request.params.getone(method) session['methodkey'] = request.params.getone(method) break starttime = datetime(int(request.params.getone('byr')), int(request.params.getone('bmo')), int(request.params.getone('bdy')), int(request.params.getone('bhr')), int(request.params.getone('bmn')), 0) endtime = datetime(int(request.params.getone('eyr')), int(request.params.getone('emo')), int(request.params.getone('edy')), int(request.params.getone('ehr')), int(request.params.getone('emn')), 0) session['altmin'] = float(request.params.getone('lheight')) * 1000 session['altmax'] = float(request.params.getone('height')) * 1000 session['starttime'] = starttime.strftime(picnicsession.json_dateformat) session['endtime'] = endtime.strftime(picnicsession.json_dateformat) #contstruct dpl datinfo = lib(**{method: session[method]}) instruments = datinfo['Instruments'] name = datinfo['Name'] datasetname = instruments[0].lower() #print figstocapture datasets = [] for inst in instruments: datasets.extend(lib.instrument(inst)['datasets']) #dplc=dpl_rti(datasetname,starttime,endtime,timedelta(seconds=timeres),endtime-starttime,altmin,altmax,altres);#alt in m #construct image generation parameters session['dataset'] = datasetname session['name'] = name if 'custom_processing' in request.params and request.params.getone( 'custom_processing') != 'default': cust = request.params.getone('custom_processing') if cust == 'custom': try: pd = request.params.getone('process_parameters_content') pdc = pd.file.read() d = json.loads(pdc) #.file.read()) except: traceback.format_exc() return HTTPBadRequest() else: d = server_archive.get_archived_json( 
request.params.getone('custom_processing_token'), cust) #print 'Storing custom process parameters ',request.params.getone('process_parameters_content') picnicsession.storejson(session, d, 'process_parameters.json') #return HTTPTemporaryRedirect(location=request.route_path('progress_withid',session=sessionid)) if 'custom_display' in request.params and request.params.getone( 'custom_display') != 'default': cust = request.params.getone('custom_display') if cust == 'custom': pd = request.params.getone('display_defaults_content') print 'Storing custom image parameters to', session[ 'sessionid'], 'display_parameters.json' try: d = json.loads(pd.file.read()) except: traceback.format_exc() return HTTPBadRequest() else: d = server_archive.get_archived_json( request.params.getone('custom_display_token'), cust) picnicsession.storejson(session, d, 'display_parameters.json') session['figstocapture'] = None getdatasets = datasets imagesetlist = jsgen.formsetsForInstruments(datasets, 'images') session['figrequest'] = {} for i in imagesetlist: session['figrequest'][i['formname']] = 'custom' elif 'display_defaults_file' in request.params: session['display_defaults_file'] = request.params.getone( 'display_defaults_file') if os.path.sep in session['display_defaults_file']: session['display_defaults_file'] = picnicsession.sessionfile( session, session['display_defaults_file']) session['figstocapture'] = None getdatasets = datasets imagesetlist = jsgen.formsetsForInstruments(datasets, 'images') session['figrequest'] = {} for i in imagesetlist: session['figrequest'][i['formname']] = 'custom' else: session['display_defaults_file'] = 'all_plots.json' imagesetlist = jsgen.formsetsForInstruments(datasets, 'images') getdatasets = [] figstocapture = {} session['figrequest'] = {} for i in imagesetlist: #print i try: setmode = request.params.getone(i['formname']) session['figrequest'][i['formname']] = setmode figstocapture[i['setenum']] = i['sets'][setmode]['figs'] if 
len(i['sets'][setmode]['figs']) > 0: #radio buttons if 'enabled' in i['sets'][setmode]: for dat in i['sets'][setmode]['enabled']: if dat not in getdatasets: getdatasets.append(dat) if 'required' in i['sets'][setmode]: for dat in i['sets'][setmode]['required']: if dat not in getdatasets: getdatasets.append(dat) if "options" in i and len( i["options"] ) > 0: #checkboxes only currently, may extend to choicebox for opt in i["options"]: if opt["formname"] in request.params and request.params.getone( opt['formname']): if 'enabled' in opt: for dat in opt['enabled']: if dat not in getdatasets: getdatasets.append(dat) if 'required' in opt: for dat in opt['required']: if dat not in getdatasets: getdatasets.append(dat) figstocapture[i['setenum']].extend(opt['included']) else: for f in opt['included']: figstocapture[i['setenum']].append('-' + f) except: pass session['figstocapture'] = figstocapture session['datastreams'] = getdatasets #if None in session['figstocapture']: picnicsession.storesession(session)
def makeDPLFromSession(session, doSearch=True):
    """Build the DPL processing chain described by `session`.

    With doSearch=False, returns (dpl_hsrl object, search-parameter dict)
    without starting a search.  Otherwise instantiates the narrator chain,
    optionally splicing in the MMCR merge stream when 'merge' is among
    session['datastreams'], and returns a PicnicProgressNarrator wrapping it.
    Also persists the effective process_parameters.json when none exists yet.
    """
    # session key -> dpl_hsrl constructor keyword
    copyToInit = {
        'dataset': 'instrument',
        'maxtimeslice': 'maxtimeslice_timedelta',
        'data_request': 'data_request',
        'lib_filetype': 'filetype',
    }
    # session key -> search keyword
    copyToSearch = {
        'starttime': 'start_time_datetime',
        'endtime': 'end_time_datetime',
        'altmin': 'min_alt_m',
        'altmax': 'max_alt_m',
        'timeres': 'timeres_timedelta',
        'altres': 'altres_m',
    }
    hasProgress = False
    from hsrl.dpl_experimental.dpl_hsrl import dpl_hsrl
    process_control = None
    if os.access(picnicsession.sessionfile(session, 'process_parameters.json'),
                 os.R_OK):
        process_control = picnicsession.loadjson(session,
                                                 'process_parameters.json')
        import hsrl.utils.json_config as jc
        process_control = jc.json_config(process_control,
                                         default_key='process_defaults')
    dplobj = dpl_hsrl(process_control=process_control,
                      **fromSession(session, copyToInit))
    if not doSearch:
        return dplobj, fromSession(session, copyToSearch)
    searchparms = fromSession(session, copyToSearch)
    hsrlnar = dplobj(**searchparms)
    dplc = hsrlnar
    if 'merge' in session['datastreams']:  # add merge to rs_mmcr, refit
        import hsrl.dpl_tools.time_frame as time_slicing
        import hsrl.dpl_tools.resample_altitude as altitude_resampling
        import hsrl.dpl_tools.substruct as frame_substruct
        from hsrl.dpl_netcdf.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import hsrl.dpl_netcdf.MMCRMergeLibrarian as mmcr
        import hsrl.utils.hsrl_array_utils as hau
        if session['dataset'] == 'ahsrl':
            mmcrzoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
            mmcrlib = mmcr.MMCRMergeLibrarian(
                session['dataset'],
                ['eurmmcrmerge.C1.c1.', 'nsaarscl1clothC1.c1.'],
                zoo=mmcrzoo)
        elif session['dataset'] == 'mf2hsrl':
            pass  # set up zoo and lib for mf2
        # NOTE(review): mmcrlib is unbound for any dataset other than
        # 'ahsrl' (including the mf2hsrl stub above) — a merge request for
        # those datasets raises NameError here; confirm intended coverage.
        # BUGFIX: end= previously passed start_time_datetime, producing a
        # zero-length MMCR search window.
        mmcrnar = mmcr.MMCRMergeCorrector(
            mmcrlib(start=searchparms['start_time_datetime'],
                    end=searchparms['end_time_datetime']))
        mmcrnar = mmcr.MMCRMergeBackscatterToReflectivity(
            altitude_resampling.ResampleXd(
                time_slicing.TimeGinsu(mmcrnar, 'times'), 'heights',
                dplc.altitudeAxis))
        hsrlnarsplitter = frame_substruct.SubstructBrancher(hsrlnar)
        hsrlinvnar = time_slicing.TimeGinsu(
            hsrlnarsplitter.narrateSubstruct('rs_inv'), 'times')
        from dplkit.simple.blender import TimeInterpolatedMerge
        merge = TimeInterpolatedMerge(hsrlinvnar, [mmcrnar],
                                      allow_nans=True,
                                      channels=[
                                          'heights', 'Reflectivity',
                                          'MeanDopplerVelocity',
                                          'Backscatter', 'SpectralWidth'
                                      ])
        merge = frame_substruct.Retyper(merge, hau.Time_Z_Group, {
            'timevarname': 'times',
            'altname': 'heights'
        })
        # Recombine the branched substructures plus the merged radar stream.
        dplc = frame_substruct.SubstructMerger(
            'rs_inv', {
                'rs_mean': hsrlnarsplitter.narrateSubstruct('rs_mean'),
                'rs_raw': hsrlnarsplitter.narrateSubstruct('rs_raw'),
                'rs_inv': hsrlnarsplitter.narrateSubstruct('rs_inv'),
                'rs_mmcr': merge,
                'rs_init': hsrlnarsplitter.narrateSubstruct('rs_init'),
                'rs_static': hsrlnarsplitter.narrateSubstruct('rs_static'),
                'profiles': hsrlnarsplitter.narrateSubstruct('profiles',
                                                             sparse=True),
                'rs_Cxx': hsrlnarsplitter.narrateSubstruct('rs_Cxx',
                                                           sparse=True)
            }, hau.Time_Z_Group)
    if not os.access(
            picnicsession.sessionfile(session, 'process_parameters.json'),
            os.R_OK):
        # Persist the defaults actually used so a rerun is reproducible.
        picnicsession.storejson(
            session,
            hsrlnar.get_process_control().json_representation(),
            'process_parameters.json')
    picnicsession.updateSessionComment(session, 'processing with DPL')
    if hasProgress:
        return dplc
    return picnicsession.PicnicProgressNarrator(
        dplc, getLastOf('times', ['rs_inv', 'rs_mean', 'rs_raw']),
        searchparms['start_time_datetime'], searchparms['end_time_datetime'],
        session)
def parseImageParametersBackground(request, session):
    """Apply the session's figure selection to the display configuration.

    session['figstocapture'] maps instrument -> list of figure names (None,
    or None inside a list, means capture everything).  '#'+name counts as a
    request; a '-'/'-#' prefix explicitly disables a figure.  Writes the
    resulting display_parameters.json and sets session['data_request'] /
    session['lib_filetype'] when they are not already present.
    """
    picnicsession.updateSessionComment(session, 'setup')
    if 'custom_display' in request.params and request.params.getone(
            'custom_display') != 'default':
        # Reload the previously uploaded custom display config.
        import hsrl.utils.json_config as jc
        disp = jc.json_config(
            picnicsession.loadjson(session, 'display_parameters.json'))
    else:
        import hsrl.data_stream.display_utilities as du
        (disp,
         conf) = du.get_display_defaults(session['display_defaults_file'])
        #else:#fixme should this be enable_all()?
        #    (disp,conf)=du.get_display_defaults('web_plots.json')
    allfigs = session['figstocapture'] is None
    if not allfigs:
        # None inside any instrument's list also means "all figures".
        for k, v in session['figstocapture'].items():
            if None in v:
                allfigs = True
                break
    if not allfigs:
        # Selected figures only: disable everything, then enable requests.
        data_req = 'images'
        lib_filetype = 'data'
        for fi in disp.get_attrs():  # for each figure
            if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                disp.set_value(fi, 'enable', 0)
        for inst, figset in session['figstocapture'].items():
            for fi in disp.get_attrs():  # for each figure
                if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                    if fi in figset or ('#' + fi) in figset:
                        # if requested, enable it
                        disp.set_value(fi, 'enable', 1)
                        if not fi.endswith('_image') and inst == 'hsrl':
                            # non-image hsrl figures require housekeeping data
                            data_req = 'images housekeeping'
                            lib_filetype = None
                    elif ('-' + fi) in figset or ('-#' + fi) in figset:
                        # if explicitly disabled, disable it
                        disp.set_value(fi, 'enable', 0)
    else:
        # Capture all figures except those explicitly disabled.
        if session['figstocapture'] is not None:
            for inst, figset in session['figstocapture'].items():
                for fi in disp.get_attrs():  # for each figure
                    if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                        if ('-' + fi) in figset or ('-#' + fi) in figset:
                            # if explicitly disabled, disable it
                            disp.set_value(fi, 'enable', 0)
        data_req = 'images housekeeping'
        lib_filetype = None
    picnicsession.storejson(session, disp.json_representation(),
                            'display_parameters.json')
    # Preserve any values chosen by an earlier step.
    if 'data_request' not in session:
        session['data_request'] = data_req
    if 'lib_filetype' not in session:
        session['lib_filetype'] = lib_filetype
    picnicsession.storesession(session)
def parseImageParameters(request,session): methods=['site','dataset','instrument'] for m in methods: if m not in request.params: continue method=m session['method']=method try: session[method]=int(request.params.getone(method)); session['methodkey']=int(request.params.getone(method)); except: session[method]=request.params.getone(method); session['methodkey']=request.params.getone(method); break starttime=datetime(int(request.params.getone('byr')), int(request.params.getone('bmo')), int(request.params.getone('bdy')), int(request.params.getone('bhr')), int(request.params.getone('bmn')), 0) endtime=datetime(int(request.params.getone('eyr')), int(request.params.getone('emo')), int(request.params.getone('edy')), int(request.params.getone('ehr')), int(request.params.getone('emn')), 0) session['altmin']=float(request.params.getone('lheight'))*1000 session['altmax']=float(request.params.getone('height'))*1000 session['starttime']=starttime.strftime(picnicsession.json_dateformat) session['endtime']=endtime.strftime(picnicsession.json_dateformat) #contstruct dpl datinfo=lib(**{method:session[method]}) instruments=datinfo['Instruments'] name=datinfo['Name'] datasetname=instruments[0].lower() #print figstocapture datasets=[] for inst in instruments: datasets.extend(lib.instrument(inst)['datasets']) #dplc=dpl_rti(datasetname,starttime,endtime,timedelta(seconds=timeres),endtime-starttime,altmin,altmax,altres);#alt in m #construct image generation parameters session['dataset']=datasetname session['name']=name if 'custom_processing' in request.params and request.params.getone('custom_processing')!='default': cust=request.params.getone('custom_processing') if cust=='custom': try: pd=request.params.getone('process_parameters_content') pdc=pd.file.read() d=json.loads(pdc)#.file.read()) except: traceback.format_exc() return HTTPBadRequest() else: d=server_archive.get_archived_json(request.params.getone('custom_processing_token'),cust) #print 'Storing custom process parameters 
',request.params.getone('process_parameters_content') picnicsession.storejson(session,d,'process_parameters.json') #return HTTPTemporaryRedirect(location=request.route_path('progress_withid',session=sessionid)) if 'custom_display' in request.params and request.params.getone('custom_display')!='default': cust=request.params.getone('custom_display') if cust=='custom': pd=request.params.getone('display_defaults_content') print 'Storing custom image parameters to',session['sessionid'],'display_parameters.json' try: d=json.loads(pd.file.read()) except: traceback.format_exc() return HTTPBadRequest() else: d=server_archive.get_archived_json(request.params.getone('custom_display_token'),cust) picnicsession.storejson(session,d,'display_parameters.json') session['figstocapture']=None getdatasets=datasets imagesetlist=jsgen.formsetsForInstruments(datasets,'images') session['figrequest']={} for i in imagesetlist: session['figrequest'][i['formname']]='custom' elif 'display_defaults_file' in request.params: session['display_defaults_file']=request.params.getone('display_defaults_file') if os.path.sep in session['display_defaults_file']: session['display_defaults_file']=picnicsession.sessionfile(session,session['display_defaults_file']) session['figstocapture']=None getdatasets=datasets imagesetlist=jsgen.formsetsForInstruments(datasets,'images') session['figrequest']={} for i in imagesetlist: session['figrequest'][i['formname']]='custom' else: session['display_defaults_file']='all_plots.json' imagesetlist=jsgen.formsetsForInstruments(datasets,'images') getdatasets=[] figstocapture={} session['figrequest']={} for i in imagesetlist: #print i try: setmode=request.params.getone(i['formname']) session['figrequest'][i['formname']]=setmode figstocapture[i['setenum']]=i['sets'][setmode]['figs'] if len(i['sets'][setmode]['figs'])>0:#radio buttons if 'enabled' in i['sets'][setmode]: for dat in i['sets'][setmode]['enabled']: if dat not in getdatasets: getdatasets.append(dat) if 'required' 
in i['sets'][setmode]: for dat in i['sets'][setmode]['required']: if dat not in getdatasets: getdatasets.append(dat) if "options" in i and len(i["options"])>0:#checkboxes only currently, may extend to choicebox for opt in i["options"]: if opt["formname"] in request.params and request.params.getone(opt['formname']): if 'enabled' in opt: for dat in opt['enabled']: if dat not in getdatasets: getdatasets.append(dat) if 'required' in opt: for dat in opt['required']: if dat not in getdatasets: getdatasets.append(dat) figstocapture[i['setenum']].extend(opt['included']) else: for f in opt['included']: figstocapture[i['setenum']].append('-'+f) except: pass session['figstocapture']=figstocapture session['datastreams']=getdatasets #if None in session['figstocapture']: picnicsession.storesession(session)
def makeDPLFromSession(session, doSearch=True):
    """Construct the DPL narrator chain for this session.

    doSearch=False returns (dpl_hsrl instance, search-parameter dict) for
    the caller to invoke.  Otherwise runs the search, splices in the MMCR
    merge stream when 'merge' appears in session['datastreams'], saves the
    effective process_parameters.json when absent, and returns a
    PicnicProgressNarrator wrapping the resulting frame stream.
    """
    # session key -> dpl_hsrl constructor keyword
    copyToInit = {
        'dataset': 'instrument',
        'maxtimeslice': 'maxtimeslice_timedelta',
        'data_request': 'data_request',
        'lib_filetype': 'filetype',
    }
    # session key -> search keyword
    copyToSearch = {
        'starttime': 'start_time_datetime',
        'endtime': 'end_time_datetime',
        'altmin': 'min_alt_m',
        'altmax': 'max_alt_m',
        'timeres': 'timeres_timedelta',
        'altres': 'altres_m',
    }
    hasProgress = False
    from hsrl.dpl_experimental.dpl_hsrl import dpl_hsrl
    process_control = None
    if os.access(picnicsession.sessionfile(session, 'process_parameters.json'),
                 os.R_OK):
        process_control = picnicsession.loadjson(session,
                                                 'process_parameters.json')
        import hsrl.utils.json_config as jc
        process_control = jc.json_config(process_control,
                                         default_key='process_defaults')
    dplobj = dpl_hsrl(process_control=process_control,
                      **fromSession(session, copyToInit))
    if not doSearch:
        return dplobj, fromSession(session, copyToSearch)
    searchparms = fromSession(session, copyToSearch)
    hsrlnar = dplobj(**searchparms)
    dplc = hsrlnar
    if 'merge' in session['datastreams']:  # add merge to rs_mmcr, refit
        import hsrl.dpl_tools.time_frame as time_slicing
        import hsrl.dpl_tools.resample_altitude as altitude_resampling
        import hsrl.dpl_tools.substruct as frame_substruct
        from hsrl.dpl_netcdf.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import hsrl.dpl_netcdf.MMCRMergeLibrarian as mmcr
        import hsrl.utils.hsrl_array_utils as hau
        if session['dataset'] == 'ahsrl':
            mmcrzoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
            mmcrlib = mmcr.MMCRMergeLibrarian(
                session['dataset'],
                ['eurmmcrmerge.C1.c1.', 'nsaarscl1clothC1.c1.'],
                zoo=mmcrzoo)
        elif session['dataset'] == 'mf2hsrl':
            pass  # set up zoo and lib for mf2
        # NOTE(review): datasets other than 'ahsrl' leave mmcrlib unbound
        # and will raise NameError below — confirm intended coverage.
        # BUGFIX: `end` previously received start_time_datetime, yielding
        # an empty MMCR search window.
        mmcrnar = mmcr.MMCRMergeCorrector(
            mmcrlib(start=searchparms['start_time_datetime'],
                    end=searchparms['end_time_datetime']))
        mmcrnar = mmcr.MMCRMergeBackscatterToReflectivity(
            altitude_resampling.ResampleXd(
                time_slicing.TimeGinsu(mmcrnar, 'times'), 'heights',
                dplc.altitudeAxis))
        hsrlnarsplitter = frame_substruct.SubstructBrancher(hsrlnar)
        hsrlinvnar = time_slicing.TimeGinsu(
            hsrlnarsplitter.narrateSubstruct('rs_inv'), 'times')
        from dplkit.simple.blender import TimeInterpolatedMerge
        merge = TimeInterpolatedMerge(hsrlinvnar, [mmcrnar],
                                      allow_nans=True,
                                      channels=[
                                          'heights', 'Reflectivity',
                                          'MeanDopplerVelocity',
                                          'Backscatter', 'SpectralWidth'
                                      ])
        merge = frame_substruct.Retyper(merge, hau.Time_Z_Group, {
            'timevarname': 'times',
            'altname': 'heights'
        })
        # Reassemble all branched substructures plus the merged radar data.
        dplc = frame_substruct.SubstructMerger(
            'rs_inv', {
                'rs_mean': hsrlnarsplitter.narrateSubstruct('rs_mean'),
                'rs_raw': hsrlnarsplitter.narrateSubstruct('rs_raw'),
                'rs_inv': hsrlnarsplitter.narrateSubstruct('rs_inv'),
                'rs_mmcr': merge,
                'rs_init': hsrlnarsplitter.narrateSubstruct('rs_init'),
                'rs_static': hsrlnarsplitter.narrateSubstruct('rs_static'),
                'profiles': hsrlnarsplitter.narrateSubstruct('profiles',
                                                             sparse=True),
                'rs_Cxx': hsrlnarsplitter.narrateSubstruct('rs_Cxx',
                                                           sparse=True)
            }, hau.Time_Z_Group)
    if not os.access(
            picnicsession.sessionfile(session, 'process_parameters.json'),
            os.R_OK):
        # Save the defaults actually used so reruns are reproducible.
        picnicsession.storejson(
            session,
            hsrlnar.get_process_control().json_representation(),
            'process_parameters.json')
    picnicsession.updateSessionComment(session, 'processing with DPL')
    if hasProgress:
        return dplc
    return picnicsession.PicnicProgressNarrator(
        dplc, getLastOf('times', ['rs_inv', 'rs_mean', 'rs_raw']),
        searchparms['start_time_datetime'], searchparms['end_time_datetime'],
        session)