示例#1
0
def makeNetCDFFromDPL(session,DPLgen,templatefilename,netcdffilename):
    """Stream frames from DPLgen into a new netCDF file in the session folder.

    session          -- picnic session dict (must contain 'selected_fields')
    DPLgen           -- DPL frame-stream generator to consume
    templatefilename -- template describing the netCDF output layout
    netcdffilename   -- output file name, created inside the session folder

    Side effects: updates the session progress comment as it runs, dumps the
    first non-empty frame to frame.json, and writes a CDL dump to output.cdl.
    """
    picnicsession.updateSessionComment(session,'loading artist')
    #import hsrl.data_stream.open_config as oc
    import hsrl.dpl_experimental.dpl_artists as artists

    #folder=picnicsession.sessionfolder(sessionid);
    picnicsession.updateSessionComment(session,'opening blank netcdf file')

    ncfilename=picnicsession.sessionfile(session,netcdffilename,create=True)

    artist=artists.dpl_netcdf_artist(DPLgen,templatefilename,ncfilename,selected_bindings=session['selected_fields'])

    picnicsession.updateSessionComment(session,'processing')

    # frame substructures checked (in order) for a 'times' vector to report progress
    findTimes=['rs_raw','rs_mean','rs_inv']

    dumped=False

    for frame in artist:
        timewindow='blank'
        # BUG FIX: identity test for None instead of '!=' (PEP 8; '!=' can be
        # hijacked by a frame type's __ne__)
        if not dumped and frame is not None:
            doFrameDumpToFile(artist.provides,frame,picnicsession.sessionfile(session,'frame.json'))
            dumped=True
        for f in findTimes:
            if hasattr(frame,f) and hasattr(getattr(frame,f),'times') and len(getattr(frame,f).times)>0:
                t=getattr(frame,f).times
                timewindow=t[0].strftime('%Y.%m.%d %H:%M') + ' - ' + t[-1].strftime('%Y.%m.%d %H:%M')

        picnicsession.updateSessionComment(session,'appended data %s' % (timewindow))

    doNcDumpToFile(ncfilename,picnicsession.sessionfile(session,'output.cdl'))

    del artist
示例#2
0
def netcdfresult(request):
    """Pyramid view helper: load the session named in the route, parse its
    stored time window, and try to open the generated netCDF file.

    On success `nc` is an open netCDF4 Dataset and `inf` its file info; on
    any failure `nc` is None, `inf` is a placeholder, and `e` holds the error.

    NOTE(review): Python 2-only 'except Exception, err' syntax; this module
    predates Python 3.
    """
    #print 'URLREQ: ',request.matched_route.name
    sessionid = request.matchdict[
        'session']  #request.session.get_csrf_token();#params.getone('csrf_token')
    #folder=picnicsession.sessionfolder(sessionid);
    #sessiontask=tasks[sessionid]
    #session=sessiontask['session']
    #scan session folder for images
    session = picnicsession.loadsession(sessionid)

    # stored times are strings; convert back to datetime for display/use
    if 'starttime' in session:
        session['starttime'] = datetime.strptime(session['starttime'],
                                                 picnicsession.json_dateformat)
    if 'endtime' in session:
        session['endtime'] = datetime.strptime(session['endtime'],
                                               picnicsession.json_dateformat)
    fullfilename = picnicsession.sessionfile(
        sessionid, session['filename'])  #safejoin(folder,session['filename'])
    try:
        from netCDF4 import Dataset
        nc = Dataset(fullfilename, 'r')
        inf = picnicsession.infoOfFile(fullfilename)
        e = None
    except Exception, err:
        # file missing/unreadable: fall back to placeholder info, keep the error
        nc = None
        inf = [None, None, 0, None]
        e = err
示例#3
0
def makeNetCDFFromDPL(session, DPLgen, templatefilename, netcdffilename):
    """Stream frames from DPLgen into a fresh netCDF file in the session.

    Progress is reported through the session comment; the first non-empty
    frame is dumped to frame.json for inspection and a CDL dump of the
    finished file is written to output.cdl.
    """
    picnicsession.updateSessionComment(session, 'loading artist')
    #import hsrl.data_stream.open_config as oc
    import hsrl.dpl_experimental.dpl_artists as artists

    #folder=picnicsession.sessionfolder(sessionid);
    picnicsession.updateSessionComment(session, 'opening blank netcdf file')

    ncfilename = picnicsession.sessionfile(session,
                                           netcdffilename,
                                           create=True)

    artist = artists.dpl_netcdf_artist(
        DPLgen,
        templatefilename,
        ncfilename,
        selected_bindings=session['selected_fields'])

    picnicsession.updateSessionComment(session, 'processing')

    # frame substructures checked (in order) for a 'times' vector to
    # report a human-readable progress window
    findTimes = ['rs_raw', 'rs_mean', 'rs_inv']

    dumped = False

    for frame in artist:
        timewindow = 'blank'
        # BUG FIX: identity test for None instead of '!=' (PEP 8)
        if not dumped and frame is not None:
            doFrameDumpToFile(artist.provides, frame,
                              picnicsession.sessionfile(session, 'frame.json'))
            dumped = True
        for f in findTimes:
            if hasattr(frame, f) and hasattr(getattr(
                    frame, f), 'times') and len(getattr(frame, f).times) > 0:
                t = getattr(frame, f).times
                timewindow = t[0].strftime('%Y.%m.%d %H:%M') + ' - ' + t[
                    -1].strftime('%Y.%m.%d %H:%M')

        picnicsession.updateSessionComment(session,
                                           'appended data %s' % (timewindow))

    doNcDumpToFile(ncfilename,
                   picnicsession.sessionfile(session, 'output.cdl'))

    del artist
示例#4
0
文件: views.py 项目: CodaP/DplTools
def sessionUrls(request,sessionid,extensions):
    """Yield {'url','name'} dicts for session files matching any extension.

    Scans the session folder (sorted) and, for each file whose name ends
    with one of *extensions*, yields a route URL + file name pair.  Yields
    nothing if the folder cannot be listed.
    """
    try:
        fl=os.listdir(picnicsession.sessionfile(sessionid,None))
        fl.sort()
    # BUG FIX: bare 'except:' also swallowed KeyboardInterrupt/SystemExit/
    # GeneratorExit; only ordinary errors should make this a no-op.
    except Exception:
        return
    for f in fl:
        for e in extensions:
            if f.endswith(e):
                yield {'url':request.route_path('session_resource',session=sessionid,filename=f),'name':f}
                break
示例#5
0
def sessionUrls(request, sessionid, extensions):
    """Generate {'url', 'name'} entries for session files whose names end
    with one of *extensions*; silently yields nothing if the session
    folder cannot be listed."""
    try:
        filenames = sorted(os.listdir(picnicsession.sessionfile(sessionid, None)))
    except:
        return
    for name in filenames:
        if any(name.endswith(ext) for ext in extensions):
            yield {
                'url': request.route_path('session_resource',
                                          session=sessionid,
                                          filename=name),
                'name': name,
            }
示例#6
0
文件: views.py 项目: CodaP/DplTools
def netcdfresult(request):
    """Pyramid view helper: load the session from the route, restore its
    time window from stored strings, and attempt to open the session's
    generated netCDF file.

    On failure `nc` is None, `inf` is a placeholder info list, and `e`
    carries the exception for the template to display.

    NOTE(review): Python 2-only 'except Exception, err' syntax.
    """
    #print 'URLREQ: ',request.matched_route.name
    sessionid=request.matchdict['session']#request.session.get_csrf_token();#params.getone('csrf_token')
    #folder=picnicsession.sessionfolder(sessionid);
    #sessiontask=tasks[sessionid]
    #session=sessiontask['session']
    #scan session folder for images
    session=picnicsession.loadsession(sessionid)

    # stored times are strings; convert back to datetime objects
    if 'starttime' in session:
        session['starttime']=datetime.strptime(session['starttime'],picnicsession.json_dateformat)
    if 'endtime' in session:
        session['endtime']=datetime.strptime(session['endtime'],picnicsession.json_dateformat)
    fullfilename=picnicsession.sessionfile(sessionid,session['filename'])#safejoin(folder,session['filename'])
    try:
        from netCDF4 import Dataset
        nc=Dataset(fullfilename,'r')
        inf=picnicsession.infoOfFile(fullfilename)
        e=None
    except Exception, err:
        # file missing/unreadable: placeholder info, keep the error for display
        nc=None
        inf=[None,None,0,None]
        e=err
示例#7
0
def parseImageParameters(request, session):
    """Populate *session* from an image-request form and persist it.

    Parses, in order: the data-selection method (site/dataset/instrument),
    the begin/end time fields, the altitude range, optional custom
    processing/display JSON (uploaded or archived), and the set of figures
    to capture per image form set.  Finally stores the session to disk.

    NOTE(review): contains a Python 2 print statement — py2-only module.
    """

    # first method present in the form wins; its value may be an int key
    methods = ['site', 'dataset', 'instrument']
    for m in methods:
        if m not in request.params:
            continue
        method = m
        session['method'] = method
        try:
            session[method] = int(request.params.getone(method))
            session['methodkey'] = int(request.params.getone(method))
        except:
            # non-numeric value: store the raw string instead
            session[method] = request.params.getone(method)
            session['methodkey'] = request.params.getone(method)
        break
    starttime = datetime(int(request.params.getone('byr')),
                         int(request.params.getone('bmo')),
                         int(request.params.getone('bdy')),
                         int(request.params.getone('bhr')),
                         int(request.params.getone('bmn')), 0)
    endtime = datetime(int(request.params.getone('eyr')),
                       int(request.params.getone('emo')),
                       int(request.params.getone('edy')),
                       int(request.params.getone('ehr')),
                       int(request.params.getone('emn')), 0)
    # form heights are in km; session stores meters
    session['altmin'] = float(request.params.getone('lheight')) * 1000
    session['altmax'] = float(request.params.getone('height')) * 1000
    session['starttime'] = starttime.strftime(picnicsession.json_dateformat)
    session['endtime'] = endtime.strftime(picnicsession.json_dateformat)
    #contstruct dpl
    datinfo = lib(**{method: session[method]})
    instruments = datinfo['Instruments']
    name = datinfo['Name']
    datasetname = instruments[0].lower()
    #print figstocapture
    datasets = []
    for inst in instruments:
        datasets.extend(lib.instrument(inst)['datasets'])

    #dplc=dpl_rti(datasetname,starttime,endtime,timedelta(seconds=timeres),endtime-starttime,altmin,altmax,altres);#alt in m
    #construct image generation parameters
    session['dataset'] = datasetname
    session['name'] = name

    # optional custom processing parameters: uploaded file or archived token
    if 'custom_processing' in request.params and request.params.getone(
            'custom_processing') != 'default':
        cust = request.params.getone('custom_processing')
        if cust == 'custom':
            try:
                pd = request.params.getone('process_parameters_content')
                pdc = pd.file.read()
                d = json.loads(pdc)  #.file.read())
            except:
                traceback.format_exc()
                return HTTPBadRequest()
        else:
            d = server_archive.get_archived_json(
                request.params.getone('custom_processing_token'), cust)
        #print 'Storing custom process parameters ',request.params.getone('process_parameters_content')
        picnicsession.storejson(session, d, 'process_parameters.json')
    #return HTTPTemporaryRedirect(location=request.route_path('progress_withid',session=sessionid))
    # optional custom display parameters; figstocapture=None means "all figures"
    if 'custom_display' in request.params and request.params.getone(
            'custom_display') != 'default':
        cust = request.params.getone('custom_display')
        if cust == 'custom':
            pd = request.params.getone('display_defaults_content')
            print 'Storing custom image parameters to', session[
                'sessionid'], 'display_parameters.json'
            try:
                d = json.loads(pd.file.read())
            except:
                traceback.format_exc()
                return HTTPBadRequest()
        else:
            d = server_archive.get_archived_json(
                request.params.getone('custom_display_token'), cust)
        picnicsession.storejson(session, d, 'display_parameters.json')
        session['figstocapture'] = None
        getdatasets = datasets
        imagesetlist = jsgen.formsetsForInstruments(datasets, 'images')
        session['figrequest'] = {}
        for i in imagesetlist:
            session['figrequest'][i['formname']] = 'custom'
    elif 'display_defaults_file' in request.params:
        session['display_defaults_file'] = request.params.getone(
            'display_defaults_file')
        if os.path.sep in session['display_defaults_file']:
            session['display_defaults_file'] = picnicsession.sessionfile(
                session, session['display_defaults_file'])
        session['figstocapture'] = None
        getdatasets = datasets
        imagesetlist = jsgen.formsetsForInstruments(datasets, 'images')
        session['figrequest'] = {}
        for i in imagesetlist:
            session['figrequest'][i['formname']] = 'custom'
    else:
        # default display: build figstocapture from the per-set form choices
        session['display_defaults_file'] = 'all_plots.json'
        imagesetlist = jsgen.formsetsForInstruments(datasets, 'images')
        getdatasets = []
        figstocapture = {}
        session['figrequest'] = {}
        for i in imagesetlist:
            #print i
            try:
                setmode = request.params.getone(i['formname'])
                session['figrequest'][i['formname']] = setmode
                figstocapture[i['setenum']] = i['sets'][setmode]['figs']
                if len(i['sets'][setmode]['figs']) > 0:  #radio buttons
                    if 'enabled' in i['sets'][setmode]:
                        for dat in i['sets'][setmode]['enabled']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                    if 'required' in i['sets'][setmode]:
                        for dat in i['sets'][setmode]['required']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                if "options" in i and len(
                        i["options"]
                ) > 0:  #checkboxes only currently, may extend to choicebox
                    for opt in i["options"]:
                        if opt["formname"] in request.params and request.params.getone(
                                opt['formname']):
                            if 'enabled' in opt:
                                for dat in opt['enabled']:
                                    if dat not in getdatasets:
                                        getdatasets.append(dat)
                            if 'required' in opt:
                                for dat in opt['required']:
                                    if dat not in getdatasets:
                                        getdatasets.append(dat)
                            figstocapture[i['setenum']].extend(opt['included'])
                        else:
                            # unchecked option: mark its figures as excluded
                            for f in opt['included']:
                                figstocapture[i['setenum']].append('-' + f)
            except:
                pass
        session['figstocapture'] = figstocapture
    session['datastreams'] = getdatasets
    #if None in session['figstocapture']:
    picnicsession.storesession(session)
示例#8
0
def makeDPLFromNetCDF(session, netcdffilename):
    """Open a previously written session netCDF file as a template-driven
    DPL narrator and return it."""
    import hsrl.dpl_experimental.dpl_read_templatenetcdf as dpl_rtnc
    #doNcDumpToFile(ncfilename,picnicsession.sessionfile(session,'output.cdl'))
    return dpl_rtnc.dpl_read_templatenetcdf(
        picnicsession.sessionfile(session, netcdffilename))
示例#9
0
def makeImagesFromDPL(session, DPLgen):
    """Run DPLgen through the graphics artists and save requested figures.

    Reads display/process parameter JSON from the session, drives the hsrl
    image artist (plus the radar artist when 'merge' is in the session's
    datastreams), then writes each requested figure to
    figureNNNN_<inst>_<fig>.png inside the session folder.  A requested
    figure that was never drawn produces an empty placeholder file.
    """
    picnicsession.updateSessionComment(session, 'loading graphics artist')
    #import hsrl.data_stream.open_config as oc
    import hsrl.dpl_experimental.dpl_artists as artists
    import hsrl.utils.json_config as jc
    #import hsrl.calibration.cal_read_utilities as cru
    #import hsrl.graphics.graphics_toolkit as gt
    instrument = session['dataset']
    #sessionid=session['sessionid']
    disp = jc.json_config(
        picnicsession.loadjson(
            session, 'display_parameters.json'))  #session['display_defaults'])
    params = jc.json_config(
        picnicsession.loadjson(session, 'process_parameters.json'),
        'process_defaults')

    #folder=picnicsession.sessionfolder(sessionid)#safejoin('.','sessions',sessionid);
    artistlist = {}
    picnicsession.updateSessionComment(session, 'creating artist')
    artist = artists.dpl_images_artist(framestream=DPLgen,
                                       instrument=session['dataset'],
                                       max_alt=session['altmax'],
                                       processing_defaults=params,
                                       display_defaults=disp)
    artistlist['hsrl'] = artist
    if 'merge' in session['datastreams']:
        artist = artists.dpl_radar_images_artist(framestream=artist,
                                                 display_defaults=disp)
        artistlist['merge'] = artist
    picnicsession.updateSessionComment(session, 'processing')
    artist()
    picnicsession.updateSessionComment(session, 'rendering figures')
    fignum = 0

    # BUG FIX: the original called .copy() before testing for None, which
    # raised AttributeError when figstocapture was None (the "capture all
    # figures" setting written by parseImageParameters).
    capturingfigsgroups = session['figstocapture']
    if capturingfigsgroups is None:
        # None means "every figure of every artist": seed each group with a
        # [None] placeholder that is expanded below.
        capturingfigsgroups = dict((k, [None]) for k in artistlist.keys())
    else:
        capturingfigsgroups = capturingfigsgroups.copy()
    #print capturingfigs
    for inst, capturingfigs in capturingfigsgroups.items():
        if not inst in artistlist:
            continue
        alreadycaptured = []
        figs = artistlist[inst].figs
        for x in capturingfigs:  #plt._pylab_helpers.Gcf.get_all_fig_managers():
            # skip duplicates and entries marked commented ('#') or excluded ('-')
            if x in alreadycaptured or (x is not None and
                                        (x.startswith('#')
                                         or x.startswith('-'))):
                continue
            alreadycaptured.append(x)
            if x is None:
                # placeholder: append every available figure name so the
                # ongoing iteration picks them up (deliberate extend-while-
                # iterating).
                tmp = [f for f in figs]
                tmp.sort()
                capturingfigs.extend(tmp)
                continue
            figname = picnicsession.sessionfile(
                session, 'figure%04i_%s_%s.png' % (fignum, inst, x))
            fignum = fignum + 1
            #      print 'updating  %d' % x.num
            picnicsession.updateSessionComment(
                session, 'capturing ' + inst + ' figure ' + x)
            if x not in figs:
                # figure was requested but never drawn: leave an empty file
                # (py3 compat: open() instead of the py2-only file() builtin)
                f = open(figname, 'w')
                f.close()
                continue

            fig = figs.figure(x)  #plt.figure(x.num)

            # QApplication.processEvents()

            fig.canvas.draw()
            #fig.canvas.
            fig.savefig(figname, format='png', bbox_inches='tight')
    picnicsession.updateSessionComment(session, 'done')
示例#10
0
def makeDPLFromSession(session, doSearch=True):
    """Build the HSRL DPL narrator chain described by *session*.

    With doSearch=False, returns (dpl_hsrl instance, search kwargs) without
    running the search.  Otherwise runs the search, optionally splices in
    the MMCR radar 'merge' stream, ensures process_parameters.json exists,
    and wraps the result in a progress narrator for the picnic UI.
    """
    # session keys -> dpl_hsrl constructor kwargs
    copyToInit = {
        'dataset': 'instrument',
        'maxtimeslice': 'maxtimeslice_timedelta',
        'data_request': 'data_request',
        'lib_filetype': 'filetype',
    }
    # session keys -> search-call kwargs
    copyToSearch = {
        'starttime': 'start_time_datetime',
        'endtime': 'end_time_datetime',
        'altmin': 'min_alt_m',
        'altmax': 'max_alt_m',
        'timeres': 'timeres_timedelta',
        'altres': 'altres_m',
    }
    hasProgress = False
    from hsrl.dpl_experimental.dpl_hsrl import dpl_hsrl
    process_control = None
    if os.access(picnicsession.sessionfile(session, 'process_parameters.json'),
                 os.R_OK):
        process_control = picnicsession.loadjson(session,
                                                 'process_parameters.json')
        import hsrl.utils.json_config as jc
        process_control = jc.json_config(process_control,
                                         default_key='process_defaults')
    dplobj = dpl_hsrl(process_control=process_control,
                      **fromSession(session, copyToInit))
    if not doSearch:
        return dplobj, fromSession(session, copyToSearch)
    searchparms = fromSession(session, copyToSearch)
    #try:
    #    import hsrl.utils.threaded_generator
    #    dplc=hsrl.utils.threaded_generator.threaded_generator(dplobj,**searchparms)
    #except:
    hsrlnar = dplobj(**searchparms)
    dplc = hsrlnar
    if 'merge' in session['datastreams']:  #add merge to rs_mmcr, refit
        import hsrl.dpl_tools.time_frame as time_slicing
        import hsrl.dpl_tools.resample_altitude as altitude_resampling
        import hsrl.dpl_tools.substruct as frame_substruct
        from hsrl.dpl_netcdf.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import hsrl.dpl_netcdf.MMCRMergeLibrarian as mmcr
        import hsrl.utils.hsrl_array_utils as hau

        if session['dataset'] == 'ahsrl':
            mmcrzoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
            mmcrlib = mmcr.MMCRMergeLibrarian(
                session['dataset'],
                ['eurmmcrmerge.C1.c1.', 'nsaarscl1clothC1.c1.'],
                zoo=mmcrzoo)
        elif session['dataset'] == 'mf2hsrl':
            # NOTE(review): mmcrlib is never bound on this path, so the call
            # below raises NameError for 'mf2hsrl' sessions — confirm intent.
            pass  #set up zoo and lib for mf2
        # BUG FIX: the librarian search previously used start_time_datetime
        # for BOTH ends of the window (an empty search range).
        mmcrnar = mmcr.MMCRMergeCorrector(
            mmcrlib(start=searchparms['start_time_datetime'],
                    end=searchparms['end_time_datetime']))
        mmcrnar = mmcr.MMCRMergeBackscatterToReflectivity(
            altitude_resampling.ResampleXd(
                time_slicing.TimeGinsu(mmcrnar, 'times'), 'heights',
                dplc.altitudeAxis))

        hsrlnarsplitter = frame_substruct.SubstructBrancher(hsrlnar)
        hsrlinvnar = time_slicing.TimeGinsu(
            hsrlnarsplitter.narrateSubstruct('rs_inv'), 'times')  #,isEnd=True)

        from dplkit.simple.blender import TimeInterpolatedMerge

        merge = TimeInterpolatedMerge(hsrlinvnar, [mmcrnar],
                                      allow_nans=True,
                                      channels=[
                                          'heights', 'Reflectivity',
                                          'MeanDopplerVelocity', 'Backscatter',
                                          'SpectralWidth'
                                      ])
        merge = frame_substruct.Retyper(merge, hau.Time_Z_Group, {
            'timevarname': 'times',
            'altname': 'heights'
        })

        # reassemble all substructures (plus the merged radar stream) into
        # one frame stream keyed on rs_inv
        dplc = frame_substruct.SubstructMerger(
            'rs_inv', {
                'rs_mean':
                hsrlnarsplitter.narrateSubstruct('rs_mean'),
                'rs_raw':
                hsrlnarsplitter.narrateSubstruct('rs_raw'),
                'rs_inv':
                hsrlnarsplitter.narrateSubstruct('rs_inv'),
                'rs_mmcr':
                merge,
                'rs_init':
                hsrlnarsplitter.narrateSubstruct('rs_init'),
                'rs_static':
                hsrlnarsplitter.narrateSubstruct('rs_static'),
                'profiles':
                hsrlnarsplitter.narrateSubstruct('profiles', sparse=True),
                'rs_Cxx':
                hsrlnarsplitter.narrateSubstruct('rs_Cxx', sparse=True)
            }, hau.Time_Z_Group)
    #merge=picnicsession.PicnicProgressNarrator(dplc,getLastOf('start'), searchparms['start_time_datetime'],searchparms['end_time_datetime'],session)
    #hasProgress=True

    # persist the effective processing parameters if none were supplied
    if not os.access(
            picnicsession.sessionfile(session, 'process_parameters.json'),
            os.R_OK):
        picnicsession.storejson(
            session,
            hsrlnar.get_process_control().json_representation(),
            'process_parameters.json')
    picnicsession.updateSessionComment(session, 'processing with DPL')
    if hasProgress:
        return dplc
    return picnicsession.PicnicProgressNarrator(
        dplc, getLastOf('times', ['rs_inv', 'rs_mean', 'rs_raw']),
        searchparms['start_time_datetime'], searchparms['end_time_datetime'],
        session)
示例#11
0
def parseNetCDFParameters(request, session):
    """Populate *session* from a netCDF-export form and persist it.

    Parses time/altitude resolution, the CDL template (including a custom
    upload), builds the output file name from the session time window,
    collects the selected netCDF fields and required datastreams, derives
    the default figure capture set, and stores the session to disk.

    NOTE(review): contains a Python 2 print statement and the py2-only
    file() builtin — py2-only module.
    """
    session['timeres'] = float(request.params.getone('timeres'))
    session['altres'] = float(request.params.getone('altres'))
    # maximum slice of data processed at once: 2 hours, in seconds
    session['maxtimeslice_timedelta'] = 60 * 60 * 2
    session['data_request'] = "images housekeeping"
    session['template'] = request.params.getone('cdltemplatename')
    if session['template'] == 'custom':
        # uploaded CDL template is saved into the session folder
        fn = picnicsession.sessionfile(session, 'template.cdl', create=True)
        file(fn, 'w').write(
            request.params.getone('cdltemplate_content').file.read())
        session['template'] = fn
    # compact timestamps used in the generated file name
    stf = datetime.strptime(
        session['starttime'],
        picnicsession.json_dateformat).strftime('_%Y%m%dT%H%M')
    etf = datetime.strptime(
        session['endtime'],
        picnicsession.json_dateformat).strftime('_%Y%m%dT%H%M')
    session['filesuffix'] = ('_%gs_%gm' %
                             (session['timeres'], session['altres']))
    session['filemode'] = request.params.getone('filemode')
    session['fileprefix'] = session['dataset'] + '_' + session['filemode']
    session['filename'] = session['dataset'] + stf + etf + session[
        'filesuffix'] + '.nc'
    session['username'] = request.params.getone('username')

    datinfo = lib(**{session['method']: session[session['method']]})
    instruments = datinfo['Instruments']
    #print figstocapture
    datasets = []
    for inst in instruments:
        datasets.extend(lib.instrument(inst)['datasets'])

    # collect selected netCDF fields unless 'allfields' was checked
    fieldstocapture = []
    if not 'allfields' in request.params or not request.params.getone(
            'allfields'):
        fieldsetlist = jsgen.formsetsForInstruments(datasets, 'netcdf')
        getdatasets = []
        for inst in fieldsetlist:  #per instrument
            for subset in inst['sets']:
                subsetincluded = False
                for checkbox in subset['options']:
                    formname = checkbox['formname']
                    if formname not in request.params or not request.params.getone(
                            formname):
                        continue
                    subsetincluded = True
                    for fieldname in checkbox['included']:
                        if fieldname not in fieldstocapture:
                            fieldstocapture.append(fieldname)
                    if 'enabled' in checkbox:
                        for dat in checkbox['enabled']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                    if 'required' in checkbox:
                        for dat in checkbox['required']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                if subsetincluded:
                    # any checked option pulls in the subset's own fields/streams
                    if 'included' in subset:
                        for fieldname in subset['included']:
                            if fieldname not in fieldstocapture:
                                fieldstocapture.append(fieldname)
                    if 'enabled' in subset:
                        for dat in subset['enabled']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                    if 'required' in subset:
                        for dat in subset['required']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
    else:
        getdatasets = datasets
    session['datastreams'] = getdatasets

    print fieldstocapture
    session['selected_fields'] = fieldstocapture

    figstocapture = {}

    # default display: capture the default image set; custom display means
    # figstocapture=None ("all figures")
    if 'custom_display' not in request.params or request.params.getone(
            'custom_display') == 'default':
        imagesetlist = jsgen.formsetsForInstruments(datasets, 'images')
        session['display_defaults_file'] = 'all_plots.json'

        for i in imagesetlist:
            #print i
            try:
                defmode = i['default']
                figstocapture[i['setenum']] = []
                for figname in i['sets'][defmode][
                        'figs']:  #get default images of default set
                    if 'image' in figname and figname not in figstocapture[
                            i['setenum']]:
                        figstocapture[i['setenum']].append(figname)
                if 'options' in i:  #and default options
                    for opt in i['options']:
                        if opt['default']:  #checkbox default is true
                            for figname in i['included']:
                                if 'image' in figname and figname not in figstocapture[
                                        i['setenum']]:
                                    figstocapture[i['setenum']].append(figname)
            except:
                pass
    else:
        figstocapture = None
    session['figstocapture'] = figstocapture
    session['lib_filetype'] = None
    picnicsession.storesession(session)
示例#12
0
def parseImageParameters(request,session):
    """Populate *session* from an image-request form and persist it.

    Parses the data-selection method (site/dataset/instrument), the
    begin/end time fields, the altitude range, optional custom
    processing/display JSON, and the per-form-set figure capture list,
    then stores the session to disk.

    NOTE(review): contains a Python 2 print statement — py2-only module.
    """

    # first method present in the form wins; value may be an int key or a string
    methods=['site','dataset','instrument']
    for m in methods:
        if m not in request.params:
            continue
        method=m
        session['method']=method
        try:
            session[method]=int(request.params.getone(method));
            session['methodkey']=int(request.params.getone(method));
        except:
            session[method]=request.params.getone(method);
            session['methodkey']=request.params.getone(method);
        break
    starttime=datetime(int(request.params.getone('byr')),
                       int(request.params.getone('bmo')),
                       int(request.params.getone('bdy')),
                       int(request.params.getone('bhr')),
                       int(request.params.getone('bmn')),
                       0)
    endtime=datetime(int(request.params.getone('eyr')),
                     int(request.params.getone('emo')),
                     int(request.params.getone('edy')),
                     int(request.params.getone('ehr')),
                     int(request.params.getone('emn')),
                     0)
    # form heights are in km; session stores meters
    session['altmin']=float(request.params.getone('lheight'))*1000
    session['altmax']=float(request.params.getone('height'))*1000
    session['starttime']=starttime.strftime(picnicsession.json_dateformat)
    session['endtime']=endtime.strftime(picnicsession.json_dateformat)
    #contstruct dpl
    datinfo=lib(**{method:session[method]})
    instruments=datinfo['Instruments']
    name=datinfo['Name']
    datasetname=instruments[0].lower()
    #print figstocapture
    datasets=[]
    for inst in instruments:
        datasets.extend(lib.instrument(inst)['datasets'])

    #dplc=dpl_rti(datasetname,starttime,endtime,timedelta(seconds=timeres),endtime-starttime,altmin,altmax,altres);#alt in m
    #construct image generation parameters
    session['dataset']=datasetname
    session['name']=name

    # optional custom processing parameters: uploaded file or archived token
    if 'custom_processing' in request.params and request.params.getone('custom_processing')!='default':
        cust=request.params.getone('custom_processing')
        if cust=='custom':
            try:
                pd=request.params.getone('process_parameters_content')
                pdc=pd.file.read()
                d=json.loads(pdc)#.file.read())
            except:
                traceback.format_exc()
                return HTTPBadRequest()
        else:
            d=server_archive.get_archived_json(request.params.getone('custom_processing_token'),cust)
        #print 'Storing custom process parameters ',request.params.getone('process_parameters_content')
        picnicsession.storejson(session,d,'process_parameters.json')
    #return HTTPTemporaryRedirect(location=request.route_path('progress_withid',session=sessionid))
    # optional custom display parameters; figstocapture=None means "all figures"
    if 'custom_display' in request.params and request.params.getone('custom_display')!='default':
        cust=request.params.getone('custom_display')
        if cust=='custom':
            pd=request.params.getone('display_defaults_content')
            print 'Storing custom image parameters to',session['sessionid'],'display_parameters.json'
            try:
                d=json.loads(pd.file.read())
            except:
                traceback.format_exc()
                return HTTPBadRequest()
        else:
            d=server_archive.get_archived_json(request.params.getone('custom_display_token'),cust)
        picnicsession.storejson(session,d,'display_parameters.json')
        session['figstocapture']=None
        getdatasets=datasets
        imagesetlist=jsgen.formsetsForInstruments(datasets,'images')
        session['figrequest']={}
        for i in imagesetlist:
            session['figrequest'][i['formname']]='custom'
    elif 'display_defaults_file' in request.params:
        session['display_defaults_file']=request.params.getone('display_defaults_file')
        if os.path.sep in session['display_defaults_file']:
            session['display_defaults_file']=picnicsession.sessionfile(session,session['display_defaults_file'])
        session['figstocapture']=None
        getdatasets=datasets
        imagesetlist=jsgen.formsetsForInstruments(datasets,'images')
        session['figrequest']={}
        for i in imagesetlist:
            session['figrequest'][i['formname']]='custom'
    else:
        # default display: build figstocapture from the per-set form choices
        session['display_defaults_file']='all_plots.json'
        imagesetlist=jsgen.formsetsForInstruments(datasets,'images')
        getdatasets=[]
        figstocapture={}
        session['figrequest']={}
        for i in imagesetlist:
            #print i
            try:
                setmode=request.params.getone(i['formname'])
                session['figrequest'][i['formname']]=setmode
                figstocapture[i['setenum']]=i['sets'][setmode]['figs']
                if len(i['sets'][setmode]['figs'])>0:#radio buttons
                    if 'enabled' in i['sets'][setmode]:
                        for dat in i['sets'][setmode]['enabled']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                    if 'required' in i['sets'][setmode]:
                        for dat in i['sets'][setmode]['required']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                if "options" in i and len(i["options"])>0:#checkboxes only currently, may extend to choicebox
                    for opt in i["options"]:
                        if opt["formname"] in request.params and request.params.getone(opt['formname']):
                                if 'enabled' in opt:
                                    for dat in opt['enabled']:
                                        if dat not in getdatasets:
                                            getdatasets.append(dat)
                                if 'required' in opt:
                                    for dat in opt['required']:
                                        if dat not in getdatasets:
                                            getdatasets.append(dat)
                                figstocapture[i['setenum']].extend(opt['included'])
                        else:
                            # unchecked option: mark its figures as excluded
                            for f in opt['included']:
                                figstocapture[i['setenum']].append('-'+f)
            except:
                pass
        session['figstocapture']=figstocapture
    session['datastreams']=getdatasets
        #if None in session['figstocapture']:
    picnicsession.storesession(session)
示例#13
0
def makeDPLFromNetCDF(session,netcdffilename):
    """Return a DPL framestream that replays a previously written session
    NetCDF file.

    session        -- picnic session dictionary used to resolve the file path
    netcdffilename -- filename relative to the session folder
    """
    from hsrl.dpl_experimental.dpl_read_templatenetcdf import dpl_read_templatenetcdf
    # Resolve the session-relative name to the on-disk path.
    path = picnicsession.sessionfile(session, netcdffilename)
    return dpl_read_templatenetcdf(path)
示例#14
0
def makeImagesFromDPL(session,DPLgen):
    """Run the DPL framestream through the image artists and save the
    requested figures as PNG files in the session folder.

    session -- picnic session dictionary; reads 'dataset', 'altmax',
               'datastreams' and 'figstocapture', and is updated with
               progress comments as work proceeds.
    DPLgen  -- DPL framestream generator to render.
    """
    picnicsession.updateSessionComment(session,'loading graphics artist')
    import hsrl.dpl_experimental.dpl_artists as artists
    import hsrl.utils.json_config as jc
    instrument=session['dataset']
    disp=jc.json_config(picnicsession.loadjson(session,'display_parameters.json'))
    params=jc.json_config(picnicsession.loadjson(session,'process_parameters.json'),'process_defaults')

    artistlist={}
    picnicsession.updateSessionComment(session,'creating artist')
    artist=artists.dpl_images_artist(framestream=DPLgen,instrument=session['dataset'],
        max_alt=session['altmax'],
        processing_defaults=params,
        display_defaults=disp)
    artistlist['hsrl']=artist
    if 'merge' in session['datastreams']:
        # Chain the radar artist onto the hsrl artist's output stream.
        artist=artists.dpl_radar_images_artist(framestream=artist,display_defaults=disp)
        artistlist['merge']=artist
    picnicsession.updateSessionComment(session,'processing')
    artist()
    picnicsession.updateSessionComment(session,'rendering figures')
    fignum=0

    # BUG FIX: the original called .copy() before testing for None, so a
    # session with figstocapture=None ("capture everything", see
    # parseNetCDFParameters) crashed with AttributeError and the None
    # branch was unreachable. Test for None first.
    if session['figstocapture'] is None:
        capturingfigsgroups=dict((k,[None]) for k in artistlist)
    else:
        capturingfigsgroups=session['figstocapture'].copy()

    for inst,capturingfigs in capturingfigsgroups.items():
        if inst not in artistlist:
            continue
        alreadycaptured=[]
        figs=artistlist[inst].figs
        # NOTE: capturingfigs is deliberately extended while being iterated;
        # a None placeholder expands into the artist's full figure list.
        for x in capturingfigs:
            # '#'-prefixed names are disabled entries and '-'-prefixed names
            # are explicit exclusions; skip both, and anything already saved.
            if x in alreadycaptured or (x is not None and (x.startswith('#') or x.startswith('-'))):
                continue
            alreadycaptured.append(x)
            if x is None:
                # None means "all figures": queue every figure this artist made.
                tmp=[f for f in figs]
                tmp.sort()
                capturingfigs.extend(tmp)
                continue
            figname=picnicsession.sessionfile(session,'figure%04i_%s_%s.png' % (fignum,inst,x))
            fignum=fignum+1
            picnicsession.updateSessionComment(session,'capturing '+inst+' figure ' + x)
            if x not in figs:
                # Requested figure was never rendered: leave an empty
                # placeholder file so downstream listing still finds it.
                # (open() replaces the Python-2-only file() builtin.)
                open(figname,'w').close()
                continue
            fig=figs.figure(x)
            fig.canvas.draw()
            fig.savefig(figname,format='png',bbox_inches='tight')
    picnicsession.updateSessionComment(session,'done')
示例#15
0
def makeDPLFromSession(session,doSearch=True):
    """Construct the DPL processing chain described by *session*.

    session  -- picnic session dictionary (instrument, time/altitude window,
                resolutions, requested datastreams, ...).
    doSearch -- when False, return (dplobj, searchparms) so the caller can
                start the search itself; when True, run the search and return
                a framestream wrapped in a PicnicProgressNarrator.
    """
    # session key -> dpl_hsrl constructor keyword
    copyToInit={
        'dataset':'instrument',
        'maxtimeslice':'maxtimeslice_timedelta',
        'data_request':'data_request',
        'lib_filetype':'filetype',
    }
    # session key -> search-call keyword
    copyToSearch={
        'starttime':'start_time_datetime',
        'endtime':'end_time_datetime',
        'altmin':'min_alt_m',
        'altmax':'max_alt_m',
        'timeres':'timeres_timedelta',
        'altres':'altres_m',
    }
    hasProgress=False
    from hsrl.dpl_experimental.dpl_hsrl import dpl_hsrl
    process_control=None
    if os.access(picnicsession.sessionfile(session,'process_parameters.json'),os.R_OK):
        # A stored parameter file overrides the library defaults.
        process_control=picnicsession.loadjson(session,'process_parameters.json')
        import hsrl.utils.json_config as jc
        process_control=jc.json_config(process_control,default_key='process_defaults')
    dplobj=dpl_hsrl(process_control=process_control,**fromSession(session,copyToInit))
    if not doSearch:
        return dplobj,fromSession(session,copyToSearch)
    searchparms=fromSession(session,copyToSearch)
    hsrlnar=dplobj(**searchparms)
    dplc=hsrlnar
    if 'merge' in session['datastreams']:#add merge to rs_mmcr, refit
        import hsrl.dpl_tools.time_frame as time_slicing
        import hsrl.dpl_tools.resample_altitude as altitude_resampling
        import hsrl.dpl_tools.substruct as frame_substruct
        from hsrl.dpl_netcdf.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import hsrl.dpl_netcdf.MMCRMergeLibrarian as mmcr
        import hsrl.utils.hsrl_array_utils as hau

        if session['dataset']=='ahsrl':
            mmcrzoo=GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
            mmcrlib=mmcr.MMCRMergeLibrarian(session['dataset'],['eurmmcrmerge.C1.c1.','nsaarscl1clothC1.c1.'],zoo=mmcrzoo)
        elif session['dataset']=='mf2hsrl':
            # NOTE(review): no zookeeper/librarian is built for mf2hsrl yet,
            # so the mmcrlib call below raises NameError for that dataset —
            # confirm whether mf2 merge is meant to be reachable.
            pass #set up zoo and lib for mf2
        # BUG FIX: the original searched the librarian with
        # end=searchparms['start_time_datetime'], i.e. a zero-length window;
        # use the session's end time so the radar data spans the request.
        mmcrnar=mmcr.MMCRMergeCorrector(mmcrlib(start=searchparms['start_time_datetime'],end=searchparms['end_time_datetime']))
        mmcrnar=mmcr.MMCRMergeBackscatterToReflectivity(altitude_resampling.ResampleXd(time_slicing.TimeGinsu(mmcrnar,'times'),'heights',dplc.altitudeAxis))

        hsrlnarsplitter=frame_substruct.SubstructBrancher(hsrlnar)
        hsrlinvnar=time_slicing.TimeGinsu(hsrlnarsplitter.narrateSubstruct('rs_inv'),'times')

        from dplkit.simple.blender import TimeInterpolatedMerge

        merge=TimeInterpolatedMerge(hsrlinvnar,[mmcrnar],allow_nans=True,channels=['heights','Reflectivity','MeanDopplerVelocity','Backscatter','SpectralWidth'])
        merge=frame_substruct.Retyper(merge,hau.Time_Z_Group,{'timevarname':'times','altname':'heights'})

        # Re-assemble the merged radar stream alongside the hsrl substructs,
        # keyed on rs_inv frames.
        dplc=frame_substruct.SubstructMerger('rs_inv',{
            'rs_mean':hsrlnarsplitter.narrateSubstruct('rs_mean'),
            'rs_raw':hsrlnarsplitter.narrateSubstruct('rs_raw'),
            'rs_inv':hsrlnarsplitter.narrateSubstruct('rs_inv'),
            'rs_mmcr':merge,
            'rs_init':hsrlnarsplitter.narrateSubstruct('rs_init'),
            'rs_static':hsrlnarsplitter.narrateSubstruct('rs_static'),
            'profiles':hsrlnarsplitter.narrateSubstruct('profiles',sparse=True),
            'rs_Cxx':hsrlnarsplitter.narrateSubstruct('rs_Cxx',sparse=True)
            }
        ,hau.Time_Z_Group)

    if not os.access(picnicsession.sessionfile(session,'process_parameters.json'),os.R_OK):
        # First run: persist the library's effective processing parameters.
        picnicsession.storejson(session,hsrlnar.get_process_control().json_representation(),'process_parameters.json')
    picnicsession.updateSessionComment(session,'processing with DPL')
    if hasProgress:
        return dplc
    return picnicsession.PicnicProgressNarrator(dplc,getLastOf('times',['rs_inv','rs_mean','rs_raw']),
        searchparms['start_time_datetime'],searchparms['end_time_datetime'],session)
示例#16
0
def parseNetCDFParameters(request,session):
    """Parse the NetCDF-generation form fields of *request* into *session*.

    Fills in time/altitude resolution, output template and filename, the
    selected NetCDF fields ('selected_fields'), the datastreams they require
    ('datastreams'), and the figure sets to capture ('figstocapture'), then
    persists the session to disk.
    """
    session['timeres']=float(request.params.getone('timeres'))
    session['altres']=float(request.params.getone('altres'))
    session['maxtimeslice_timedelta']=60*60*2  # process in 2-hour slices
    session['data_request']="images housekeeping"
    session['template']=request.params.getone('cdltemplatename')
    if session['template']=='custom':
        # User-uploaded CDL template: copy it into the session folder.
        fn=picnicsession.sessionfile(session,'template.cdl',create=True)
        # BUG FIX: use a context manager so the copy is flushed and closed
        # (the original file(fn,'w').write(...) leaked the open handle).
        with open(fn,'w') as tf:
            tf.write(request.params.getone('cdltemplate_content').file.read())
        session['template']=fn
    stf=datetime.strptime(session['starttime'],picnicsession.json_dateformat).strftime('_%Y%m%dT%H%M')
    etf=datetime.strptime(session['endtime'],picnicsession.json_dateformat).strftime('_%Y%m%dT%H%M')
    session['filesuffix']=('_%gs_%gm' % (session['timeres'],session['altres']))
    session['filemode']=request.params.getone('filemode')
    session['fileprefix']=session['dataset']+ '_' + session['filemode']
    session['filename']=session['dataset'] + stf + etf + session['filesuffix'] + '.nc'
    session['username']=request.params.getone('username')

    # Resolve which datasets the chosen method/instrument combination covers.
    datinfo=lib(**{session['method']:session[session['method']]})
    instruments=datinfo['Instruments']
    datasets=[]
    for inst in instruments:
        datasets.extend(lib.instrument(inst)['datasets'])

    fieldstocapture=[]
    if 'allfields' not in request.params or not request.params.getone('allfields'):
        # Walk the generated form description and collect every field whose
        # checkbox is ticked, plus the datastreams each selection needs.
        fieldsetlist=jsgen.formsetsForInstruments(datasets,'netcdf')
        getdatasets=[]
        for inst in fieldsetlist:#per instrument
            for subset in inst['sets']:
                subsetincluded=False
                for checkbox in subset['options']:
                    formname=checkbox['formname']
                    if formname not in request.params or not request.params.getone(formname):
                        continue
                    subsetincluded=True
                    for fieldname in checkbox['included']:
                        if fieldname not in fieldstocapture:
                            fieldstocapture.append(fieldname)
                    if 'enabled' in checkbox:
                        for dat in checkbox['enabled']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                    if 'required' in checkbox:
                        for dat in checkbox['required']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                if subsetincluded:
                    # Any ticked option pulls in the subset's own baseline
                    # fields and datastreams as well.
                    if 'included' in subset:
                        for fieldname in subset['included']:
                            if fieldname not in fieldstocapture:
                                fieldstocapture.append(fieldname)
                    if 'enabled' in subset:
                        for dat in subset['enabled']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
                    if 'required' in subset:
                        for dat in subset['required']:
                            if dat not in getdatasets:
                                getdatasets.append(dat)
    else:
        getdatasets=datasets
    session['datastreams']=getdatasets

    print(fieldstocapture)  # parenthesized: valid in both Python 2 and 3
    session['selected_fields']=fieldstocapture

    figstocapture={}

    if 'custom_display' not in request.params or request.params.getone('custom_display')=='default':
        imagesetlist=jsgen.formsetsForInstruments(datasets,'images')
        session['display_defaults_file']='all_plots.json'

        for i in imagesetlist:
            # Best-effort: image sets missing expected keys are skipped.
            try:
                defmode=i['default']
                figstocapture[i['setenum']]=[]
                for figname in i['sets'][defmode]['figs']:#get default images of default set
                    if 'image' in figname and figname not in figstocapture[i['setenum']]:
                        figstocapture[i['setenum']].append(figname)
                if 'options' in i:#and default options
                    for opt in i['options']:
                        if opt['default']:#checkbox default is true
                            # BUG FIX: the original iterated i['included'],
                            # which raised KeyError (silenced below) so the
                            # default checkboxes' figures were never added;
                            # the figure list lives on the option itself.
                            for figname in opt['included']:
                                if 'image' in figname and figname not in figstocapture[i['setenum']]:
                                    figstocapture[i['setenum']].append(figname)
            except (KeyError,TypeError):
                # Narrowed from a bare except: only tolerate malformed set
                # descriptions, not arbitrary failures.
                pass
    else:
        figstocapture=None
    session['figstocapture']=figstocapture
    session['lib_filetype']=None
    picnicsession.storesession(session)