Example #1
0
def parseImageParametersBackground(request,session):
    """Resolve which figures the session should render and persist the result.

    Builds a display-defaults structure (custom JSON when the request asks for
    'custom_display', otherwise the named defaults file), enables/disables
    individual figures according to session['figstocapture'], then stores
    display_parameters.json and fills in session['data_request'] /
    session['lib_filetype'] when they are not already set.
    """
    picnicsession.updateSessionComment(session,'setup')
    if 'custom_display' in request.params and request.params.getone('custom_display')!='default':
        import hsrl.utils.json_config as jc
        disp=jc.json_config(picnicsession.loadjson(session,'display_parameters.json'))#session['display_defaults'])
    else:
        import hsrl.data_stream.display_utilities as du
        (disp,conf)=du.get_display_defaults(session['display_defaults_file'])

    #else:#fixme should this be enable_all()?
    #    (disp,conf)=du.get_display_defaults('web_plots.json')
    # None (either the whole structure, or inside any instrument's figure
    # list) means "capture everything".
    allfigs=session['figstocapture'] is None
    if not allfigs:
        for k,v in session['figstocapture'].items():
            if None in v:
                allfigs=True
                break
    if not allfigs: # None indicates all should be captured, so if its not, scan structure
        data_req='images'
        lib_filetype='data'
        for fi in disp.get_attrs(): # for each figure
            if 'enable' in disp.get_labels(fi): # if it can be enabled/disabled
                disp.set_value(fi,'enable',0)  # start with everything off
        for inst,figset in session['figstocapture'].items():
            for fi in disp.get_attrs(): # for each figure
                if 'enable' in disp.get_labels(fi): # if it can be enabled/disabled
                    if fi in figset or ('#'+fi) in figset: #if requested, enable it
                        disp.set_value(fi,'enable',1)
                        # non-image hsrl figures need housekeeping data too
                        if not fi.endswith('_image') and inst=='hsrl':
                            data_req='images housekeeping'
                            lib_filetype=None
                    elif ('-'+fi) in figset or ('-#'+fi) in figset:#if explicitly disabled, disable it
                        disp.set_value(fi,'enable',0)
    else:
        # capturing everything: only apply the explicit '-' disables
        if session['figstocapture'] is not None:
          for inst,figset in session['figstocapture'].items():
            for fi in disp.get_attrs(): # for each figure
                if 'enable' in disp.get_labels(fi): # if it can be enabled/disabled
                    if ('-'+fi) in figset or ('-#'+fi) in figset:#if explicitly disabled, disable it
                        disp.set_value(fi,'enable',0)

        data_req= 'images housekeeping'
        lib_filetype=None

    picnicsession.storejson(session,disp.json_representation(),'display_parameters.json')
    # only fill in defaults; values already on the session win
    if 'data_request' not in session:
        session['data_request']=data_req
    if 'lib_filetype' not in session:
        session['lib_filetype']=lib_filetype
    picnicsession.storesession(session)
Example #2
0
def makeImagesFromDPL(session, DPLgen):
    """Drive the DPL frame stream through image artists and save PNGs.

    Creates an hsrl images artist (plus a radar artist when 'merge' is in
    session['datastreams']), runs the whole stream, then captures the
    requested figures into figureNNNN_<inst>_<name>.png files in the
    session folder.
    """
    picnicsession.updateSessionComment(session, 'loading graphics artist')
    import hsrl.dpl_experimental.dpl_artists as artists
    import hsrl.utils.json_config as jc
    instrument = session['dataset']
    disp = jc.json_config(
        picnicsession.loadjson(
            session, 'display_parameters.json'))  #session['display_defaults'])
    params = jc.json_config(
        picnicsession.loadjson(session, 'process_parameters.json'),
        'process_defaults')

    artistlist = {}
    picnicsession.updateSessionComment(session, 'creating artist')
    artist = artists.dpl_images_artist(framestream=DPLgen,
                                       instrument=session['dataset'],
                                       max_alt=session['altmax'],
                                       processing_defaults=params,
                                       display_defaults=disp)
    artistlist['hsrl'] = artist
    if 'merge' in session['datastreams']:
        artist = artists.dpl_radar_images_artist(framestream=artist,
                                                 display_defaults=disp)
        artistlist['merge'] = artist
    picnicsession.updateSessionComment(session, 'processing')
    artist()  # exhaust the frame stream; this renders all enabled figures
    picnicsession.updateSessionComment(session, 'rendering figures')
    fignum = 0

    # BUGFIX: the original called .copy() before its None check, raising
    # AttributeError whenever figstocapture was None ("capture everything").
    figrequest = session['figstocapture']
    if figrequest is None:
        # request every figure from every artist
        capturingfigsgroups = dict((k, [None]) for k in artistlist)
    else:
        capturingfigsgroups = figrequest.copy()
    for inst, capturingfigs in capturingfigsgroups.items():
        if inst not in artistlist:
            continue
        alreadycaptured = []
        figs = artistlist[inst].figs
        for x in capturingfigs:
            # skip duplicates, '#' comment entries and '-' explicit disables
            if x in alreadycaptured or (x is not None and
                                        (x.startswith('#')
                                         or x.startswith('-'))):
                continue
            alreadycaptured.append(x)
            if x is None:
                # expand "all figures": append the sorted figure names to
                # the list currently being iterated (safe for a Python list)
                capturingfigs.extend(sorted(figs))
                continue
            figname = picnicsession.sessionfile(
                session, 'figure%04i_%s_%s.png' % (fignum, inst, x))
            fignum = fignum + 1
            picnicsession.updateSessionComment(
                session, 'capturing ' + inst + ' figure ' + x)
            if x not in figs:
                # requested figure wasn't produced: leave an empty
                # placeholder (open() replaces the py2-only file() builtin)
                open(figname, 'w').close()
                continue

            fig = figs.figure(x)
            fig.canvas.draw()
            fig.savefig(figname, format='png', bbox_inches='tight')
    picnicsession.updateSessionComment(session, 'done')
Example #3
0
def makeDPLFromSession(session, doSearch=True):
    """Construct the DPL processing chain described by a picnic session.

    Builds a dpl_hsrl object from session parameters and, when doSearch is
    False, returns (dplobj, searchparms).  Otherwise runs the search and,
    if 'merge' is in session['datastreams'], splices an MMCR radar merge
    into the frame stream, then returns a progress-narrating stream.
    """
    # session key -> dpl_hsrl constructor keyword
    copyToInit = {
        'dataset': 'instrument',
        'maxtimeslice': 'maxtimeslice_timedelta',
        'data_request': 'data_request',
        'lib_filetype': 'filetype',
    }
    # session key -> search keyword
    copyToSearch = {
        'starttime': 'start_time_datetime',
        'endtime': 'end_time_datetime',
        'altmin': 'min_alt_m',
        'altmax': 'max_alt_m',
        'timeres': 'timeres_timedelta',
        'altres': 'altres_m',
    }
    hasProgress = False
    from hsrl.dpl_experimental.dpl_hsrl import dpl_hsrl
    process_control = None
    if os.access(picnicsession.sessionfile(session, 'process_parameters.json'),
                 os.R_OK):
        process_control = picnicsession.loadjson(session,
                                                 'process_parameters.json')
        import hsrl.utils.json_config as jc
        process_control = jc.json_config(process_control,
                                         default_key='process_defaults')
    dplobj = dpl_hsrl(process_control=process_control,
                      **fromSession(session, copyToInit))
    if not doSearch:
        return dplobj, fromSession(session, copyToSearch)
    searchparms = fromSession(session, copyToSearch)
    hsrlnar = dplobj(**searchparms)
    dplc = hsrlnar
    if 'merge' in session['datastreams']:  #add merge to rs_mmcr, refit
        import hsrl.dpl_tools.time_frame as time_slicing
        import hsrl.dpl_tools.resample_altitude as altitude_resampling
        import hsrl.dpl_tools.substruct as frame_substruct
        from hsrl.dpl_netcdf.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import hsrl.dpl_netcdf.MMCRMergeLibrarian as mmcr
        import hsrl.utils.hsrl_array_utils as hau

        mmcrzoo = None
        mmcrlib = None
        if session['dataset'] == 'ahsrl':
            mmcrzoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
            mmcrlib = mmcr.MMCRMergeLibrarian(
                session['dataset'],
                ['eurmmcrmerge.C1.c1.', 'nsaarscl1clothC1.c1.'],
                zoo=mmcrzoo)
        elif session['dataset'] == 'mf2hsrl':
            pass  # TODO: set up zoo and lib for mf2
        if mmcrlib is None:
            # previously this fell through to a NameError on mmcrlib;
            # fail with an explicit message instead
            raise RuntimeError('no MMCR merge source configured for dataset '
                               + str(session['dataset']))
        # BUGFIX: the search previously passed start_time_datetime as both
        # start and end, yielding an empty radar time window.
        mmcrnar = mmcr.MMCRMergeCorrector(
            mmcrlib(start=searchparms['start_time_datetime'],
                    end=searchparms['end_time_datetime']))
        mmcrnar = mmcr.MMCRMergeBackscatterToReflectivity(
            altitude_resampling.ResampleXd(
                time_slicing.TimeGinsu(mmcrnar, 'times'), 'heights',
                dplc.altitudeAxis))

        hsrlnarsplitter = frame_substruct.SubstructBrancher(hsrlnar)
        hsrlinvnar = time_slicing.TimeGinsu(
            hsrlnarsplitter.narrateSubstruct('rs_inv'), 'times')  #,isEnd=True)

        from dplkit.simple.blender import TimeInterpolatedMerge

        merge = TimeInterpolatedMerge(hsrlinvnar, [mmcrnar],
                                      allow_nans=True,
                                      channels=[
                                          'heights', 'Reflectivity',
                                          'MeanDopplerVelocity', 'Backscatter',
                                          'SpectralWidth'
                                      ])
        merge = frame_substruct.Retyper(merge, hau.Time_Z_Group, {
            'timevarname': 'times',
            'altname': 'heights'
        })

        # reassemble the branched substreams plus the radar merge into one
        # frame stream keyed on rs_inv timing
        dplc = frame_substruct.SubstructMerger(
            'rs_inv', {
                'rs_mean': hsrlnarsplitter.narrateSubstruct('rs_mean'),
                'rs_raw': hsrlnarsplitter.narrateSubstruct('rs_raw'),
                'rs_inv': hsrlnarsplitter.narrateSubstruct('rs_inv'),
                'rs_mmcr': merge,
                'rs_init': hsrlnarsplitter.narrateSubstruct('rs_init'),
                'rs_static': hsrlnarsplitter.narrateSubstruct('rs_static'),
                'profiles': hsrlnarsplitter.narrateSubstruct('profiles',
                                                             sparse=True),
                'rs_Cxx': hsrlnarsplitter.narrateSubstruct('rs_Cxx',
                                                           sparse=True)
            }, hau.Time_Z_Group)

    # persist the process control actually used, if none was stored yet
    if not os.access(
            picnicsession.sessionfile(session, 'process_parameters.json'),
            os.R_OK):
        picnicsession.storejson(
            session,
            hsrlnar.get_process_control().json_representation(),
            'process_parameters.json')
    picnicsession.updateSessionComment(session, 'processing with DPL')
    if hasProgress:
        return dplc
    return picnicsession.PicnicProgressNarrator(
        dplc, getLastOf('times', ['rs_inv', 'rs_mean', 'rs_raw']),
        searchparms['start_time_datetime'], searchparms['end_time_datetime'],
        session)
Example #4
0
def parseImageParametersBackground(request, session):
    """Resolve which figures the session should render and persist the result.

    Builds a display-defaults structure (custom JSON when the request asks
    for 'custom_display', otherwise the named defaults file), toggles figure
    'enable' flags from session['figstocapture'], then stores
    display_parameters.json and fills in session['data_request'] /
    session['lib_filetype'] when not already set.
    """
    picnicsession.updateSessionComment(session, 'setup')
    if 'custom_display' in request.params and request.params.getone(
            'custom_display') != 'default':
        import hsrl.utils.json_config as jc
        disp = jc.json_config(
            picnicsession.loadjson(
                session,
                'display_parameters.json'))  #session['display_defaults'])
    else:
        import hsrl.data_stream.display_utilities as du
        (disp,
         conf) = du.get_display_defaults(session['display_defaults_file'])

    #else:#fixme should this be enable_all()?
    #    (disp,conf)=du.get_display_defaults('web_plots.json')
    # None (the whole structure, or inside any instrument's figure list)
    # means "capture everything".
    allfigs = session['figstocapture'] is None
    if not allfigs:
        for k, v in session['figstocapture'].items():
            if None in v:
                allfigs = True
                break
    if not allfigs:  # None indicates all should be captured, so if its not, scan structure
        data_req = 'images'
        lib_filetype = 'data'
        for fi in disp.get_attrs():  # for each figure
            if 'enable' in disp.get_labels(
                    fi):  # if it can be enabled/disabled
                disp.set_value(fi, 'enable', 0)  # start with everything off
        for inst, figset in session['figstocapture'].items():
            for fi in disp.get_attrs():  # for each figure
                if 'enable' in disp.get_labels(
                        fi):  # if it can be enabled/disabled
                    if fi in figset or (
                            '#' + fi) in figset:  #if requested, enable it
                        disp.set_value(fi, 'enable', 1)
                        # non-image hsrl figures need housekeeping data too
                        if not fi.endswith('_image') and inst == 'hsrl':
                            data_req = 'images housekeeping'
                            lib_filetype = None
                    elif ('-' + fi) in figset or (
                            '-#' +
                            fi) in figset:  #if explicitly disabled, disable it
                        disp.set_value(fi, 'enable', 0)
    else:
        # capturing everything: only apply the explicit '-' disables
        if session['figstocapture'] is not None:
            for inst, figset in session['figstocapture'].items():
                for fi in disp.get_attrs():  # for each figure
                    if 'enable' in disp.get_labels(
                            fi):  # if it can be enabled/disabled
                        if ('-' + fi) in figset or (
                                '-#' + fi
                        ) in figset:  #if explicitly disabled, disable it
                            disp.set_value(fi, 'enable', 0)

        data_req = 'images housekeeping'
        lib_filetype = None

    picnicsession.storejson(session, disp.json_representation(),
                            'display_parameters.json')
    # only fill in defaults; values already on the session win
    if 'data_request' not in session:
        session['data_request'] = data_req
    if 'lib_filetype' not in session:
        session['lib_filetype'] = lib_filetype
    picnicsession.storesession(session)
Example #5
0
def makeImagesFromDPL(session,DPLgen):
    """Drive the DPL frame stream through image artists and save PNGs.

    Creates an hsrl images artist (plus a radar artist when 'merge' is in
    session['datastreams']), runs the stream, then captures the requested
    figures into figureNNNN_<inst>_<name>.png files in the session folder.
    """
    picnicsession.updateSessionComment(session,'loading graphics artist')
    import hsrl.dpl_experimental.dpl_artists as artists
    import hsrl.utils.json_config as jc
    instrument=session['dataset']
    disp=jc.json_config(picnicsession.loadjson(session,'display_parameters.json'))#session['display_defaults'])
    params=jc.json_config(picnicsession.loadjson(session,'process_parameters.json'),'process_defaults')

    artistlist={}
    picnicsession.updateSessionComment(session,'creating artist')
    artist=artists.dpl_images_artist(framestream=DPLgen,instrument=session['dataset'],
        max_alt=session['altmax'],
        processing_defaults=params,
        display_defaults=disp)
    artistlist['hsrl']=artist
    if 'merge' in session['datastreams']:
        artist=artists.dpl_radar_images_artist(framestream=artist,display_defaults=disp)
        artistlist['merge']=artist
    picnicsession.updateSessionComment(session,'processing')
    artist()  # exhaust the frame stream; this renders all enabled figures
    picnicsession.updateSessionComment(session,'rendering figures')
    fignum=0

    # BUGFIX: the original called .copy() before its None check, raising
    # AttributeError whenever figstocapture was None ("capture everything").
    figrequest=session['figstocapture']
    if figrequest is None:
        capturingfigsgroups=dict((k,[None]) for k in artistlist)
    else:
        capturingfigsgroups=figrequest.copy()
    for inst,capturingfigs in capturingfigsgroups.items():
        if inst not in artistlist:
            continue
        alreadycaptured=[]
        figs=artistlist[inst].figs
        for x in capturingfigs:
            # skip duplicates, '#' comment entries and '-' explicit disables
            if x in alreadycaptured or (x is not None and (x.startswith('#') or x.startswith('-'))):
                continue
            alreadycaptured.append(x)
            if x is None:
                # expand "all figures": append sorted figure names to the
                # list currently being iterated (safe for a Python list)
                capturingfigs.extend(sorted(figs))
                continue
            figname=picnicsession.sessionfile(session,'figure%04i_%s_%s.png' % (fignum,inst,x))
            fignum=fignum+1
            picnicsession.updateSessionComment(session,'capturing '+inst+' figure ' + x)
            if x not in figs:
                # requested figure wasn't produced: leave an empty
                # placeholder (open() replaces the py2-only file() builtin)
                open(figname,'w').close()
                continue
            fig=figs.figure(x)
            fig.canvas.draw()
            fig.savefig(figname,format='png',bbox_inches='tight')
    picnicsession.updateSessionComment(session,'done')
Example #6
0
def makeDPLFromSession(session,doSearch=True):
    """Construct the DPL processing chain described by a picnic session.

    Builds a dpl_hsrl object from session parameters and, when doSearch is
    False, returns (dplobj, searchparms).  Otherwise runs the search and,
    if 'merge' is in session['datastreams'], splices an MMCR radar merge
    into the frame stream, then returns a progress-narrating stream.
    """
    # session key -> dpl_hsrl constructor keyword
    copyToInit={
        'dataset':'instrument',
        'maxtimeslice':'maxtimeslice_timedelta',
        'data_request':'data_request',
        'lib_filetype':'filetype',
    }
    # session key -> search keyword
    copyToSearch={
        'starttime':'start_time_datetime',
        'endtime':'end_time_datetime',
        'altmin':'min_alt_m',
        'altmax':'max_alt_m',
        'timeres':'timeres_timedelta',
        'altres':'altres_m',
    }
    hasProgress=False
    from hsrl.dpl_experimental.dpl_hsrl import dpl_hsrl
    process_control=None
    if os.access(picnicsession.sessionfile(session,'process_parameters.json'),os.R_OK):
        process_control=picnicsession.loadjson(session,'process_parameters.json')
        import hsrl.utils.json_config as jc
        process_control=jc.json_config(process_control,default_key='process_defaults')
    dplobj=dpl_hsrl(process_control=process_control,**fromSession(session,copyToInit))
    if not doSearch:
        return dplobj,fromSession(session,copyToSearch)
    searchparms=fromSession(session,copyToSearch)
    hsrlnar=dplobj(**searchparms)
    dplc=hsrlnar
    if 'merge' in session['datastreams']:#add merge to rs_mmcr, refit
        import hsrl.dpl_tools.time_frame as time_slicing
        import hsrl.dpl_tools.resample_altitude as altitude_resampling
        import hsrl.dpl_tools.substruct as frame_substruct
        from hsrl.dpl_netcdf.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import hsrl.dpl_netcdf.MMCRMergeLibrarian as mmcr
        import hsrl.utils.hsrl_array_utils as hau

        mmcrzoo=None
        mmcrlib=None
        if session['dataset']=='ahsrl':
            mmcrzoo=GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
            mmcrlib=mmcr.MMCRMergeLibrarian(session['dataset'],['eurmmcrmerge.C1.c1.','nsaarscl1clothC1.c1.'],zoo=mmcrzoo)
        elif session['dataset']=='mf2hsrl':
            pass # TODO: set up zoo and lib for mf2
        if mmcrlib is None:
            # previously this fell through to a NameError on mmcrlib;
            # fail with an explicit message instead
            raise RuntimeError('no MMCR merge source configured for dataset '+str(session['dataset']))
        # BUGFIX: the search previously passed start_time_datetime as both
        # start and end, yielding an empty radar time window.
        mmcrnar=mmcr.MMCRMergeCorrector(mmcrlib(start=searchparms['start_time_datetime'],end=searchparms['end_time_datetime']))
        mmcrnar=mmcr.MMCRMergeBackscatterToReflectivity(altitude_resampling.ResampleXd(time_slicing.TimeGinsu(mmcrnar,'times'),'heights',dplc.altitudeAxis))

        hsrlnarsplitter=frame_substruct.SubstructBrancher(hsrlnar)
        hsrlinvnar=time_slicing.TimeGinsu(hsrlnarsplitter.narrateSubstruct('rs_inv'),'times')#,isEnd=True)

        from dplkit.simple.blender import TimeInterpolatedMerge

        merge=TimeInterpolatedMerge(hsrlinvnar,[mmcrnar],allow_nans=True,channels=['heights','Reflectivity','MeanDopplerVelocity','Backscatter','SpectralWidth'])
        merge=frame_substruct.Retyper(merge,hau.Time_Z_Group,{'timevarname':'times','altname':'heights'})

        # reassemble the branched substreams plus the radar merge into one
        # frame stream keyed on rs_inv timing
        dplc=frame_substruct.SubstructMerger('rs_inv',{
            'rs_mean':hsrlnarsplitter.narrateSubstruct('rs_mean'),
            'rs_raw':hsrlnarsplitter.narrateSubstruct('rs_raw'),
            'rs_inv':hsrlnarsplitter.narrateSubstruct('rs_inv'),
            'rs_mmcr':merge,
            'rs_init':hsrlnarsplitter.narrateSubstruct('rs_init'),
            'rs_static':hsrlnarsplitter.narrateSubstruct('rs_static'),
            'profiles':hsrlnarsplitter.narrateSubstruct('profiles',sparse=True),
            'rs_Cxx':hsrlnarsplitter.narrateSubstruct('rs_Cxx',sparse=True)
            }
        ,hau.Time_Z_Group)

    # persist the process control actually used, if none was stored yet
    if not os.access(picnicsession.sessionfile(session,'process_parameters.json'),os.R_OK):
        picnicsession.storejson(session,hsrlnar.get_process_control().json_representation(),'process_parameters.json')
    picnicsession.updateSessionComment(session,'processing with DPL')
    if hasProgress:
        return dplc
    return picnicsession.PicnicProgressNarrator(dplc,getLastOf('times',['rs_inv','rs_mean','rs_raw']),
        searchparms['start_time_datetime'],searchparms['end_time_datetime'],session)