Example #1
File: dispatch.py Project: CodaP/DplTools
def createnetcdf(request, session, isBackground):
    if isBackground in [True, None]:
        makeNetCDFFromDPL(session, makeDPLFromSession(session),
                          session['template'], session['filename'])
        tmp = []
        if session['figstocapture'] is not None:
            for k, v in session['figstocapture'].items():
                tmp.extend(v)
        if len(tmp) > 0:
            picnicsession.updateSessionComment(session, 'done. capturing images')
            readnetcdf(request, session, isBackground)
        else:
            picnicsession.updateSessionComment(session, 'done.')
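All of these examples thread a dict-like session object through picnicsession helpers. As a rough sketch only, the minimal keys createnetcdf touches directly might look like the stub below; the real key set is defined elsewhere in DplTools, so every value here is an assumption for illustration:

# Hypothetical session stub for illustration only; real sessions are built
# by picnicsession and carry many more fields than shown here.
session = {
    'template': 'hsrl_nomenclature.cdl',  # assumed netCDF template name
    'filename': 'output.nc',              # target netCDF file
    'figstocapture': None,                # no figure capture requested
    'selected_fields': [],                # consumed by makeNetCDFFromDPL
}
createnetcdf(request=None, session=session, isBackground=True)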
Example #2
File: dispatch.py Project: CodaP/DplTools
def makeNetCDFFromDPL(session, DPLgen, templatefilename, netcdffilename):
    picnicsession.updateSessionComment(session, 'loading artist')
    import hsrl.dpl_experimental.dpl_artists as artists

    picnicsession.updateSessionComment(session, 'opening blank netcdf file')
    ncfilename = picnicsession.sessionfile(session, netcdffilename, create=True)

    artist = artists.dpl_netcdf_artist(DPLgen, templatefilename, ncfilename,
                                       selected_bindings=session['selected_fields'])

    picnicsession.updateSessionComment(session, 'processing')

    findTimes = ['rs_raw', 'rs_mean', 'rs_inv']
    dumped = False

    for frame in artist:
        timewindow = 'blank'
        # dump the first non-empty frame so its structure can be inspected later
        if not dumped and frame is not None:
            doFrameDumpToFile(artist.provides, frame,
                              picnicsession.sessionfile(session, 'frame.json'))
            dumped = True
        # report the time span covered by whichever substructure carries times
        for f in findTimes:
            if hasattr(frame, f) and hasattr(getattr(frame, f), 'times') \
                    and len(getattr(frame, f).times) > 0:
                t = getattr(frame, f).times
                timewindow = t[0].strftime('%Y.%m.%d %H:%M') + ' - ' + t[-1].strftime('%Y.%m.%d %H:%M')

        picnicsession.updateSessionComment(session, 'appended data %s' % timewindow)

    doNcDumpToFile(ncfilename, picnicsession.sessionfile(session, 'output.cdl'))

    del artist
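The loop above relies on duck typing: a frame may or may not carry rs_raw/rs_mean/rs_inv substructures, each of which may carry a times array. A minimal, self-contained sketch of that scan follows; the frame classes here are invented purely to make the snippet runnable:

from datetime import datetime

class _Sub(object):
    def __init__(self, times):
        self.times = times

class _Frame(object):
    def __init__(self):
        # only rs_mean is present in this made-up frame
        self.rs_mean = _Sub([datetime(2013, 1, 1, 0, 0),
                             datetime(2013, 1, 1, 6, 0)])

def describe_window(frame, candidates=('rs_raw', 'rs_mean', 'rs_inv')):
    # same duck-typed scan as makeNetCDFFromDPL: the last matching
    # substructure with a non-empty times array wins
    window = 'blank'
    for name in candidates:
        sub = getattr(frame, name, None)
        times = getattr(sub, 'times', None) if sub is not None else None
        if times is not None and len(times) > 0:
            window = times[0].strftime('%Y.%m.%d %H:%M') + ' - ' \
                + times[-1].strftime('%Y.%m.%d %H:%M')
    return window

print(describe_window(_Frame()))  # 2013.01.01 00:00 - 2013.01.01 06:00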
Example #3
File: dispatch.py Project: CodaP/DplTools
def parseImageParametersBackground(request, session):
    picnicsession.updateSessionComment(session, 'setup')
    if 'custom_display' in request.params and request.params.getone('custom_display') != 'default':
        import hsrl.utils.json_config as jc
        disp = jc.json_config(picnicsession.loadjson(session, 'display_parameters.json'))
    else:
        import hsrl.data_stream.display_utilities as du
        (disp, conf) = du.get_display_defaults(session['display_defaults_file'])

    # a None figstocapture, or a None entry in any figure list, means "capture everything"
    allfigs = session['figstocapture'] is None
    if not allfigs:
        for k, v in session['figstocapture'].items():
            if None in v:
                allfigs = True
                break
    if not allfigs:  # explicit figure list: disable everything, then re-enable what was requested
        data_req = 'images'
        lib_filetype = 'data'
        for fi in disp.get_attrs():  # for each figure
            if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                disp.set_value(fi, 'enable', 0)
        for inst, figset in session['figstocapture'].items():
            for fi in disp.get_attrs():  # for each figure
                if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                    if fi in figset or ('#' + fi) in figset:  # if requested, enable it
                        disp.set_value(fi, 'enable', 1)
                        if not fi.endswith('_image') and inst == 'hsrl':
                            data_req = 'images housekeeping'
                            lib_filetype = None
                    elif ('-' + fi) in figset or ('-#' + fi) in figset:  # if explicitly disabled, disable it
                        disp.set_value(fi, 'enable', 0)
    else:  # capture all figures, minus any explicitly disabled ones
        if session['figstocapture'] is not None:
            for inst, figset in session['figstocapture'].items():
                for fi in disp.get_attrs():  # for each figure
                    if 'enable' in disp.get_labels(fi):  # if it can be enabled/disabled
                        if ('-' + fi) in figset or ('-#' + fi) in figset:  # if explicitly disabled, disable it
                            disp.set_value(fi, 'enable', 0)

        data_req = 'images housekeeping'
        lib_filetype = None

    picnicsession.storejson(session, disp.json_representation(), 'display_parameters.json')
    if 'data_request' not in session:
        session['data_request'] = data_req
    if 'lib_filetype' not in session:
        session['lib_filetype'] = lib_filetype
    picnicsession.storesession(session)
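The figure lists follow a small naming convention: a bare name or a '#'-prefixed name requests a figure, a '-' prefix disables it, and a None entry (or a None list) means "everything". The helper below is an invented condensation of those rules, not part of DplTools; it resolves the disable/enable overlap by letting an explicit disable win:

def figure_enabled(figname, figset):
    # Hypothetical one-figure summary of parseImageParametersBackground's rules.
    if figset is None:
        return True                                 # no list at all: capture everything
    if ('-' + figname) in figset or ('-#' + figname) in figset:
        return False                                # explicitly disabled
    if None in figset:
        return True                                 # None entry: capture everything else
    return figname in figset or ('#' + figname) in figset

print(figure_enabled('backscat_image', ['backscat_image']))         # True
print(figure_enabled('backscat_image', [None, '-backscat_image']))  # False
print(figure_enabled('depol_image', [None]))                        # True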
Example #4
File: dispatch.py Project: CodaP/DplTools
def makeMultiNetCDFFromDPL(session, DPL, DPLParms, templatefilename):
    picnicsession.updateSessionComment(session, 'loading artist')
    import hsrl.dpl_experimental.dpl_artists as artists
    ftpbase = os.getenv('FTPPATH', '/var/ftp/data')
    ftpurlbase = os.getenv('FTPURL', 'ftp://lidar.ssec.wisc.edu/data')
    if len(session['username']) == 0:
        print 'bad username'
        raise RuntimeError, 'Bad username'
    baseftpdir = picnicsession.safejoin(ftpbase, session['username'])
    sessiondir = picnicsession.safejoin(baseftpdir, session['sessionid'])
    try:
        os.mkdir(baseftpdir)
    except OSError:
        pass  # directory already exists
    try:
        os.mkdir(sessiondir)
    except OSError:
        pass  # directory already exists
    tarname = (session['fileprefix']
               + DPLParms['start_time_datetime'].strftime('_%Y%m%dT%H%M')
               + DPLParms['end_time_datetime'].strftime('_%Y%m%dT%H%M')
               + session['filesuffix'] + '_' + session['sessionid'] + '.tar.bz2')
    tarcompoutputfilename = picnicsession.safejoin(baseftpdir, tarname)
    session['ftpfolder'] = ftpurlbase + '/' + session['username'] + '/' + session['sessionid']
    session['ftpfile'] = ftpurlbase + '/' + session['username'] + '/' + tarname
    namer = artists.default_multi_netcdf_namer(sessiondir, session['fileprefix'],
                                               session['filesuffix'] + '.nc')
    times = artists.multi_netcdf_filewindow('start_time_datetime', 'end_time_datetime',
                                            DPLParms['start_time_datetime'],
                                            DPLParms['end_time_datetime'],
                                            session['filemode'])

    artist = artists.dpl_multi_netcdf_artist(DPL, DPLParms,
                                             template=templatefilename,
                                             filewindowgenerator=times,
                                             filename_maker=namer,
                                             selected_bindings=session['selected_fields'])

    picnicsession.updateSessionComment(session, 'processing')

    findTimes = ['rs_raw', 'rs_mean', 'rs_inv']
    for frame in artist:
        timewindow = 'blank'
        for f in findTimes:
            if hasattr(frame, f) and hasattr(getattr(frame, f), 'times') \
                    and len(getattr(frame, f).times) > 0:
                t = getattr(frame, f).times
                timewindow = t[0].strftime('%Y.%m.%d %H:%M') + ' - ' + t[-1].strftime('%Y.%m.%d %H:%M')

        picnicsession.updateSessionComment(session, 'appended data %s' % timewindow)

    del artist

    # compress the per-session directory into a .tar.bz2 in the user's FTP folder
    pid = os.fork()
    if pid == 0:
        os.execvp('tar', ('tar', '-jcvf', tarcompoutputfilename,
                          '--directory=' + baseftpdir, session['sessionid']))
    if pid < 0:
        raise RuntimeError, "compression failed due to fork"
    (pid, status) = os.waitpid(pid, 0)
    if os.WEXITSTATUS(status) != 0:
        raise RuntimeError, "Compression failed on error %i" % os.WEXITSTATUS(status)
Example #5
File: dispatch.py Project: CodaP/DplTools
def makeImagesFromDPL(session, DPLgen):
    picnicsession.updateSessionComment(session, 'loading graphics artist')
    import hsrl.dpl_experimental.dpl_artists as artists
    import hsrl.utils.json_config as jc
    instrument = session['dataset']
    disp = jc.json_config(picnicsession.loadjson(session, 'display_parameters.json'))
    params = jc.json_config(picnicsession.loadjson(session, 'process_parameters.json'),
                            'process_defaults')

    artistlist = {}
    picnicsession.updateSessionComment(session, 'creating artist')
    artist = artists.dpl_images_artist(framestream=DPLgen,
                                       instrument=session['dataset'],
                                       max_alt=session['altmax'],
                                       processing_defaults=params,
                                       display_defaults=disp)
    artistlist['hsrl'] = artist
    if 'merge' in session['datastreams']:
        artist = artists.dpl_radar_images_artist(framestream=artist,
                                                 display_defaults=disp)
        artistlist['merge'] = artist
    picnicsession.updateSessionComment(session, 'processing')
    artist()
    picnicsession.updateSessionComment(session, 'rendering figures')
    fignum = 0

    # check for None before copying: calling .copy() on a None
    # figstocapture would raise AttributeError
    capturingfigsgroups = session['figstocapture']
    if capturingfigsgroups is None:
        capturingfigsgroups = {}
        for k in artistlist.keys():
            capturingfigsgroups[k] = [None]
    else:
        capturingfigsgroups = capturingfigsgroups.copy()
    for inst, capturingfigs in capturingfigsgroups.items():
        if inst not in artistlist:
            continue
        alreadycaptured = []
        figs = artistlist[inst].figs
        for x in capturingfigs:
            if x in alreadycaptured or (x is not None and
                                        (x.startswith('#') or x.startswith('-'))):
                continue
            alreadycaptured.append(x)
            if x is None:
                # None means "all figures": append every figure name to the
                # list being iterated so later iterations capture each one
                tmp = [f for f in figs]
                tmp.sort()
                capturingfigs.extend(tmp)
                continue
            figname = picnicsession.sessionfile(session,
                                                'figure%04i_%s_%s.png' % (fignum, inst, x))
            fignum = fignum + 1
            picnicsession.updateSessionComment(session, 'capturing ' + inst + ' figure ' + x)
            if x not in figs:
                # requested figure was never drawn; leave an empty placeholder file
                f = open(figname, 'w')
                f.close()
                continue

            fig = figs.figure(x)
            fig.canvas.draw()
            fig.savefig(figname, format='png', bbox_inches='tight')
    picnicsession.updateSessionComment(session, 'done')
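The capture step itself is plain matplotlib: draw the canvas, then save the figure as a tightly-cropped PNG. A self-contained sketch of just that step, with invented figure contents:

import matplotlib
matplotlib.use('Agg')  # headless backend, as a background web worker would use
import matplotlib.pyplot as plt

fig = plt.figure()
plt.plot([0, 1, 2], [0, 1, 4])  # placeholder data
fig.canvas.draw()
fig.savefig('figure0000_hsrl_demo.png', format='png', bbox_inches='tight')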
Example #6
File: dispatch.py Project: CodaP/DplTools
def createmultinetcdf(request, session, isBackground):
    if isBackground in [True, None]:
        # doSearch=False returns the DPL object plus its search parameters,
        # which makeMultiNetCDFFromDPL consumes as DPL/DPLParms
        dpl, dplp = makeDPLFromSession(session, doSearch=False)
        makeMultiNetCDFFromDPL(session, dpl, dplp, session['template'])
        picnicsession.updateSessionComment(session, 'done.')
Example #7
File: dispatch.py Project: CodaP/DplTools
def makeDPLFromSession(session, doSearch=True):
    # session keys -> dpl_hsrl constructor arguments
    copyToInit = {
        'dataset': 'instrument',
        'maxtimeslice': 'maxtimeslice_timedelta',
        'data_request': 'data_request',
        'lib_filetype': 'filetype',
    }
    # session keys -> search (call) arguments
    copyToSearch = {
        'starttime': 'start_time_datetime',
        'endtime': 'end_time_datetime',
        'altmin': 'min_alt_m',
        'altmax': 'max_alt_m',
        'timeres': 'timeres_timedelta',
        'altres': 'altres_m',
    }
    hasProgress = False  # set True only if a progress narrator is already wired in upstream
    from hsrl.dpl_experimental.dpl_hsrl import dpl_hsrl
    process_control = None
    if os.access(picnicsession.sessionfile(session, 'process_parameters.json'), os.R_OK):
        process_control = picnicsession.loadjson(session, 'process_parameters.json')
        import hsrl.utils.json_config as jc
        process_control = jc.json_config(process_control, default_key='process_defaults')
    dplobj = dpl_hsrl(process_control=process_control,
                      **fromSession(session, copyToInit))
    if not doSearch:
        return dplobj, fromSession(session, copyToSearch)
    searchparms = fromSession(session, copyToSearch)
    hsrlnar = dplobj(**searchparms)
    dplc = hsrlnar
    if 'merge' in session['datastreams']:  # add merge to rs_mmcr, refit
        import hsrl.dpl_tools.time_frame as time_slicing
        import hsrl.dpl_tools.resample_altitude as altitude_resampling
        import hsrl.dpl_tools.substruct as frame_substruct
        from hsrl.dpl_netcdf.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import hsrl.dpl_netcdf.MMCRMergeLibrarian as mmcr
        import hsrl.utils.hsrl_array_utils as hau

        if session['dataset'] == 'ahsrl':
            mmcrzoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
            mmcrlib = mmcr.MMCRMergeLibrarian(session['dataset'],
                                              ['eurmmcrmerge.C1.c1.', 'nsaarscl1clothC1.c1.'],
                                              zoo=mmcrzoo)
        elif session['dataset'] == 'mf2hsrl':
            pass  # set up zoo and lib for mf2
        mmcrnar = mmcr.MMCRMergeCorrector(
            mmcrlib(start=searchparms['start_time_datetime'],
                    end=searchparms['end_time_datetime']))
        mmcrnar = mmcr.MMCRMergeBackscatterToReflectivity(
            altitude_resampling.ResampleXd(time_slicing.TimeGinsu(mmcrnar, 'times'),
                                           'heights', dplc.altitudeAxis))

        hsrlnarsplitter = frame_substruct.SubstructBrancher(hsrlnar)
        hsrlinvnar = time_slicing.TimeGinsu(hsrlnarsplitter.narrateSubstruct('rs_inv'), 'times')

        from dplkit.simple.blender import TimeInterpolatedMerge

        merge = TimeInterpolatedMerge(hsrlinvnar, [mmcrnar], allow_nans=True,
                                      channels=['heights', 'Reflectivity',
                                                'MeanDopplerVelocity', 'Backscatter',
                                                'SpectralWidth'])
        merge = frame_substruct.Retyper(merge, hau.Time_Z_Group,
                                        {'timevarname': 'times', 'altname': 'heights'})

        dplc = frame_substruct.SubstructMerger('rs_inv', {
            'rs_mean': hsrlnarsplitter.narrateSubstruct('rs_mean'),
            'rs_raw': hsrlnarsplitter.narrateSubstruct('rs_raw'),
            'rs_inv': hsrlnarsplitter.narrateSubstruct('rs_inv'),
            'rs_mmcr': merge,
            'rs_init': hsrlnarsplitter.narrateSubstruct('rs_init'),
            'rs_static': hsrlnarsplitter.narrateSubstruct('rs_static'),
            'profiles': hsrlnarsplitter.narrateSubstruct('profiles', sparse=True),
            'rs_Cxx': hsrlnarsplitter.narrateSubstruct('rs_Cxx', sparse=True),
        }, hau.Time_Z_Group)

    if not os.access(picnicsession.sessionfile(session, 'process_parameters.json'), os.R_OK):
        picnicsession.storejson(session,
                                hsrlnar.get_process_control().json_representation(),
                                'process_parameters.json')
    picnicsession.updateSessionComment(session, 'processing with DPL')
    if hasProgress:
        return dplc
    return picnicsession.PicnicProgressNarrator(
        dplc, getLastOf('times', ['rs_inv', 'rs_mean', 'rs_raw']),
        searchparms['start_time_datetime'], searchparms['end_time_datetime'],
        session)
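The fromSession helper is not shown on this page. From the way it is called, it evidently renames session keys into keyword arguments using the copyToInit/copyToSearch tables. A guessed minimal implementation consistent with that usage, not the DplTools original:

def fromSession(session, mapping):
    # Hypothetical reconstruction: build kwargs by renaming
    # session[key] -> {newname: value} for each key present, matching how
    # makeDPLFromSession calls dpl_hsrl(**fromSession(session, copyToInit)).
    out = {}
    for key, newname in mapping.items():
        if key in session:
            out[newname] = session[key]
    return out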