def __init__(self, instrument, *args, **kwargs):
    super(self.__class__, self).__init__(None)
    self.instrument = instrument
    import lg_base.core.open_config as oc
    self.oc = oc
    import lg_base.core.json_config as jc
    self.jc = jc
    from lg_base.core.locate_file import locate_file
    self.locate_file = locate_file
    import lg_base.core.array_utils as hau  # import T_Array,Z_Array,TZ_Array,Time_Z_Group
    self.hau = hau
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    import MarinemetLibrarian as mm
    if instrument in ('marinemet', 'mf2hsrl', 'mf2marinemet', 'magmarinemet'):
        self.instrument = 'marinemet'
        self.zoo = GenericTemplateRemapNetCDFZookeeper('marinemet')
        self.lib = mm.MarinemetLibrarian('mf2hsrl', ['magmarinemetM1'], zoo=self.zoo)
        self.instrumentbase = 'mf2hsrl'
    elif instrument in ('met', 'mf2met', 'tmpmet'):
        self.instrument = 'met'
        self.zoo = GenericTemplateRemapNetCDFZookeeper('met')
        self.lib = mm.MarinemetLibrarian('mf2hsrl', ['tmpmetM1'], zoo=self.zoo)
        self.instrumentbase = 'mf2hsrl'
    else:
        raise RuntimeError('Unknown met source ' + instrument)

def __init__(self, instrument, process_control=None):  # ,*args, **kwargs):
    super(self.__class__, self).__init__(None)
    self.instrument = instrument
    from lg_base.core.locate_file import locate_file
    self.process_control_file = locate_file(process_control or 'radar_processing_defaults.json',
                                            systemOnly=process_control is None)
    import lg_base.core.open_config as oc
    self.oc = oc
    import lg_base.core.json_config as jc
    self.jc = jc
    #import hsrl.utils.hsrl_array_utils as hau  # import T_Array,Z_Array,TZ_Array,Time_Z_Group
    #self.hau = hau
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    import RadarFilters as rf
    self.rf = rf
    #self.callableargs = kwargs
    if instrument in ('mmcr', 'ahsrl', 'ammcr'):
        import MMCRMergeLibrarian as mmcr
        self.instrument = 'mmcr'
        self.zoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
        self.lib = mmcr.MMCRMergeLibrarian('ahsrl', ['eurmmcrmerge.C1.c1.', 'nsaarscl1clothC1.c1.'], zoo=self.zoo)
        self.instrumentbase = 'ahsrl'
    elif instrument.endswith(('kazr', 'kazrge', 'kazrmd', 'mwacr', 'nshsrl', 'mf2hsrl')):
        allinsts = None
        patterns = None
        if instrument == 'kazr':  # TOO GENERIC
            print 'WARNING Specifying "kazr" is too generic. use tmpkazr, magkazr or nsakazr'
            instrument = 'mf2hsrl'  # assume this is default
        if instrument == 'mf2hsrl':
            instrument = 'mf2kazr'
        elif instrument == 'nshsrl':
            instrument = 'nskazr'
        if instrument.endswith('kazr'):
            instrument += 'ge'  # if unspecified, use ge
        self.instrument = instrument
        if instrument.startswith(('mag', 'tmp', 'mf2')):
            self.instrumentbase = 'mf2hsrl'
            suffix = 'M1.a1.'
        elif instrument.startswith(('nsa', 'ns')):
            self.instrumentbase = 'nshsrl'
            suffix = 'C1.a1.'
        else:
            raise RuntimeError('Unknown instrument base for ' + instrument)
        if allinsts is not None:
            patterns = [(p + suffix) for p in allinsts]
        else:
            patterns = [self.instrument + suffix]
        if 'kazr' in instrument:
            import KAZRLibrarian as kazr
            self.zoo = GenericTemplateRemapNetCDFZookeeper('kazr')
            self.lib = kazr.KAZRLibrarian(self.instrumentbase, self.instrument, patterns, zoo=self.zoo)
        elif 'mwacr' in instrument:
            import MWACRLibrarian as mwacr
            self.zoo = GenericTemplateRemapNetCDFZookeeper('mwacr')
            self.lib = mwacr.MWACRLibrarian(self.instrumentbase, self.instrument, patterns, zoo=self.zoo)
        else:
            raise RuntimeError('Unknown Librarian for source ' + instrument)
    else:
        raise RuntimeError('Unknown radar source ' + instrument)

def search(self, start, end, *args, **kwargs):
    """ Librarian Generator function
    extra parameters given here will be passed to the returned narrator's init
    """
    ret = ARMSondeNarrator(self, self.basedir, start, end)
    zoo = self.zoo
    if zoo is None and not kwargs.pop('filenames', False):
        from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        zoo = GenericTemplateRemapNetCDFZookeeper('arm_profiles')
    if zoo is not None:
        ret = ARMNarrator.ARMFileNarrator(ret, zoo, preYield=ret.preYield, *args, **kwargs)
    if self.requested_altitudes is not None:
        import atmospheric_profiles.dpl.dpl_temperature_profiles as dtp
        ret = dtp.dpl_radiosonderesample(
            ret, 'altitudes', self.requested_altitudes,
            {'temps': [50, 500],
             'pressures': [1, 1000],
             'dew_points': [150, 500],
             'frost_points': [50, 500],
             'altitudes': None})
    return ret

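# Usage sketch added for illustration (not part of the original module): the
# librarian object is callable, and calling it runs search() above and returns a
# narrator to iterate. The source path '/data/mf2hsrldata' and the altitude grid
# are assumptions chosen to match other examples in this listing.
def _example_arm_sonde_profiles():
    from datetime import datetime
    import numpy
    import atmospheric_profiles.dpl.arm_profiles as armp
    # ARMSondeLibrarian(sourcepath, requested_altitudes), as constructed elsewhere in this code base
    lib = armp.ARMSondeLibrarian('/data/mf2hsrldata', numpy.arange(0, 30000 + .1, 15))
    for profile in lib(datetime(2014, 6, 24, 0, 0, 0), datetime(2014, 6, 25, 0, 0, 0)):
        print vars(profile)
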
def __init__(self, instrument, *args, **kwargs):
    super(self.__class__, self).__init__(None)
    self.instrument = instrument
    import lg_base.core.open_config as oc
    self.oc = oc
    import lg_base.core.json_config as jc
    self.jc = jc
    from lg_base.core.locate_file import locate_file
    self.locate_file = locate_file
    #import hsrl.utils.hsrl_array_utils as hau  # import T_Array,Z_Array,TZ_Array,Time_Z_Group
    #self.hau = hau
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    if instrument in ('mf2hsrl', 'magpars2', 'magpars2S1', 'magpars2S2'):
        if instrument == 'mf2hsrl':
            instrument = 'magpars2'
        if instrument.endswith('pars2'):
            instrument += 'S1'  # if unspecified, use S1
        self.instrument = instrument
        suffix = '.b1.'
        if instrument.startswith('mag'):
            self.instrumentbase = 'mf2hsrl'
        else:
            raise RuntimeError('Unknown instrument base for ' + instrument)
        import PARSLibrarian as pars
        self.zoo = GenericTemplateRemapNetCDFZookeeper('pars')
        self.lib = pars.PARSLibrarian(self.instrumentbase, self.instrument, [self.instrument + suffix], zoo=self.zoo)
    else:
        raise RuntimeError('Unknown pars source ' + instrument)

def main():
    from datetime import datetime
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    zoo = None  # GenericTemplateRemapNetCDFZookeeper('mwacr')
    lib = MWACRLibrarian('/data/mf2hsrldata', 'magmwacr', 'magmwacrM1.a1.', zoo=zoo)
    zoo = GenericTemplateRemapNetCDFZookeeper('mwacr')
    m = lib(start=datetime(2013, 6, 21, 20, 0, 0), end=datetime(2013, 6, 25, 0, 0, 0))
    for f in m:
        #print 'from librarian:', f
        if zoo:
            res = zoo.open(zoo(uri=f))
            print 'uri from zoo:', res
        else:
            print f

def search(self, start, end, *args, **kwargs):
    """ Librarian Generator function
    extra parameters given here will be passed to the returned narrator's init
    """
    ret = RamanNarrator(self, self.basedir, self.datatype, start, end)
    zoo = self.zoo
    if zoo is None and not kwargs.pop('filenames', False):
        from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        zoo = GenericTemplateRemapNetCDFZookeeper('raman_' + self.datatype)
    if zoo is not None:
        ret = ARMNarrator.ARMFileNarrator(ret, zoo, preYield=ret.preYield, *args, **kwargs)
    return ret

def main():
    from datetime import datetime
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    zoo = GenericTemplateRemapNetCDFZookeeper('vdis')
    lib = VDisLibrarian('/data/mf2hsrldata', 'mf2vdis', 'mf2vdisM1', zoo=zoo)
    zoo = None
    m = lib(start=datetime(2014, 6, 24, 20, 0, 0), end=datetime(2014, 7, 24, 22, 0, 0))
    for f in m:
        #print 'from librarian:', f
        if zoo:
            res = zoo(uri=f)
            print 'uri from zoo:', res
        else:
            print f
            print vars(f)

def main():
    from dpl_temperature_profiles import dpl_virtualradiosonde
    import sys, os
    from datetime import datetime, timedelta
    import numpy
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    import hsrl.dpl.HSRLLibrarian as hsrllib
    from lg_dpl_toolbox.filters.time_frame import TimeGinsu
    import lg_dpl_toolbox.dpl.dpl_artists as dpl_artists

    datatype = sys.argv[1]
    filename = sys.argv[2]
    deltat = float(sys.argv[3])
    st = datetime.strptime(sys.argv[4], '%Y%m%dT%H%M%S')
    if len(sys.argv) < 6:
        import calendar
        monthrange = calendar.monthrange(st.year, st.month)
        monthdur = timedelta(days=monthrange[1])
        tmp = st + monthdur
        et = datetime(tmp.year, tmp.month, 1, 0, 0, 0)
        if et > datetime.utcnow():
            et = datetime.utcnow()
    else:
        et = datetime.strptime(sys.argv[5], '%Y%m%dT%H%M%S')
    #fields=['times','telescope_position','telescope_rotation','telescope_rotation_measured','telescope_elevation','telescope_accelerometer_raw']#,'superseedlasercontrollog','laserpowervalues']
    zoo = GenericTemplateRemapNetCDFZookeeper(datatype, user_read_mode='position', forModule=hsrllib)  # ,keepfields=fields)
    lib = hsrllib.HSRLLibrarian(instrument=datatype, zoo=zoo)  # site=16)#None,datatype)
    m = lib(start=st, end=et)  # ,filetype='data')
    m = TimeGinsu(m, 'times', None)
    vr_lib = dpl_virtualradiosonde('name', os.getenv('GRIB_CACHE', '/arcueid/data/grib_cache'),
                                   timedelta(minutes=deltat), numpy.arange(0, 30000 + .1, 15),
                                   do_interpolate=False)
    m = vr_lib(m, expire_duration=timedelta(minutes=deltat))
    art = dpl_artists.dpl_netcdf_artist(m, 'NWS_Profile_Archive.cdl', filename,
                                        format='NETCDF4', usecfradial=False)
    #art()
    for f in art:
        print vars(f)
        print f.temps.shape, f.temps

def main():
    import sys
    from datetime import datetime, timedelta
    import numpy
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    from hsrl.dpl.HSRLLibrarian import HSRLLibrarian
    from lg_dpl_toolbox.filters.time_frame import TimeGinsu
    # dpl_virtualradiosonde is expected to be provided by the enclosing module

    datatype = 'ahsrl' if len(sys.argv) < 2 else sys.argv[1]
    et = datetime.utcnow() if len(sys.argv) < 4 else datetime.strptime(sys.argv[3], '%Y%m%dT%H%M%S')
    st = (et - timedelta(days=.5)) if len(sys.argv) < 3 else datetime.strptime(sys.argv[2], '%Y%m%dT%H%M%S')
    #fields=['times','telescope_position','telescope_rotation','telescope_rotation_measured','telescope_elevation','telescope_accelerometer_raw']#,'superseedlasercontrollog','laserpowervalues']
    zoo = GenericTemplateRemapNetCDFZookeeper(datatype, user_read_mode='position')  # ,keepfields=fields)
    lib = HSRLLibrarian(instrument=datatype, zoo=zoo)  # site=16)#None,datatype)
    m = lib(start=st, end=et)  # ,filetype='data')
    m = TimeGinsu(m, 'times')
    vr_lib = dpl_virtualradiosonde('/home/jpgarcia/sonde/VirtualRadiosondeFromNWP/tmp_cache', timedelta(minutes=5))
    m = vr_lib(m, numpy.arange(0, 30000 + .1, 15), timedelta(minutes=5))
    for f in m:
        print vars(f)
        print f.temps.shape, f.temps

def read(self):
    """ main read generator """
    import hsrl.data_stream.hsrl_read_utilities as hru
    import hsrl.data_stream.input_translators as it
    import hsrl.data_stream.preprocess_raw as ppr
    import hsrl.data_stream.preprocess_level2 as ppl2
    params = self.params
    intervalTime = None
    intervalEnd = None
    zoo = self.zoo
    #if params['timeres'] != None and params['timeres'] < datetime.timedelta(seconds=float(self.cal_narr.hsrl_constants['integration_time'])):
    #    params['timeres'] = None  # pure native
    end_time_datetime = params['finalTime']
    #timemodoffset = time_mod(params['realStartTime'], params['timeres'])
    #noframe = 'noframe'
    cdf_to_hsrl = None
    preprocess_ave = None
    instrument = self.hsrl_instrument
    ntime_ave = 1
    streamratemult = int(os.getenv('DEBUG_RAW_FRAME_WIDTH', '50'))
    for calv in self.const_narr:  # self.timegen(): #self.cal_narr:
        if intervalTime is None:
            intervalTime = calv['chunk_start_time']
            if self.inclusive:
                intervalTime -= datetime.timedelta(seconds=5)
            intervalEnd = intervalTime
        chunk_end_to_use = calv['chunk_end_time']  # -time_mod(calv['chunk_end_time'],params['timeres'],timemodoffset)
        rs_constants = calv['rs_constants']
        #print 'old end',calv['chunk_end_time'],'vs new end',chunk_end_to_use,'mod base',params['timeres'],'offset',timemodoffset
        if calv['chunk_end_time'] == calv['chunk_start_time'] and end_time_datetime is None:
            if params['block_when_out_of_data']:
                if 'timeres' not in params or params['timeres'] is None:
                    sleep(rs_constants['integration_time'])
                else:
                    sleep(params['timeres'].total_seconds())
            else:
                yield None  # this is done to get out of here, and not get stuck in a tight loop
            continue
        if cdf_to_hsrl is None:
            cdf_to_hsrl = it.raw_translator(instrument, rs_constants, self.hsrl_corr_adjusts)
        else:
            cdf_to_hsrl.update_constants(rs_constants)
        while intervalTime < chunk_end_to_use:
            # BEGIN 'init' section that couldn't start without a constants set from calibration
            # initialize the preprocess and average class that operates on raw data
            # after it is read from the netcdf and before the main processing
            if zoo is None:
                if preprocess_ave is None:
                    # listed in reverse order
                    ntime_ave = 1
                    if 'quarter_wave_plate_rotation' in rs_constants and rs_constants['quarter_wave_plate_rotation'] == 'rotating':
                        ntime_ave = 1
                    elif 'timeres' in params and not params['timeres'] is None:
                        integration_time = rs_constants['integration_time']
                        ntime_ave = max(int(0.5 * params['timeres'].total_seconds() / integration_time), 1)
                    if ntime_ave != 1:
                        preprocess_ave = ppl2.preprocess_level2(instrument, preprocess_ave)
                        preprocess_ave = ppr.time_ave(ntime_ave, preprocess_ave)
                        preprocess_ave = ppr.time_frame(preprocess_ave)
                    else:
                        preprocess_ave = ppr.time_frame(preprocess_ave)
                from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
                zoo = GenericTemplateRemapNetCDFZookeeper(instrument, self.netcdf_defaults,
                                                          self.max_range_bin, preprocess_ave)
            # END init section
            if intervalEnd >= chunk_end_to_use:
                print 'Breaking calibration on endtime. raw ', intervalEnd, chunk_end_to_use, end_time_datetime
                break
            else:
                intervalEnd = chunk_end_to_use
            print ' new raw hsrl window is ', intervalTime, ' to ', intervalEnd
            if True:  # requested_times==None or requested_times.shape[0]>0:
                rs = None
                try:
                    for rs_raw in hru.fetch_data(  # FIXME this should use the dpl objects
                            instrument, intervalTime, intervalEnd,
                            self.max_range_bin, self.netcdf_defaults,
                            cdf_to_hsrl, dpl_librarian=self.lib, dpl_zookeeper=zoo):
                        print 'read in raw frame ', rs_raw
                        if rs_raw is not None and rs_raw.times.size > 0:
                            assert (rs_raw.times[-1] is not None)
                            if True:
                                for rs_raw1 in rs_raw.iterateAllTimes(ntime_ave * streamratemult):
                                    rs = hau.Time_Z_Group()  # can_append=False)
                                    setattr(rs, 'rs_raw', rs_raw1)
                                    yield rs
                            else:
                                rs = hau.Time_Z_Group()  # can_append=False)
                                setattr(rs, 'rs_raw', rs_raw)
                                yield rs
                            if rs_raw.times[-1] > intervalTime and rs_raw.times[-1] < intervalEnd:
                                intervalTime = rs_raw.times[-1]
                                if hasattr(rs_raw, 'delta_t') and rs_raw.delta_t.size > 0:
                                    intervalTime = intervalTime + datetime.timedelta(seconds=rs_raw.delta_t[-1])
                                else:
                                    print 'WARNING HSRL HAS NO DELTA_T'
                                    intervalTime = intervalTime + datetime.timedelta(seconds=.01)
                    intervalTime = intervalEnd
                    #if rs != None and hasattr(rs, 'profiles'):
                    #    delattr(rs, 'profiles')
                except Exception, e:
                    print 'Exception occured in raw reading'
                    print 'Exception = ', e
                    print traceback.format_exc()
                    if isinstance(e, (MemoryError,)):
                        print 'Please Adjust Your Parameters to be more Server-friendly and try again'
                        raise
                    if not isinstance(e, (AttributeError,)):
                        raise

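# Worked example of the pre-averaging factor computed in read() above, with
# illustrative values (30 s requested time resolution, 2.5 s integration time):
# ntime_ave = max(int(0.5 * 30 / 2.5), 1) = 6, so six raw shots are averaged
# before a frame is yielded. The helper name below is hypothetical.
def _example_ntime_ave(timeres_seconds=30.0, integration_time=2.5):
    # mirrors the expression in read(): max(int(0.5*timeres/integration_time), 1)
    return max(int(0.5 * timeres_seconds / integration_time), 1)
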
def make_default_sounding(self):
    import hsrl.dpl.calibration.dpl_dynamic_atmospheric_profile_narrator as ddapn
    return ddapn.dpl_dynamic_hsrl_atmospheric_profile_narrator(
        self.raman_platform, self.tables.timeinfo, self.requested_altitudes,
        edgepadding=timedelta(days=5),
        calvals=cru.cal_file_reader(self.raman_platform),
        soundingdatapath=self.raman_sourcepath)
    # NOTE: the early return above leaves the remainder of this method unreachable.
    import atmospheric_profiles.dpl.dpl_temperature_profiles as dtp  # import dpl_soundingarchive,dpl_virtualradiosonde
    interval_start_time = self.tables.timeinfo['starttime']
    interval_end_time = self.tables.timeinfo['endtime']
    rs_constants = self.raman_constants_first
    requested_altitudes = None if not hasattr(self, 'requested_altitudes') else self.requested_altitudes
    if rs_constants['sounding_type'] in ('virtual', 'remote_virtual', 'model', 'virtual_remote', 'virtual_cache'):
        raise NotImplementedError(rs_constants['sounding_type'])
        from hsrl.dpl.HSRLLibrarian import HSRLLibrarian
        from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        #import lg_dpl_toolbox.filters.time_frame as time_frame  # TimeGinsu
        tempzoo = GenericTemplateRemapNetCDFZookeeper(self.instrument, user_read_mode='position')  # ,keepfields=fields)
        templib = HSRLLibrarian(instrument=self.instrument, zoo=tempzoo)  # site=16)#None,datatype)
        m = templib(start=interval_start_time, end=interval_end_time)  # ,filetype='data')
        delt = 15  # 90
        if 'virtual_sounding_update_interval' in rs_constants:
            delt = rs_constants['virtual_sounding_update_interval']
        elif 'sounding_update_interval' in rs_constants:
            delt = rs_constants['sounding_update_interval']
        delt = timedelta(minutes=delt)
        gribcache = os.getenv('GRIB_CACHE', '/arcueid/data/grib_cache')
        try:
            os.makedirs(gribcache)
        except:
            pass
        if not os.path.exists(gribcache):
            raise RuntimeError("Can't Access GRIB Cache directory %s. If this isn't an expected error, set environment variable \"GRIB_CACHE\" to something better and try again." % gribcache)
        parameters = dict(requested_altitudes=requested_altitudes, remote=False, download=True)
        extrasearchparams = dict()
        if 'remote' in rs_constants['sounding_type']:
            parameters['remote'] = True
        if rs_constants['sounding_type'] in ('model', 'virtual_cache'):
            parameters['download'] = False
        if rs_constants['sounding_type'] == 'model':
            parameters['format'] = 'model.grib2'
            parameters['predict_horizon'] = 24 * 365
            if 'model_filename' in rs_constants:
                parameters['format'] = rs_constants['model_filename']
            print 'VRS Forecast model will use grib2 file at ' + os.path.join(gribcache, parameters['format'])
        if 'static_cal_time' in rs_constants and rs_constants['static_cal_time'] is not None and len(rs_constants['static_cal_time']) > 2:
            extrasearchparams['fixed_time'] = hru.convert_date_str(rs_constants['static_cal_time'])['datetime']
        if 'sounding_horizon' in rs_constants and rs_constants['sounding_horizon'] is not None:
            if 'predict_horizon' not in parameters or rs_constants['sounding_horizon'] > parameters['predict_horizon']:
                parameters['predict_horizon'] = rs_constants['sounding_horizon']
        if 'predict_horizon' in parameters:
            print "Predict horizon is ", parameters['predict_horizon'], 'hours'
            interval_start_time = interval_start_time - timedelta(hours=parameters['predict_horizon'])
        soundinglib = dtp.dpl_virtualradiosonde(rs_constants['sounding_type'], gribcache, delt, **parameters)
        if m.provides != None and 'latitude' in m.provides:
            print 'Got latitude stream'
            m = time_frame.TimeGinsu(m, 'times', None)
            soundingnarr = soundinglib(m, starttime=interval_start_time, **extrasearchparams)
        else:
            print 'no position stream/ using config'
            soundingnarr = soundinglib(starttime=interval_start_time, endtime=interval_end_time,
                                       fixed_position=(rs_constants['latitude'], rs_constants['longitude']),
                                       **extrasearchparams)
        ret = soundingnarr
    elif rs_constants['sounding_type'] in ('sparc', 'ssec'):
        import atmospheric_profiles.dpl.sparc_profiles as sparc  # import dpl_soundingarchive,dpl_virtualradiosonde
        soundinglib = sparc.SPARCSondeLibrarian(self.raman_sourcepath, requested_altitudes)
        ret = soundinglib(interval_start_time, interval_end_time)
    elif rs_constants['sounding_type'] in ('arm',):
        import atmospheric_profiles.dpl.arm_profiles as armp  # import dpl_soundingarchive,dpl_virtualradiosonde
        soundinglib = armp.ARMSondeLibrarian(self.raman_sourcepath, requested_altitudes)
        ret = soundinglib(interval_start_time, interval_end_time)
    else:
        expire_duration = None
        if 'sounding_update_interval' in rs_constants:
            expire_duration = timedelta(minutes=rs_constants['sounding_update_interval'])
        elif 'installation' not in rs_constants or rs_constants['installation'] == 'ground':
            expire_duration = timedelta(hours=1)
        elif rs_constants['installation'] == 'airborne':  # these are the defaults based on platform
            expire_duration = timedelta(minutes=5)
        elif rs_constants['installation'] == 'shipborne':
            expire_duration = timedelta(minutes=60)
        else:
            raise RuntimeError('Installation of ' + rs_constants['installation'] + ' in calvals is unknown')
        soundinglib = dtp.dpl_soundingarchive(self.raman_sourcepath,
                                              rs_constants['sounding_type'],
                                              rs_constants['sounding_id'],
                                              requested_altitudes,
                                              expire_duration=expire_duration)
        ret = soundinglib(interval_start_time, interval_end_time)
    if hasattr(self, 'requested_altitudes'):
        import lg_dpl_toolbox.filters.resample_altitude as altitude_resampling
        ret = altitude_resampling.ResampleXd(ret, 'altitudes', self.requested_altitudes)
    return ret

    if 'filetype' in kwargs:
        ft = kwargs['filetype']
        del kwargs['filetype']
    ret = HSRLFileNarrator(self, self.basedir, self.dataprefix, start, end, ft)
    if self.zoo is not None:
        ret = HSRLRawNarrator(ret, zoo=self.zoo, *args, **kwargs)
    return ret


if __name__ == '__main__':
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    datatype = 'ahsrl' if len(sys.argv) < 2 else sys.argv[1]
    et = datetime.utcnow() if len(sys.argv) < 4 else datetime.strptime(sys.argv[3], '%Y%m%dT%H%M%S')
    st = (et - timedelta(days=.5)) if len(sys.argv) < 3 else datetime.strptime(sys.argv[2], '%Y%m%dT%H%M%S')
    fields = ['times', 'telescope_position', 'telescope_rotation', 'telescope_rotation_measured',
              'telescope_elevation', 'telescope_accelerometer_raw']  # ,'superseedlasercontrollog','laserpowervalues']
    lib = HSRLLibrarian(instrument=datatype)  # site=16)#None,datatype)
    zoo = GenericTemplateRemapNetCDFZookeeper(datatype, keepfields=fields)
    m = lib(start=st, end=et)  # ,filetype='data')
    outf = file('telescopeaccel_' + st.strftime('%Y%m%d_%H%M%S') + '_' + et.strftime('%Y%m%d_%H%M%S') + '.csv', 'w')
    outf.write('seconds,pos,rot,arot,aele,ax,ay,az\n')  # current,voltage,LDD_Temp,2HG_Temp,LC_ksd,LD_ksd,Amb\n')
    for f in m:
        print 'from librarian:', f
        continue
        res = zoo(uri=f)
        print 'uri from zoo:', res
        r = zoo.open(res)
        print f
        print r.times
        print r
        for i in range(0, r.times.shape[0]):
            if r.times[i] < st or r.times[i] > et:

def main(inst, startdate, enddate, sig=None):
    history = dict()
    parts = (('thermal1', 'thermal2'), ('records', 'goodrecords', 'errorCount'))
    alltherms = list(makelist(parts).keys()) + ['times']
    began = datetime.utcnow()
    import hsrl.dpl.dpl_hsrl as dpl_hsrl
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    zoo = GenericTemplateRemapNetCDFZookeeper(inst, forModule=dpl_hsrl, keepfields=alltherms)
    dpllib = dpl_hsrl.dpl_hsrl(inst, zoo=zoo)
    dpl = dpllib(start_time_datetime=startdate, end_time_datetime=enddate,
                 min_alt_m=0, max_alt_m=500, forimage=False, raw_only=True,
                 with_profiles=False)
    dpl = RawFieldSelection(dpl, alltherms, subscope='rs_raw')
    for fr in dpl:
        appendContent(history, fr)
    print('timed at ' + repr(datetime.utcnow() - began))
    print(repr(history.keys()))
    #print(retlist)
    for x in alltherms:
        print(x)
        if 'errorCount' in x:
            history[x] = history[x].astype('float')
            history[x][history[x] < 0.0] = -1
            history[x][history[x] > 1e5] = -1
            print('%f %f' % (np.nanmin(history[x]), np.nanmax(history[x])))
        elif '_records' in x:
            history[x] = history[x].astype('float')
            history[x][history[x] <= 0] = -1
            history[x][history[x] > 1e7] = -1
        elif '_goodrecords' in x:
            history[x] = history[x].astype('float')
            history[x][history[x] <= 0] = 0
            history[x][history[x] > 1e7] = 0
    plt.figure()
    plt.title(inst + " error")
    starts = history['times']
    plt.plot(starts, history['thermal1_errorCount'], 'b',
             starts, history['thermal2_errorCount'], 'r')
    plt.legend(('thermal1', 'thermal2'))
    plt.grid()
    plt.figure()
    plt.title(inst + " badcount")
    plt.plot(starts, history['thermal1_records'] - history['thermal1_goodrecords'], 'b',
             starts, history['thermal2_records'] - history['thermal2_goodrecords'], 'r')
    plt.legend(('thermal1', 'thermal2'))
    plt.grid()
    print('with all graphs timed at ' + repr(datetime.utcnow() - began))
    if sig is not None:
        with sig:
            sig.notifyAll()
    plt.show()

def make_sounding(self, rs_constants, interval_start_time, interval_end_time,
                  isFirst=False, isLast=False):
    import atmospheric_profiles.dpl.dpl_temperature_profiles as dtp  # import dpl_soundingarchive,dpl_virtualradiosonde
    if rs_constants['sounding_type'] in ('virtual', 'remote_virtual', 'model',
                                         'virtual_remote', 'virtual_cache', 'virtual cache'):
        from hsrl.dpl.HSRLLibrarian import HSRLLibrarian
        from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
        import lg_dpl_toolbox.filters.time_frame as time_frame  # TimeGinsu
        m = None
        if 'gps_stream' not in rs_constants or rs_constants['gps_stream'] == 'hsrl':
            tempzoo = GenericTemplateRemapNetCDFZookeeper(self.instrument, user_read_mode='position')  # ,keepfields=fields)
            templib = HSRLLibrarian(instrument=self.instrument, zoo=tempzoo)  # site=16)#None,datatype)
            m = templib(start=interval_start_time, end=interval_end_time)  # ,filetype='data')
            m = time_frame.TimeGinsu(m, 'times', None)
        elif rs_constants['gps_stream'] == 'surfacemet':
            raise NotImplementedError('GPS stream from automatic surfacemet CSV not implemented')
        else:
            raise RuntimeError('VRS needs a GPS stream. ' + rs_constants['gps_stream'] + ' is not a known type')
        delt = 5  # 90
        #print 'default'
        if 'installation' not in rs_constants or rs_constants['installation'] == 'ground':
            #print 'ground default'
            delt = 60
        elif rs_constants['installation'] == 'shipborne':
            #print 'shipdefault'
            delt = 15
        if 'virtual_sounding_update_interval' in rs_constants:
            delt = rs_constants['virtual_sounding_update_interval']
        elif 'sounding_update_interval' in rs_constants:
            delt = rs_constants['sounding_update_interval']
        delt = timedelta(minutes=delt)
        if isFirst:
            interval_start_time = interval_start_time - delt * self.paddingends
        if isLast and interval_end_time is not None:
            interval_end_time = interval_end_time + delt * self.paddingends
        gribcache = os.getenv('GRIB_CACHE', '/arcueid/data/grib_cache')
        try:
            os.makedirs(gribcache)
        except:
            pass
        if not os.path.exists(gribcache):
            raise RuntimeError("Can't Access GRIB Cache directory %s. If this isn't an expected error, set environment variable \"GRIB_CACHE\" to something better and try again." % gribcache)
        parameters = dict(requested_altitudes=self.requested_altitudes, remote=False, download=True)
        extrasearchparams = dict()
        if 'remote' in rs_constants['sounding_type']:
            parameters['remote'] = True
        if rs_constants['sounding_type'] in ('model', 'virtual_cache', 'virtual cache'):
            parameters['download'] = False
        if rs_constants['sounding_type'] == 'model':
            parameters['format'] = 'model.grib2'
            parameters['predict_horizon'] = 24 * 365
            if 'model_filename' in rs_constants:
                parameters['format'] = rs_constants['model_filename']
            print 'VRS Forecast model will use grib2 file at ' + os.path.join(gribcache, parameters['format'])
        if 'static_cal_time' in rs_constants and rs_constants['static_cal_time'] is not None and len(rs_constants['static_cal_time']) > 2:
            import lg_base.core.read_utilities as hru
            extrasearchparams['fixed_time'] = hru.convert_date_str(rs_constants['static_cal_time'])['datetime']
        if 'sounding_horizon' in rs_constants and rs_constants['sounding_horizon'] is not None:
            if 'predict_horizon' not in parameters or rs_constants['sounding_horizon'] > parameters['predict_horizon']:
                parameters['predict_horizon'] = rs_constants['sounding_horizon']
        if 'predict_horizon' in parameters:
            print "Predict horizon is ", parameters['predict_horizon'], 'hours'
            interval_start_time = interval_start_time - timedelta(hours=parameters['predict_horizon'])
        print "VRS SAMPLING FREQUENCY: ", delt
        soundinglib = dtp.dpl_virtualradiosonde(rs_constants['sounding_type'], gribcache, delt, **parameters)
        if m.provides != None and 'latitude' in m.provides:
            print 'Got latitude stream'
            soundingnarr = soundinglib(m, starttime=interval_start_time, **extrasearchparams)
        else:
            fixed_position = (rs_constants['latitude'], rs_constants['longitude'])
            print 'no position stream/ using config', fixed_position
            soundingnarr = soundinglib(starttime=interval_start_time, endtime=interval_end_time,
                                       fixed_position=fixed_position, **extrasearchparams)
        ret = soundingnarr
    elif rs_constants['sounding_type'] in ('sparc', 'ssec'):
        import atmospheric_profiles.dpl.sparc_profiles as sparc  # import dpl_soundingarchive,dpl_virtualradiosonde
        delt = timedelta(hours=3)
        if isFirst:
            interval_start_time = interval_start_time - delt * self.paddingends
        if isLast and interval_end_time is not None:
            interval_end_time = interval_end_time + delt * self.paddingends
        soundinglib = sparc.SPARCSondeLibrarian(self.soundingdatapath or self.instrument, self.requested_altitudes)
        ret = soundinglib(interval_start_time, interval_end_time)
    elif rs_constants['sounding_type'] in ('arm',):
        import atmospheric_profiles.dpl.arm_profiles as armp  # import dpl_soundingarchive,dpl_virtualradiosonde
        delt = timedelta(hours=3)
        if isFirst:
            interval_start_time = interval_start_time - delt * self.paddingends
        if isLast and interval_end_time is not None:
            interval_end_time = interval_end_time + delt * self.paddingends
        soundinglib = armp.ARMSondeLibrarian(self.soundingdatapath or self.instrument, self.requested_altitudes)
        ret = soundinglib(interval_start_time, interval_end_time)
    elif rs_constants['sounding_type'] in ('NOAA raob',):
        import atmospheric_profiles.dpl.raob_profiles as raob  # import dpl_soundingarchive,dpl_virtualradiosonde
        delt = timedelta(hours=12)
        if isFirst:
            interval_start_time = interval_start_time - delt * self.paddingends
        if isLast and interval_end_time is not None:
            interval_end_time = interval_end_time + delt * self.paddingends
        soundinglib = raob.dpl_raob(self.soundingdatapath or self.instrument,
                                    rs_constants['sounding_id'], self.requested_altitudes)
        ret = soundinglib(interval_start_time, interval_end_time)
    else:
        expire_duration = None
        if 'sounding_update_interval' in rs_constants:
            expire_duration = timedelta(minutes=rs_constants['sounding_update_interval'])
        elif 'installation' not in rs_constants or rs_constants['installation'] == 'ground':
            expire_duration = timedelta(hours=1)
        elif rs_constants['installation'] == 'airborne':  # these are the defaults based on platform
            expire_duration = timedelta(minutes=5)
        elif rs_constants['installation'] == 'shipborne':
            expire_duration = timedelta(minutes=60)
        else:
            raise RuntimeError('Installation of ' + rs_constants['installation'] + ' in calvals is unknown')
        if expire_duration:
            if isFirst:
                interval_start_time = interval_start_time - expire_duration * self.paddingends
            if isLast and interval_end_time is not None:
                interval_end_time = interval_end_time + expire_duration * self.paddingends
        soundinglib = dtp.dpl_soundingarchive(self.soundingdatapath or self.instrument,
                                              rs_constants['sounding_type'],
                                              rs_constants['sounding_id'],
                                              self.requested_altitudes,
                                              expire_duration=expire_duration)
        ret = soundinglib(interval_start_time - timedelta(days=3), interval_end_time)
    return ret

    super(self.__class__, self).__init__()
    self.basedir = ARMNarrator.getBasedir(siteid)
    self.dataprefix = dataprefix
    #self.instrumentname = instrumentname
    self.zoo = zoo

def search(self, start, end, *args, **kwargs):
    """ Librarian Generator function
    extra parameters given here will be passed to the returned narrator's init
    """
    ret = MMCRMergeNarrator(self, self.basedir, self.dataprefix, start, end)
    if self.zoo is not None:
        ret = ARMNarrator.ARMFileNarrator(ret, self.zoo, ret.preYield, *args, **kwargs)
    return ret


if __name__ == '__main__':
    from datetime import datetime
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    zoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
    lib = MMCRMergeLibrarian('/data/ahsrldata', 'eurmmcrmerge.C1.c1.', zoo=zoo)
    zoo = None
    m = lib(start=datetime(2006, 12, 24, 0, 0, 0), end=datetime(2006, 12, 25, 0, 0, 0))
    for f in m:
        #print 'from librarian:', f
        if zoo:
            res = zoo(uri=f)
            print 'uri from zoo:', res
        else:
            print f
            #print 'content=', zoo.open(res)

def fetch_data(instrument, interval_start_time, interval_end_time, max_range_bin,
               requested_vars, cdf_to_hsrl, pre_process=None,
               dpl_librarian=None, dpl_zookeeper=None):
    """appends data to rs_mem until it contains requested time interval
       or extends to current time.

       instrument          = which data set----'ahsrl','gvhsrl','mf2hsrl','nshsrl'
       rs_mem              = append new data to this structure
       interval_start_time = start_time of requested data as a python datetime
       interval_end_time   = end_time of requested data as a python datetime
       data_end_time       = time of last measurement in rs_mem as a python datetime
       max_range_bin       = largest range bin to read
       data_request        = read this subset of data from raw netcdf e.g. 'images','housekeeping'
       small_memory        = [enable, number_of_shots_to_pre_average]
    """
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    from hsrl.dpl.HSRLLibrarian import HSRLLibrarian
    # print num2date(c_time)
    if dpl_librarian:
        lib = dpl_librarian
    else:
        lib = HSRLLibrarian(instrument=instrument)
    if dpl_zookeeper:
        zoo = dpl_zookeeper
    else:
        zoo = GenericTemplateRemapNetCDFZookeeper(instrument, requested_vars, max_range_bin, pre_process)
    #rs_mem = None
    for uri in lib(start=interval_start_time, end=interval_end_time):
        # reading first record with no prior data in rs_mem?
        try:
            filename = zoo(uri)
            print 'datafile ', filename
            rs_tail = zoo.open(filename, firsttime=interval_start_time, lasttime=interval_end_time)
            found_vars = zoo.getFoundVars(filename)
            if rs_tail is None or rs_tail.times.size == 0:
                continue
            missing_data_check(rs_tail)
            cdf_to_hsrl(rs_tail, found_vars)
            if rs_tail.times.size == 0:
                continue
            yield rs_tail
        except MemoryError as e:
            import traceback
            traceback.print_exc()
            print e
            continue
    #assert(len(rs_mem.times.shape) == 1)
    return  # rs_mem

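# Illustrative call sketch (not from the original module): fetch_data is a generator,
# so the caller iterates the yielded rs_tail frames. The wrapper name is hypothetical;
# netcdf_defaults, rs_constants and corr_adjusts are assumed to come from the caller's
# configuration, and the translator is the raw_translator seen in the read() example above.
def _example_fetch_raw(instrument, start, end, netcdf_defaults, rs_constants, corr_adjusts):
    import hsrl.data_stream.input_translators as it
    translator = it.raw_translator(instrument, rs_constants, corr_adjusts)
    # 4000 is an arbitrary illustrative max_range_bin
    for rs_tail in fetch_data(instrument, start, end, 4000, netcdf_defaults, translator):
        print 'raw chunk with', rs_tail.times.size, 'records'
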
def main():
    from datetime import datetime, timedelta
    import time
    import functools
    from hsrl.dpl.dpl_hsrl import dpl_hsrl
    from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper
    from lg_dpl_toolbox.filters.time_frame import TimeGinsu
    import lg_base.core.array_utils as hau
    import radar.dpl.MMCRMergeLibrarian as mmcr
    import radar.dpl.RadarFilters as rf
    import hsrl.dpl.dpl_artists as artists
    import substruct
    import resample_altitude

    starttime = datetime(2006, 12, 24, 22, 0, 0)
    endtime = datetime(2006, 12, 25, 0, 0, 0)
    #stitcher = TimeStitch()
    #restr = SubstructRestractor('rs_mmcr')
    hsrllib = dpl_hsrl(instrument='ahsrl')
    hsrlnar = hsrllib(start_time_datetime=starttime, end_time_datetime=endtime,
                      min_alt_m=0.0, max_alt_m=20000.0,
                      timeres_timedelta=timedelta(seconds=30), altres_m=15)
    mmcrzoo = GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
    mmcrlib = mmcr.MMCRMergeLibrarian('ahsrl', 'eurmmcrmerge.C1.c1.', zoo=mmcrzoo)
    mmcrnar = rf.RadarPrefilter(mmcrlib(start=starttime, end=endtime))
    #mmcrnar = mmcr.MMCRMergeBackscatterToReflectivity(resample_altitude.ResampleXd(TimeGinsu(substruct.SubstructExtractor(mmcrnar, None), 'times', stitcherbase=stitcher), 'heights', hsrlnar.getAltitudeAxis))
    mmcrnar = rf.RadarBackscatterToReflectivity(
        resample_altitude.ResampleXd(TimeGinsu(mmcrnar, 'times'), 'heights', hsrlnar.altitudeAxis.copy()))
    hsrlnarsplitter = substruct.SubstructBrancher(hsrlnar)
    #hsrlnar = TimeGinsu(substruct.SubstructExtractor(hsrlnar, 'rs_inv', restractor=restr), 'times', isEnd=False, stitchersync=stitcher)
    hsrlnar = TimeGinsu(hsrlnarsplitter.narrateSubstruct('rs_inv'), 'times', isEnd=False)
    from dplkit.simple.blender import TimeInterpolatedMerge
    merge = TimeInterpolatedMerge(hsrlnar, [mmcrnar], allow_nans=True,
                                  channels=['heights', 'Reflectivity', 'MeanDopplerVelocity',
                                            'Backscatter', 'SpectralWidth'])
    merge = substruct.Retyper(merge, functools.partial(hau.Time_Z_Group, timevarname='times', altname='heights'))
    #stitcher.setFramestream(merge)
    #restr.setFramestream(stitcher)
    #curs = restr
    curs = substruct.SubstructMerger('rs_inv', {
        'rs_mean': hsrlnarsplitter.narrateSubstruct('rs_mean'),
        'rs_raw': hsrlnarsplitter.narrateSubstruct('rs_raw'),
        'rs_inv': hsrlnarsplitter.narrateSubstruct('rs_inv'),
        'rs_mmcr': merge,
        'rs_init': hsrlnarsplitter.narrateSubstruct('rs_init'),
        'rs_static': hsrlnarsplitter.narrateSubstruct('rs_static'),
        'rs_Cxx': hsrlnarsplitter.narrateSubstruct('rs_Cxx'),
        'profiles': hsrlnarsplitter.narrateSubstruct('profiles', sparse=True),
    }, hau.Time_Z_Group, {'timevarname': 'times', 'altname': 'heights'})
    artist = artists.dpl_images_artist(framestream=curs, instrument='ahsrl', max_alt=30 * 1000.0,
                                       processing_defaults=curs.hsrl_process_control,
                                       display_defaults='all_plots.json')
    curs = artist
    for frame in curs:
        print 'frame', frame
        print 'frame keys', vars(frame).keys()
        print 'rs_inv', frame.rs_inv
        print 'rs_mmcr', frame.rs_mmcr
        print 'RefShape', frame.rs_mmcr.Reflectivity.shape
        print 'MMCRTimes', frame.rs_mmcr.times.shape
        print 'invTimes', frame.rs_inv.times.shape
        print 'heights', frame.rs_mmcr.heights.shape
        for x in range(frame.rs_mmcr.times.shape[0] - 1):
            if frame.rs_mmcr.times[x] == frame.rs_mmcr.times[x + 1]:
                print 'dupe at', x, frame.rs_mmcr.times[x]
        #print type(frame), type(frame['Reflectivity']) if 'Reflectivity' in frame else 'no ref', type(frame['beta_a_backscat_par']) if 'beta_a_backscat_par' in frame else 'no backscat'
        time.sleep(5)