Example 1
 def __init__(self,
              framestream,
              display_defaults,
              instrument=None,
              figurecontainer=None,
              subframe='rs_mmcr'):
     super(dpl_radar_images_artist, self).__init__(framestream)
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.figcontainer = figurecontainer
     if instrument is None:
         print 'WARNING: RADAR Type not specified at init! Might not be right if multiple radar streams given'
     self.instrument = instrument or (framestream.radarType if hasattr(
         framestream, 'radarType') else 'Radar')
     self.framestream = framestream
     self.subframe = subframe
     import radar.graphics.radar_display as rd
     self.rd = rd
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #if isinstance(self.display_defaults,basestring):
     #    [self.display_defaults, dummy]= du.get_display_defaults(self.display_defaults,"new")
     self.stepper = None
     if self.figcontainer is None:
         self.figcontainer = self.gt.figurelist()
Example 2
 def __init__(self,
              framestream,
              display_defaults,
              subframe=None,
              figurecontainer=None,
              includenestedframes={}):
     super(dpl_allradar_hsrl_images_artist, self).__init__(framestream)
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.figcontainer = figurecontainer
     self.subframename = subframe
     self.framestream = framestream
     self.includenestedframes = includenestedframes
     import cooperative.hsrl_radar.radar_hsrl_display as rhd
     self.rhd = rhd
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #if isinstance(self.display_defaults,basestring):
     #    [self.display_defaults, dummy]= du.get_display_defaults(self.display_defaults,"new")
     self.stepper = None
     if self.figcontainer is None:
         self.figcontainer = self.gt.figurelist()
Example 3
 def __init__(self,
              framestream,
              display_defaults,
              figurecontainer=None,
              subframe=None,
              streamname=None):
     super(dpl_raman_inverted_profile_images_artist,
           self).__init__(framestream)
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.figcontainer = figurecontainer
     self.instrument = self.platform + '-' + (streamname or self.ramanType)  #self.ramanType
     self.framestream = framestream
     self.subframe = subframe
     import raman.graphics.raman_display as rd
     self.rd = rd
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #if isinstance(self.display_defaults,basestring):
     #    [self.display_defaults, dummy]= du.get_display_defaults(self.display_defaults,"new")
     self.stepper = None
     if self.figcontainer is None:
         self.figcontainer = self.gt.figurelist()
Example 4
 def __init__(self,
              framestream,
              display_defaults,
              framedomain=None,
              instrument=None,
              figurecontainer=None):
     super(dpl_met_images_artist, self).__init__(framestream)
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.figcontainer = figurecontainer
     self.instrument = instrument if instrument is not None else 'Met'
     self.framestream = framestream
     self.framedomain = framedomain
     import met.graphics.met_display as rd
     self.rd = rd
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #if isinstance(self.display_defaults,basestring):
     #    [self.display_defaults, dummy]= du.get_display_defaults(self.display_defaults,"new")
     self.stepper = None
     if self.figcontainer is None:
         self.figcontainer = self.gt.figurelist()
Example 5
 def __init__(self,
              framestream,
              display_defaults,
              subframe='rs_multiple_scattering',
              figurecontainer=None,
              includenestedframes={}):
     super(dpl_multiple_scattering_artist, self).__init__(framestream)
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.figcontainer = figurecontainer
     try:
         self.instrument = getattr(self.framestream,
                                   'multiple_scattering_instrument')
     except AttributeError:
         self.instrument = 'Multiple Scattering'
     self.subframename = subframe
     self.framestream = framestream
     self.includenestedframes = includenestedframes
     import cooperative.graphics.multiple_scattering_display as msd
     self.msd = msd
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #if isinstance(self.display_defaults,basestring):
     #    [self.display_defaults, dummy]= du.get_display_defaults(self.display_defaults,"new")
     self.stepper = None
     if self.figcontainer is None:
         self.figcontainer = self.gt.figurelist()
Example 6
 def reload_process_control(self):
     if self.process_control_file is not None:
         self.process_control = jc.json_config(
             locate_file(self.process_control_file,
                         systemOnly=self.system_config), 'process_defaults')
     if self.system_config and os.getenv('OVERRIDE_SYSTEM_DEFAULTS',
                                         None) is None:
         altpath = self.process_control.get_value('alternate_cal_dir',
                                                  'full_dir_path')
         if not (altpath is None or altpath == "None"):
             print "System Default process_defaults.json should not specify an alternative calibration path."
             hconfig = os.getenv(
                 "HSRL_CONFIG",
                 "a custom location, set HSRL_CONFIG to that path")
             print "It is strongly recommended to use one of these two options:"
             print "- copy process_defaults.json to %s, customize the json there, and specify this non-default parameter at the commandline" % (
                 hconfig)
             print "- or set HSRL_ALTERNATE_CAL_DIR to %s" % (altpath)
             print "Or, alternatively, do all of the following:"
             print "set OVERRIDE_SYSTEM_DEFAULTS to yes"
             if "custom location" in hconfig:
                 hconfig = "~/hsrl_config"
                 print "make directory " + hconfig
                 print "set HSRL_CONFIG to " + hconfig
             print "copy process_defaults.json to " + hconfig
             print "modify it without renaming it, and it becomes the new system default for you"
Example 7
def maybeMakeJC(displays, majorkey, *args, **kwargs):
    """ Attempt to make a json_config object from a display and majorkey (required subsection used as default).
        If the first parameter is a string, an attempt is made to locate it:
           if this fails, the string is returned as-is;
           if it succeeds, the located file is loaded as a json_config.
        If it is not a string, it is treated as a dictionary for json_config initialization.
        If the loading above fails because the major key is missing, the located string or dictionary is returned as-is.
        If it fails for any other reason, the underlying exception is raised.
    """
    if majorkey is not None and not isinstance(displays, jc.json_config):
        jsd = rip_json_config_parameters(kwargs)
        try:
            if isinstance(displays, basestring):
                try:
                    displays = lf.locate_file(
                        displays, **only_locate_file_parameters(kwargs))
                except IOError:
                    return displays
            #if it's located, or is not a string, try loading it as a json_config
            return jc.json_config(displays, majorkey, **jsd)
        except KeyError:
            pass
        except ValueError:
            if displays.endswith('.json'):
                raise
    return displays
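A minimal usage sketch of the fallback behavior documented above (the file name and keyword are illustrative, and maybeMakeJC is assumed to be importable from this module):

    # Hypothetical call -- names are illustrative, not taken from the listing above.
    defaults = maybeMakeJC('radar_plots.json', 'display_defaults',
                           allow_missing_values=True)
    if isinstance(defaults, jc.json_config):
        pass  # file was located and the 'display_defaults' major key was present
    else:
        pass  # fell back: the original string (or dict) was returned unchanged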
Example 8
    def load_parameters(self, parms):
        if parms is None:
            self._parameters = None
            return
        if parms is None:
            try:
                cv = self.framestream.hsrl_constants_first if self.framestream is not None else self.hsrlscope.hsrl_constants_first
                if 'default_particle_parameters' in cv:
                    parms = cv['default_particle_parameters']
            except AttributeError:
                print 'WARNING: HSRL is missing. can\'t find hsrl_constants_first'
                #raise RuntimeError("Framestream doesn't have hsrl_constants. HSRL is missing?")
        systemOnly = (parms is None)
        if parms is None:
            parms = 'particle_parameters_default.json'
        if isinstance(parms, basestring):
            from lg_base.core.locate_file import locate_file
            parms = json.load(
                open(locate_file(parms, systemOnly=systemOnly), 'r'))
        if not isinstance(parms, dict):
            raise RuntimeError(
                'Particle Parameters need to be a json filename or a dictionary'
            )

        self._particle_parameters = parms
Example 9
def get_display_defaults(display_defaults_file):
    """get display defaults from display_default.json.
       image selections"""
    display_defaults = json_config(lf.locate_file(display_defaults_file),
                                   'display_defaults',
                                   allow_missing_values=True)
    config = display_defaults
    return (display_defaults, config)
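A short sketch of typical use (this mirrors the call in Example 19 below; the file name must be locatable by lf.locate_file, and both returned names refer to the same json_config object):

    (disp, conf) = get_display_defaults('archive_plots.json')
    disp.set_value('backscat_image', 'enable', 1)  # conf is the same object as disp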
Example 10
 def __init__(self, instrument,process_control=None):#,*args, **kwargs):
     super(self.__class__,self).__init__(None)
     self.instrument=instrument
     from lg_base.core.locate_file import locate_file
     self.process_control_file=locate_file(process_control or 'radar_processing_defaults.json',systemOnly=process_control is None)
     import lg_base.core.open_config as oc
     self.oc=oc
     import lg_base.core.json_config as jc
     self.jc=jc
     #import hsrl.utils.hsrl_array_utils as hau #import T_Array,Z_Array,TZ_Array,Time_Z_Group
     #self.hau=hau
     from lg_dpl_toolbox.dpl.NetCDFZookeeper import GenericTemplateRemapNetCDFZookeeper 
     import RadarFilters as rf
     self.rf=rf
     #self.callableargs=kwargs
     if instrument in ('mmcr','ahsrl','ammcr'):            
         import MMCRMergeLibrarian as mmcr
         self.instrument='mmcr'
         self.zoo=GenericTemplateRemapNetCDFZookeeper('eurmmcrmerge')
         self.lib=mmcr.MMCRMergeLibrarian('ahsrl',['eurmmcrmerge.C1.c1.','nsaarscl1clothC1.c1.'],zoo=self.zoo)
         self.instrumentbase='ahsrl'
     elif instrument.endswith(('kazr','kazrge','kazrmd','mwacr','nshsrl','mf2hsrl')):
         allinsts=None
         patterns=None
         if instrument=='kazr':#TOO GENERIC
             print 'WARNING: specifying "kazr" is too generic; use tmpkazr, magkazr or nsakazr'
             instrument='mf2hsrl'#assume this is default
         if instrument=='mf2hsrl':
             instrument='mf2kazr'
         elif instrument=='nshsrl':
             instrument='nskazr'
         if instrument.endswith('kazr'):
             instrument+='ge'#if unspecified, use ge
         self.instrument=instrument
         if instrument.startswith(('mag','tmp','mf2')):
             self.instrumentbase='mf2hsrl'
             suffix='M1.a1.'
         elif instrument.startswith(('nsa','ns')):
             self.instrumentbase='nshsrl'
             suffix='C1.a1.'
         else:
             raise RuntimeError('Unknown instrument base for '+instrument)
         if allinsts is not None:
             patterns=[(p+suffix) for p in allinsts]
         else:
             patterns=[self.instrument+suffix]
         if 'kazr' in instrument:
             import KAZRLibrarian as kazr
             self.zoo=GenericTemplateRemapNetCDFZookeeper('kazr')
             self.lib=kazr.KAZRLibrarian(self.instrumentbase,self.instrument,patterns,zoo=self.zoo)
         elif 'mwacr' in instrument:
             import MWACRLibrarian as mwacr
             self.zoo=GenericTemplateRemapNetCDFZookeeper('mwacr')
             self.lib=mwacr.MWACRLibrarian(self.instrumentbase,self.instrument,patterns,zoo=self.zoo)
         else:
             raise RuntimeError('Unknown Librarian for source '+instrument)
     else:
         raise RuntimeError('Unknown radar source '+instrument)
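For reference, a hedged illustration of how the instrument aliases above are normalized (the class is instantiated as dpl_radar(...) in Example 19; librarian and zookeeper behavior depends on the local data archive):

    # Illustrative only -- shows the name normalization done in __init__:
    #   'kazr'    -> warning, treated as 'mf2kazrge' (base 'mf2hsrl')
    #   'mf2hsrl' -> 'mf2kazrge' (base 'mf2hsrl', files 'mf2kazrgeM1.a1.*')
    #   'nshsrl'  -> 'nskazrge'  (base 'nshsrl',  files 'nskazrgeC1.a1.*')
    #   'mmcr'    -> 'mmcr'      (base 'ahsrl',   eurmmcrmerge/arscl files)
    radar = dpl_radar(instrument='magkazr')  # becomes 'magkazrge', base 'mf2hsrl'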
Example 11
    def load_mscatter_parameters(self,parms,particle_parameters):
       
        systemOnly = (parms is None)
      
        if parms is None:
            parms='multiple_scatter_parameters_default.json'
        if isinstance(parms,basestring):
            from lg_base.core.locate_file import locate_file
            parms=json.load(open(locate_file(parms,systemOnly=systemOnly),'r'))
        if not isinstance(parms,dict):
            raise RuntimeError('multiple_scatter_parameters need to be a json filename or a dictionary')
        self.multiple_scatter_parameters=parms

        # if using lidar_radar derived particle info get particle size info from "particle_parameters.json"
        print
        if particle_parameters is not None \
                  and self.multiple_scatter_parameters['particle_info_source'] == 'measured':
            self.multiple_scatter_parameters['p180_water'] \
                        = particle_parameters['p180_water']['value']
            self.multiple_scatter_parameters['p180_ice'] \
                        = particle_parameters['p180_ice'] 
            self.multiple_scatter_parameters['h2o_depol_threshold'] \
                        = particle_parameters['h2o_depol_threshold']
            self.multiple_scatter_parameters['alpha_water'] \
                        = particle_parameters['alpha_water']
            self.multiple_scatter_parameters['alpha_ice'] \
                        = particle_parameters['alpha_ice']
            self.multiple_scatter_parameters['g_water'] \
                        = particle_parameters['g_water']
            self.multiple_scatter_parameters['g_ice'] \
                        = particle_parameters['g_ice']
            print 'multiple scattering particle mode diameter from lidar-radar measured profile'
            print 'particle parameters for multiple scattering taken from "particle_parameters.json"'
        elif particle_parameters is None \
               and self.multiple_scatter_parameters['particle_info_source'] != 'constant':
            raise RuntimeError('"particle_parameters.json" not present and multiple_scatter["particle_info_source"] != "constant"')
            if self.multiple_scatter_parameters['processing_mode'] == '1d':
                print '    processing single 1-d profile as mean over time interval requested'
            else:
                print '    processing 2-d (time,altitude) multiple scatter correction'
        else:
            print 'multiple scattering parameters taken from "multiple_scatter_parameters.json"'
            if self.multiple_scatter_parameters['processing_mode'] == '1d':
                print '    processing single 1-d mean profile over requested time interval'
            else:
                print '    processing 2-d (time,altitude) multiple scattering'
            print '    mode_diameter_water  = ',self.multiple_scatter_parameters['mode_diameter_water'], '(m)'
            print '    mode_diameter_ice    = ',self.multiple_scatter_parameters['mode_diameter_ice'], '(m)' 
        print  '    starting altitude   = ', self.multiple_scatter_parameters['lowest_altitude'], '(m)'
        print  '    p180_water          = ', self.multiple_scatter_parameters['p180_water'] 
        print  '    p180_ice            = ', self.multiple_scatter_parameters['p180_ice']
        print  '    h2o_depol_threshold = ', self.multiple_scatter_parameters['h2o_depol_threshold']*100.0, '(%)'
        print  '    alpha_water         = ', self.multiple_scatter_parameters['alpha_water']
        print  '    alpha_ice           = ', self.multiple_scatter_parameters['alpha_ice']
        print  '    g_water             = ', self.multiple_scatter_parameters['g_water']
        print  '    g_ice               = ', self.multiple_scatter_parameters['g_ice']
        print
Example 12
 def load_parameters(self, parms):
     if parms is None:
         self._parameters = None
         return
     systemOnly = (parms is None)
     if parms is None:
         parms = 'spheroid_particle_parameters_default.json'
     if isinstance(parms, basestring):
         from lg_base.core.locate_file import locate_file
         parms = json.load(
             open(locate_file(parms, systemOnly=systemOnly), 'r'))
     if not isinstance(parms, dict):
         raise RuntimeError(
             'Particle Parameters need to be a json filename or a dictionary'
         )
     self._parameters = parms
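A sketch of the inputs the method above accepts (obj stands for a configured instance; the file and dict contents are illustrative):

    obj.load_parameters(None)                      # clears: self._parameters = None
    obj.load_parameters('my_spheroid_parms.json')  # located via locate_file, then json.load
    obj.load_parameters({'alpha_water': 2.0})      # a dict is stored directly
    obj.load_parameters(42)                        # raises RuntimeError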
Example 13
 def __init__(self, platform, timeinfo, process_control, corr_adjusts=None):
     super(dpl_constants_narr, self).__init__(timeinfo,
                                              cru.cal_file_reader(platform))
     self.platform = platform
     self.rprocess_control = process_control
     if self.rprocess_control is None:
         self.rprocess_control = 'raman_process_control.json'
     if isinstance(self.rprocess_control, basestring):
         import lg_base.core.locate_file as lf
         import lg_base.core.json_config as jc
         self.rprocess_control = jc.json_config(
             lf.locate_file(self.rprocess_control, systemOnly=False),
             'raman_process_defaults')
     self.rcorr_adjusts = self.rprocess_control.get_dict(
         'corr_adjusts').copy()
     if corr_adjusts is not None:
         self.rcorr_adjusts.update(corr_adjusts)
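The corr_adjusts handling above is a plain copy-then-update override, with caller-supplied values taking precedence; a minimal self-contained illustration (key names are stand-ins, not actual corr_adjusts entries):

    defaults = {'mol_gain': 1.0, 'baseline': 1.0}   # stand-in for get_dict('corr_adjusts')
    overrides = {'mol_gain': 0.98}                  # caller-supplied corr_adjusts
    merged = defaults.copy()
    if overrides is not None:
        merged.update(overrides)
    assert merged == {'mol_gain': 0.98, 'baseline': 1.0}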
Example 14
 def __init__(self,
              framestream,
              max_alt,
              display_defaults,
              instrument=None,
              processing_defaults=None,
              figurecontainer=None,
              limit_frame_to=None,
              breakup_nesting=False,
              flat_frame=False,
              enable_masking=None):
     super(dpl_images_artist, self).__init__(framestream)
     self.framestream = framestream
     #self.provides=framestream.provides
     self.instrument = instrument or framestream.hsrl_instrument
     self.max_alt = max_alt
     self.enable_masking = enable_masking
     try:
         self.processing_defaults = processing_defaults or framestream.hsrl_process_control
     except AttributeError:
         self.processing_defaults = None
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.limit_frame_to = limit_frame_to
     self.breakup_nesting = breakup_nesting
     self.flat_frame = flat_frame
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #import hsrl.data_stream.display_utilities as du
     import hsrl.graphics.hsrl_display as du
     self.du = du
     self.stepper = None
     self.figcontainer = figurecontainer or self.gt.figurelist()
Example 15
 def __init__(self,
              framestream,
              display_defaults,
              subframes,
              figurecontainer=None):
     super(dpl_profiles_images_artist, self).__init__(framestream)
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.figcontainer = figurecontainer
     self.subframenames = subframes
     self.framestream = framestream
     import cooperative.graphics.profiles_display as rd
     self.rd = rd
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #if isinstance(self.display_defaults,basestring):
     #    [self.display_defaults, dummy]= du.get_display_defaults(self.display_defaults,"new")
     self.stepper = None
     if self.figcontainer is None:
         self.figcontainer = self.gt.figurelist()
Example 16
 def __init__(self,
              framestream,
              display_defaults,
              subframe=None,
              mode='mass_dimension',
              figurecontainer=None,
              includenestedframes={}):
     super(dpl_particle_images_artist, self).__init__(framestream)
     self.display_defaults = display_defaults
     if isinstance(display_defaults, basestring):
         self.display_defaults = jc.json_config(
             lf.locate_file(display_defaults),
             'display_defaults',
             allow_missing_values=True)
     self.figcontainer = figurecontainer
     self.subframename = subframe
     self.framestream = framestream
     self.includenestedframes = includenestedframes
     self.mode = mode
     if mode not in ('spheroid', 'mass_dimension'):
         raise NotImplementedError('Particle image generation mode for ' +
                                   mode)
     instname = mode + '_instrument'
     if hasattr(self, instname):
         self.instrument = getattr(self, instname)
     else:
         self.instrument = mode
     import cooperative.graphics.coop_display as rd
     self.rd = rd
     import lg_base.graphics.graphics_toolkit as gt
     self.gt = gt
     #if isinstance(self.display_defaults,basestring):
     #    [self.display_defaults, dummy]= du.get_display_defaults(self.display_defaults,"new")
     self.stepper = None
     if self.figcontainer is None:
         self.figcontainer = self.gt.figurelist()
Example 17
    def __init__(self, wavelength, particle_parameters):
        """Calculate the size-distribution-weighted non-Rayleigh radar backscatter phase
        function from Mie q-factors contained in the 'cooperative/config/mie_xx_temp.txt' file.
        Returns a table of p180/4pi at 1 micron intervals."""

        if (particle_parameters['non-Rayleigh_radar'] == 'True' \
               or particle_parameters['non-Rayleigh_radar'] == 'true'):
            if wavelength == 8.6e-3:
                mie_filename = locate_file('mie_data_8.6mm_285K.txt')
                print 'reading  ' + mie_filename
                index_refraction = 4.72 - 2.72j
            elif wavelength == 3.154e-3:

                #hack--approximate wavelength used for mie calculation
                wavelength = 3.2e-3

                mie_filename = locate_file('mie_data_3.2mm_285K.txt')
                print 'reading  ' + mie_filename
                index_refraction = 3.08 - 1.78j
            else:
                print 'unknown radar wavelength = ', wavelength
                raise RuntimeError('Unknown radar wavelength')
            if os.path.isfile(mie_filename):
                [hh, dd] = ru.readascii(mie_filename)
            else:
                print "unable to perform adaptive p180 operation"
                raise RuntimeError("Can't find mie results in '%s'" % mie_filename)

            #convert wavelength from meters to microns
            lambd = wavelength * 1e6

            #alpha = particle_parameters['alpha_water']
            #gam   = particle_parameters['g_water']

            if 'modified_gamma' in particle_parameters['size_distribution']:
                alpha = particle_parameters['size_distribution'][
                    'modified_gamma']['alpha_water']
                gam = particle_parameters['size_distribution'][
                    'modified_gamma']['g_water']
            elif 'oconnor_gamma' in particle_parameters['size_distribution']:
                alpha = particle_parameters['size_distribution'][
                    'oconnor_gamma']['alpha_water']

            #find largest diameter in microns for this wavelength
            dmax = int(dd[-1, 0] * lambd / np.pi)
            self.D = np.arange(dmax).astype('float')

            qsca = np.interp(self.D, dd[:, 0] * lambd / np.pi, dd[:, 2])
            qback = np.interp(self.D, dd[:, 0] * lambd / np.pi, dd[:, 3])

            k_squared = (index_refraction**2 - 1) / (index_refraction**2 + 2)
            k_squared = np.real(k_squared * np.conj(k_squared))

            print
            print '**********************************************************************************************'
            print
            print 'k_squared  ', k_squared, wavelength
            print
            print

            #qback_theory = np.pi**5 * lambd**-4 * k_squared * self.D**4
            qback_theory = np.pi**3 * lambd**-4 * k_squared * self.D**4

            #hack
            qback_theory = qback_theory * 4.0 * np.pi
            print
            print
            print '****** hack----qback_theory adjustment by factor of 4*pi'
            print

            if 1:
                import matplotlib.pylab as plt
                plt.figure(1999)
                plt.plot(self.D, qback, 'k', self.D, qback_theory, 'r')
                plt.xlabel('diameter (microns)')
                plt.ylabel('q backscatter')
                ax = plt.gca()
                ax.set_xscale('log')
                ax.set_yscale('log')
                ax.grid(True)
            #if particle_parameters['p180_water']['size_distribution'] == 'modified_gamma':
            if 'modified_gamma' in particle_parameters['size_distribution']:
                print 'Non-Rayleigh radar corrections calculated with modified gamma size distribution'
                D_eval = np.logspace(np.log10(self.D[15]),
                                     np.log10(self.D[-1]), 500)
                table = np.ones(len(D_eval))
                scale = qback[100] / (self.D[100] * np.pi / lambd)**4
                rayleigh = scale * (self.D * np.pi / lambd)**4
                qsca_rayleigh = rayleigh
                for i in range(len(D_eval) - 1):
                    #table[i] = np.nansum(modified_gamma(self.D,D_eval[i],alpha,gam)* qback) \
                    #    /(4.0* np.pi * np.nansum(modified_gamma(self.D,D_eval[i],alpha,gam) * qsca))

                    #this is the ratio of the actual backscatter to the Rayleigh computed backscatter
                    #table[i] = np.nansum(modified_gamma(self.D,D_eval[i],alpha,gam) * qsca *  qback) \
                    #    /(np.nansum(modified_gamma(self.D,D_eval[i],alpha,gam) * qsca *  qback_theory))

                    #is this the correct way to average?
                    table[i] =np.nansum(modified_gamma(self.D,D_eval[i],alpha,gam) * qsca)\
                       /np.nansum(modified_gamma(self.D,D_eval[i],alpha,gam) * qsca *  qback/qback_theory)

                    if i % 50 == 0:
                        import matplotlib.pylab as plt
                        plt.figure(3000)
                        plt.plot(self.D,
                                 modified_gamma(self.D, D_eval[i], alpha, gam))
                        plt.xlabel('diameter (microns)')
                        plt.ylabel('distribution')
                        ax = plt.gca()
                        ax.set_xscale('log')
                        ax.grid(True)

            elif 'oconnor_gamma' in particle_parameters['size_distribution']:
                print 'Non-Rayleigh radar corrections calculated with oconnor_gamma size distribution'
                for i in range(len(self.D)):
                    table[i] = np.nansum(self.oconnor_gamma(self.D,self.D[i],alpha)*rayleigh)\
                       /np.nansum(self.oconnor_gamma(self.D,self.D[i],alpha))
            else:
                raise RuntimeError("unrecognized size distribution type")

            #fill table for all sizes using interpolation
            self.non_rayleigh_vs_d_table = np.interp(self.D, D_eval, table)

        else:  #no non-rayliegh radar correction
            self.D = None
            self.non_rayleigh_vs_d_table = 1.0

        if particle_parameters['non-Rayleigh_radar'] == 'True':
            import matplotlib.pylab as plt
            plt.figure(2000)
            plt.plot(dd[:, 0] * lambd / np.pi, dd[:, 3], self.D, qback, self.D,
                     1.5 * rayleigh, 'r')
            plt.xlabel('diameter')
            plt.ylabel('qback')
            ax = plt.gca()
            ax.set_xscale('log')
            ax.grid(True)

            plt.figure(2001)
            plt.plot(dd[:, 0] * lambd / np.pi, dd[:, 2], 'r', self.D, qsca,
                     self.D, rayleigh, 'g')
            plt.xlabel('diameter (microns)')
            plt.ylabel('qscat')
            ax = plt.gca()
            ax.set_xscale('log')
            ax.grid(True)

            plt.figure(2002)
            plt.plot(self.D, self.non_rayleigh_vs_d_table,
                     dd[:, 0] * lambd / np.pi, dd[:, 5] * 8 * np.pi / 3.0)
            plt.xlabel('diameter (microns)')
            plt.ylabel('non-Rayleigh correction factor')
            ax = plt.gca()
            ax.set_xscale('log')
            ax.grid(True)

            plt.figure(2003)
            plt.plot(
                dd[:, 0] * lambd / np.pi, dd[:, 5] * 8 * np.pi / 3.0, 'k',
                self.D,
                modified_gamma(self.D, 20.0, alpha, gam) /
                np.max(modified_gamma(self.D, 20.0, alpha, gam)), 'm', self.D,
                modified_gamma(self.D, 100.0, alpha, gam) /
                np.max(modified_gamma(self.D, 100.0, alpha, gam)), 'm', self.D,
                modified_gamma(self.D, 550.0, alpha, gam) /
                np.max(modified_gamma(self.D, 550.0, alpha, gam)), 'm', self.D,
                2.0 / 3.0 * qback / qsca, 'c', self.D,
                self.non_rayleigh_vs_d_table, 'r')
            plt.xlabel('diameter (microns)')
            plt.ylabel('non-Rayleigh correction factor')
            ax = plt.gca()
            ax.set_xscale('log')
            ax.grid(True)

        return
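As written (including the factor-of-4*pi adjustment flagged as a hack above), the modified-gamma loop fills the table with the ratio, for each evaluation diameter D_i,

    table(D_i) = \frac{\sum_D n(D;D_i)\, q_{sca}(D)}
                      {\sum_D n(D;D_i)\, q_{sca}(D)\, q_{back}(D)/q_{back}^{Rayleigh}(D)},
    \qquad
    q_{back}^{Rayleigh}(D) = 4\pi\,\frac{\pi^{3}}{\lambda^{4}}\,|K|^{2}\,D^{4},
    \quad |K|^{2} = \left|\frac{m^{2}-1}{m^{2}+2}\right|^{2}

where n(D;D_i) is the modified-gamma size distribution and the sums run over the 1-micron diameter grid.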
Example 18
        gen = dpl_rti(
            instrument,
            tmp,
            tm,
            #datetime.datetime(2012,6,1,12,0,0),datetime.datetime(2012,6,1,17,0,0),
            datetime.timedelta(seconds=timeave),
            datetime.timedelta(seconds=retrievalslice),
            0,
            12000,
            rangeave)

        v = None

        donetest = 5

        for i in gen:
            if v is None:
                v = dpl_ctnc.dpl_create_templatenetcdf(
                    locate_file('hsrl_nomenclature.cdl'), n, i)
            v.appendtemplatedata(i)
            if i.rs_inv.times.shape[0] == 0:
                donetest -= 1
            else:
                donetest = 5
            if donetest == 0:
                break
            #raise TypeError
            #break
            n.sync()
        n.close()
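The donetest countdown above stops the append loop after five consecutive frames whose rs_inv.times array is empty; a self-contained sketch of that stop rule:

    # Minimal stand-alone illustration of the "N consecutive empty frames" stop rule.
    def consume_until_quiet(frames, limit=5):
        remaining = limit
        for frame in frames:
            yield frame                 # e.g. v.appendtemplatedata(frame); n.sync()
            if len(frame) == 0:         # stands in for i.rs_inv.times.shape[0] == 0
                remaining -= 1
            else:
                remaining = limit       # any non-empty frame resets the countdown
            if remaining == 0:
                break

    assert len(list(consume_until_quiet([[1], [], [], [], [], [], [2]]))) == 6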
Example 19
def makeArchiveImages(instrument,datetimestart,range_km=15,full24hour=None,filename=None,reprocess=False,attenuated=False,frappe=False,ir1064=False,ismf2ship=False,completeframe=None,*args,**kwargs):
    import lg_dpl_toolbox.filters.substruct as frame_substruct
    frame_substruct.SubstructBrancher.multiprocessable=False
    from lg_dpl_toolbox.dpl.dpl_read_templatenetcdf import dpl_read_templatenetcdf
    from lg_dpl_toolbox.filters.time_frame import FrameCachedConcatenate
    from hsrl.dpl.dpl_hsrl import dpl_hsrl
    from radar.dpl.dpl_radar import dpl_radar
    from lg_dpl_toolbox.dpl.dpl_create_templatenetcdf import dpl_create_templatenetcdf
    import hsrl.dpl.dpl_artists as hsrl_artists
    import radar.dpl.dpl_artists as radar_artists
    import raman.dpl.dpl_artists as raman_artists

    import lg_dpl_toolbox.core.archival as hru
    import hsrl.graphics.hsrl_display as du
    if filename is not None:
        useFile=True
    else:
        useFile=False #True
    if not useFile or not os.access(filename,os.F_OK):
        reprocess=True
    realend=None
    #process_control=None
    if reprocess:
        print datetime
        #instrument='ahsrl'
        if useFile:
            n=Dataset(filename,'w',clobber=True)
            n.instrument=instrument
        print datetime
        realstart=datetimestart
        if full24hour is None:
            realstart=datetimestart.replace(hour=0 if datetimestart.hour<12 else 12,minute=0,second=0,microsecond=0)
        elif frappe:
            realstart=datetimestart.replace(hour=0,minute=0,second=0,microsecond=0)
        elif ismf2ship:
            realend=realstart
            realstart=realstart-timedelta(days=2.0)
        else:
            realstart=realstart-timedelta(days=1.0)
        if 'realstart' in kwargs:
            realstart=kwargs['realstart']
        if realend is None:
            realend=realstart+timedelta(days=.5 if full24hour is None else 1.0)
        if 'realend' in kwargs:
            realend=kwargs['realend']
        isHsrl=False
        isRadar=False
        isRaman=False
        instrumentprefix=None
        if instrument.endswith('hsrl'):
            dpl=dpl_hsrl(instrument=instrument,filetype='data')
            dpl.hsrl_process_control.set_value('quality_assurance','enable',False)
            #dpl.hsrl_process_control.set_value('extinction_processing','enable',False)
            dpl.hsrl_process_control.set_value('wfov_corr','enable',False)
            gen=dpl(start_time_datetime=realstart,end_time_datetime=realend,min_alt_m=0,max_alt_m=range_km*1000,with_profiles=False)
            isHsrl=True
            #process_control=gen.hsrl_process_control
            hsrlinstrument=instrument
            if os.getenv('COMPLETEFRAME',completeframe) is None:
                import lg_dpl_toolbox.filters.substruct as frame_substruct
                dropcontent=['rs_raw','rs_mean']
                gen=frame_substruct.DropFrameContent(gen,dropcontent)

        elif ('kazr' in instrument) or ('mwacr' in instrument) or instrument=='mmcr':
            dpl=dpl_radar(instrument=instrument)
            gen=dpl(start_time_datetime=realstart,end_time_datetime=realend,min_alt_m=0,max_alt_m=range_km*1000,forimage=True,allow_nans=True)
            hsrlinstrument=dpl.instrumentbase
            isRadar=True
            if 'mwacr' in instrument:
                instrumentprefix='mwacr'
            else:
                instrumentprefix='radar'
            #merge=picnicsession.PicnicProgressNarrator(dplc,getLastOf('start'), searchparms['start_time_datetime'],searchparms['end_time_datetime'],session)
            #hasProgress=True
        elif instrument.startswith('rlprof'):
            from raman.dpl.raman_dpl import dpl_raman
            import lg_dpl_toolbox.filters.time_frame as time_slicing
            dpl=dpl_raman('bagohsrl',instrument.replace('rlprof',''))
            gen=dpl(start_time_datetime=realstart,end_time_datetime=realend,min_alt_m=0,max_alt_m=range_km*1000,forimage=True,allow_nans=True,inclusive=True)
            import functools
            import lg_base.core.array_utils as hau
            from dplkit.simple.blender import TimeInterpolatedMerge
            import lg_dpl_toolbox.filters.substruct as frame_substruct
            import lg_dpl_toolbox.dpl.TimeSource as TimeSource
            import lg_base.core.canvas_info as ci
            gen=time_slicing.TimeGinsu(gen,timefield='times',dtfield=None)
            forimage=ci.load_canvas_info()['canvas_pixels']
            timesource=TimeSource.TimeGenerator(realstart,realend,time_step_count=forimage['x'])
            gen=TimeInterpolatedMerge(timesource,[gen], allow_nans=True)
            gen=TimeSource.AddAppendableTime(gen,'times','delta_t')
            gen=frame_substruct.Retyper(gen,functools.partial(hau.Time_Z_Group,timevarname='times',altname='altitudes'))
            hsrlinstrument='bagohsrl'
            instrumentprefix=instrument
            isRaman=True


    if reprocess and useFile:
        v=None

        for i in gen:
            if v is None:
                v=dpl_create_templatenetcdf(locate_file('hsrl_nomenclature.cdl'),n,i)
            v.appendtemplatedata(i)
        #raise TypeError
        #break
            n.sync()
        n.close()

        #fn='outtest.nc'

    if useFile:
        v=dpl_read_templatenetcdf(filename)

        instrument=v.raw_netcdf().instrument[:]
    else:
        v=gen

    defaultjson='archive_plots.json'
    if ismf2ship:
        defaultjson='mf2ship_plots.json'
    (disp,conf)=du.get_display_defaults(os.getenv("PLOTSJSON",defaultjson))
  
    v=FrameCachedConcatenate(v)
    import lg_dpl_toolbox.dpl.TimeSource as TimeSource
    import lg_dpl_toolbox.filters.fill as fill
    import lg_base.core.canvas_info as ci
    forimage=ci.load_canvas_info()['canvas_pixels']
    ts=TimeSource.TimeGenerator(realstart,realend,time_step_count=forimage['x'])
    v=fill.FillIn(v,[np.array([x['start'] for x in ts])],ignoreGroups=ignoreSomeGroups)

    rs=None
    for n in v:
        if rs is None:
            rs=n
        else:
            rs.append(n)
        if isHsrl:
            if attenuated:
                bsfigname='atten_backscat_image'
                bsprefix='attbscat'
                disp.set_value(bsfigname,'enable',1)
                disp.set_value('backscat_image','enable',0)
                disp.set_value('linear_depol_image','figure',bsfigname)
                #disp.set_value('circular_depol_image','figure',bsfigname)
            else:
                bsfigname='backscat_image'
                bsprefix='bscat'
            if ir1064 or (hasattr(rs,'rs_inv') and hasattr(rs.rs_inv,'color_ratio')):
                disp.set_value('raw_color_ratio_image','enable',1)
                disp.set_value('color_ratio_image','enable',1)
            #if hasattr(rs.rs_inv,'circular_depol'):
            #    disp.set_value('linear_depol_image','enable',0) #this modification works because the artist doesn't render until after the yield
            #    field='circular_depol'
            #else:
            #disp.set_value('circular_depol_image','enable',0)
            field='linear_depol'


    if isHsrl:
        #if process_control==None:
        #    print 'loading process control from json'
        #    process_control=jc.json_config(locate_file('process_control.json'),'process_defaults')
        v=hsrl_artists.dpl_images_artist(v,instrument=instrument,max_alt=None,display_defaults=disp)
    elif isRadar:
        v=radar_artists.dpl_radar_images_artist(framestream=v,instrument=v.radarType,display_defaults=disp,subframe=None)
    elif isRaman:
        v=raman_artists.dpl_raman_images_artist(framestream=v,display_defaults=disp)

    for n in v:
        pass #run once more with cached value and actual artists

    usedpi=90

    #rs=Rti('ahsrl',stime.strftime('%d-%b-%y %H:%M'),dtime.total_seconds()/(60*60),minalt_km,maxalt_km,.5,'archive_plots.json')
    imtime=realstart
    imetime=realend
    imtimestr=imtime.strftime('%e-%b-%Y ')
    file_timetag=imtime.strftime("_%Y%m%dT%H%M_")+imetime.strftime("%H%M_")+("%i" % range_km)
    if not full24hour:
        imtimestr+= ('AM' if imtime.hour<12 else 'PM' )
        file_timetag+="_am" if imtime.hour<12 else "_pm"
    filelocation=hru.get_path_to_data(hsrlinstrument,None)
    print filelocation
    filelocation=os.path.join(filelocation,imtime.strftime("%Y/%m/%d/images/"))
    try:
        os.makedirs(filelocation)
    except OSError:
        pass
    print filelocation
    if os.getenv("DEBUG",None):
        filelocation='.'
        print 'DEBUG is on. storing in current directory'
    figs=v.figs
    extensionsList=('.png','.jpg','.gif','.jpeg')
    preferredFormat='jpg'
    preferredExtension='.'+preferredFormat
    if isHsrl:
        #figs=du.show_images(instrument,rs,None,{},process_control,disp,None,None,None)

        #for x in figs:
        #    fig = figs.figure(x)
        #    fig.canvas.draw()
        
        print figs
        f=figs.figure(bsfigname)
        #f.set_size_inches(f.get_size_inches(),forward=True)
        if frappe:
            frappetag=imtime.strftime('upperair.UW_HSRL.%Y%m%d%H%M.BAO_UWRTV_')
            hiresfile=os.path.join('.',frappetag+'backscatter_%ikm%s' %(range_km,preferredExtension))
            ensureOnlyOne(hiresfile,extensionsList)
            f.savefig(hiresfile,format=preferredFormat,bbox_inches='tight')
        else:
            hiresfile=os.path.join(filelocation,bsprefix+"_depol" + file_timetag + preferredExtension)
            ensureOnlyOne(hiresfile,extensionsList)
            f.savefig(hiresfile,format=preferredFormat,bbox_inches='tight')
        if disp.enabled('raw_color_ratio_image'):
            f=figs.figure('color_ratio_image')
            #f.set_size_inches(f.get_size_inches(),forward=True)
            hiresfileir=os.path.join(filelocation,"ratioIR" + file_timetag + preferredExtension)
            ensureOnlyOne(hiresfileir,extensionsList)
            f.savefig(hiresfileir,format=preferredFormat,bbox_inches='tight')
        figs.close()

        if not full24hour and hasattr(rs,'rs_inv'):
            scale = [float(disp.get_value(bsfigname,'lo_color_lmt')),
                     float(disp.get_value(bsfigname,'hi_color_lmt'))]
            depol=100*getattr(rs.rs_inv,field)
            depolscale = [float(disp.get_value(field+'_image', 'lo_color_lmt')),
                          float(disp.get_value(field+'_image', 'hi_color_lmt'))]

            if attenuated:
                backscat=rs.rs_inv.atten_beta_a_backscat
            elif hasattr(rs.rs_inv,'beta_a_backscat'):
                backscat=rs.rs_inv.beta_a_backscat
            else:
                backscat=rs.rs_inv.beta_a_backscat_par + rs.rs_inv.beta_a_backscat_perp 
            print rs.rs_inv.times.shape
            print backscat.shape
            if disp.get_value(bsfigname,"log_linear")=='log':
                scale=np.log10(scale)
                backscat[backscat<=0]=np.NAN;
                backscat=np.log10(backscat)
            if disp.get_value(field+'_image',"log_linear")=='log':
                depol[depol<=0]=np.NAN;
                depolscale=np.log10(depolscale)
                depol=np.log10(depol)
            print backscat.shape
            qc_mask=None
            if hasattr(rs.rs_inv,'qc_mask'):
               qc_mask=np.ones_like(rs.rs_inv.qc_mask)           
               qcbits={'mol_lost':64,'mol_sn_ratio':16,'cloud_mask':128,'I2_lock_lost':4}
               for name,maskbit in qcbits.items():
                 if disp.get_value('mask_image',name):
                   qc_mask = np.logical_and(rs.rs_inv.qc_mask & maskbit > 0,qc_mask)
            #print np.sum(backscat<=0)
            f=tinythumb(backscat,scale,imtimestr,dpi=usedpi,qcmask=qc_mask)
            thumbname=os.path.join(filelocation,bsprefix + file_timetag + '_thumb'+preferredExtension)
            print thumbname
            ensureOnlyOne(thumbname,extensionsList)
            f.savefig(thumbname,format=preferredFormat,bbox_inches='tight',dpi=usedpi)
            trimborder(thumbname)
            
            f=tinythumb(depol,depolscale,imtimestr,dpi=usedpi,qcmask=qc_mask)
            thumbname=os.path.join(filelocation,'depol' + file_timetag + '_thumb'+preferredExtension)
            ensureOnlyOne(thumbname,extensionsList)
            f.savefig(thumbname,format=preferredFormat,bbox_inches='tight',dpi=usedpi)
            trimborder(thumbname)

            if ir1064 or hasattr(rs.rs_inv,'color_ratio'):
                if hasattr(rs.rs_inv,'qc_mask'):
                   qc_mask=np.ones_like(rs.rs_inv.qc_mask)           
                   qcbits={'mol_lost':64,'mol_sn_ratio':16,'cloud_mask':128,'I2_lock_lost':4,'1064_shutter':0x8000}
                   for name,maskbit in qcbits.items():
                        if disp.get_value('mask_image',name):
                            qc_mask = np.logical_and(rs.rs_inv.qc_mask & maskbit > 0,qc_mask)
                cr=rs.rs_inv.color_ratio
                scale = [float(disp.get_value('color_ratio_image','lo_color_lmt')),
                         float(disp.get_value('color_ratio_image','hi_color_lmt'))]
                if disp.get_value('color_ratio_image',"log_linear")=='log':
                    scale=np.log10(scale)
                    cr[cr<=0]=np.NAN;
                    cr=np.log10(cr)
                f=tinythumb(cr,scale,imtimestr,dpi=usedpi,qcmask=qc_mask)
                thumbname=os.path.join(filelocation,'ratioIR' + file_timetag + '_thumb'+preferredExtension)
                print thumbname
                ensureOnlyOne(thumbname,extensionsList)
                f.savefig(thumbname,format=preferredFormat,bbox_inches='tight',dpi=usedpi)
                trimborder(thumbname)


    elif isRadar:
        f=figs.figure('radar_backscatter_image')
        #f.set_size_inches(f.get_size_inches(),forward=True)
        hiresfile=os.path.join(filelocation,instrumentprefix+"_bscat" + file_timetag + preferredExtension)
        ensureOnlyOne(hiresfile,extensionsList)
        f.savefig(hiresfile,format=preferredFormat,bbox_inches='tight')
        figs.close()
        if not full24hour:
            radarscale = [float(disp.get_value('radar_backscatter_image', 'lo_color_lmt')),
                          float(disp.get_value('radar_backscatter_image', 'hi_color_lmt'))]
            radar=rs.Backscatter
            if disp.get_value('radar_backscatter_image','log_linear')=='log':
                radar[radar<=0]=np.NAN
                radarscale=np.log10(radarscale)
                radar=np.log10(radar)
            f=tinythumb(radar,radarscale,imtimestr,dpi=usedpi)
            thumbname=os.path.join(filelocation,instrumentprefix+'_bscat' + file_timetag + '_thumb'+preferredExtension)
            ensureOnlyOne(thumbname,extensionsList)
            f.savefig(thumbname,format=preferredFormat,bbox_inches='tight',dpi=usedpi)
            trimborder(thumbname)
    elif isRaman:
        if hasattr(rs,'backscatter'):
            f=figs.figure('rl_backscatter_image')
            #f.set_size_inches(f.get_size_inches(),forward=True)
            hiresfile=os.path.join(filelocation,instrumentprefix+"_bscat" + file_timetag + preferredExtension)
            ensureOnlyOne(hiresfile,extensionsList)
            f.savefig(hiresfile,format=preferredFormat,bbox_inches='tight')
        elif hasattr(rs,'beta'):
            f=figs.figure('rl_backscatter_image')
            #f.set_size_inches(f.get_size_inches(),forward=True)
            hiresfile=os.path.join(filelocation,instrumentprefix+"_beta" + file_timetag + preferredExtension)
            ensureOnlyOne(hiresfile,extensionsList)
            f.savefig(hiresfile,format=preferredFormat,bbox_inches='tight')
        if hasattr(rs,'linear_depol'):
            f=figs.figure('rl_depol_image')
            #f.set_size_inches(f.get_size_inches(),forward=True)
            hiresfile=os.path.join(filelocation,instrumentprefix+"_dep" + file_timetag + preferredExtension)
            ensureOnlyOne(hiresfile,extensionsList)
            f.savefig(hiresfile,format=preferredFormat,bbox_inches='tight')
        figs.close()
        if not full24hour:
            if hasattr(rs,'backscatter'):
                ramanscale = [float(disp.get_value('rl_backscatter_image', 'lo_color_lmt')),
                              float(disp.get_value('rl_backscatter_image', 'hi_color_lmt'))]
                raman=rs.backscatter.copy()
                #print np.nanmax(raman),np.nanmin(raman),' is ramans actual range'
                if disp.get_value('rl_backscatter_image','log_linear')=='log':
                    raman[raman<=0]=np.NAN
                    ramanscale=np.log10(ramanscale)
                    raman=np.log10(raman)
                f=tinythumb(raman,ramanscale,imtimestr,dpi=usedpi)
                thumbname=os.path.join(filelocation,instrumentprefix+'_bscat' + file_timetag + '_thumb'+preferredExtension)
                ensureOnlyOne(thumbname,extensionsList)
                f.savefig(thumbname,format=preferredFormat,bbox_inches='tight',dpi=usedpi)
                trimborder(thumbname)
            if hasattr(rs,'beta'):
                ramanscale = [float(disp.get_value('rl_backscatter_image', 'lo_color_lmt')),
                              float(disp.get_value('rl_backscatter_image', 'hi_color_lmt'))]
                raman=rs.beta.copy()
                #print np.nanmax(raman),np.nanmin(raman),' is ramans actual range'
                if disp.get_value('rl_backscatter_image','log_linear')=='log':
                    raman[raman<=0]=np.NAN
                    ramanscale=np.log10(ramanscale)
                    raman=np.log10(raman)
                f=tinythumb(raman,ramanscale,imtimestr,dpi=usedpi)
                thumbname=os.path.join(filelocation,instrumentprefix+'_beta' + file_timetag + '_thumb'+preferredExtension)
                ensureOnlyOne(thumbname,extensionsList)
                f.savefig(thumbname,format=preferredFormat,bbox_inches='tight',dpi=usedpi)
                trimborder(thumbname)
            if hasattr(rs,'linear_depol'):
                ramanscale = [float(disp.get_value('rl_depol_image', 'lo_color_lmt')),
                              float(disp.get_value('rl_depol_image', 'hi_color_lmt'))]
                raman=rs.linear_depol.copy()
                #print np.nanmax(raman),np.nanmin(raman),' is ramans actual range'
                if disp.get_value('rl_depol_image','log_linear')=='log':
                    raman[raman<=0]=np.NAN
                    ramanscale=np.log10(ramanscale)
                    raman=np.log10(raman)
                f=tinythumb(raman,ramanscale,imtimestr,dpi=usedpi)
                thumbname=os.path.join(filelocation,instrumentprefix+'_dep' + file_timetag + '_thumb'+preferredExtension)
                ensureOnlyOne(thumbname,extensionsList)
                f.savefig(thumbname,format=preferredFormat,bbox_inches='tight',dpi=usedpi)
                trimborder(thumbname)

    today=datetime.utcnow()
    if not frappe and not ismf2ship and full24hour and imetime.year==today.year and imetime.month==today.month and imetime.day==today.day:
        destpath=os.getenv("DAYSTORE",os.path.join('/','var','ftp','public_html','hsrl'))
        destfile=os.path.join(destpath,full24hour + '_current'+preferredExtension)
        fulln=hiresfile
        if not fulln:
            return
        outf=file(destfile,'w')
        inf=file(fulln,'r')
        ensureOnlyOne(destfile,extensionsList)
        outf.write(inf.read())
        inf.close()
        outf.close()
        os.unlink(fulln)
    elif ismf2ship:
        if not hiresfile:
            return
        destpath='./'#os.getenv("DAYSTORE",os.path.join('/','var','ftp','public_html','hsrl'))
        destfile=os.path.join(destpath,realend.strftime('hsrl_%Y%m%d%H%M')+preferredExtension)
        fulln=hiresfile
        outf=file(destfile,'w')
        inf=file(fulln,'r')
        ensureOnlyOne(destfile,extensionsList)
        outf.write(inf.read())
        inf.close()
        outf.close()
        os.unlink(fulln)
        recompressImage(destfile,quality=30,optimize=True)
        varlist=('rs_mean.times','rs_mean.latitude','rs_mean.longitude','rs_mean.transmitted_energy','rs_mean.seedvoltage',\
            'rs_mean.seeded_shots','rs_mean.coolant_temperature','rs_mean.laserpowervalues','rs_mean.opticalbenchairpressure',\
            'rs_mean.humidity','rs_mean.l3locking_stats','rs_mean.l3cavityvoltage','rs_mean.nonfiltered_energy','rs_mean.filtered_energy',\
            'rs_mean.builduptime','rs_mean.one_wire_temperatures')
        skip=0
        while skip<16:
           try:
              print 'skip is ',skip
              addExifComment(destfile,makedict(n,varlist,skip=skip))
              break
           except Exception as e:
              skip+=1
              print e
              traceback.print_exc()
              pass
        sendSCPFile(destfile,host='198.129.80.15',path='/ftpdata/outgoing',user='******',keyfile=os.getenv('MF2_HSRL_KEY'))
    elif frappe:
        if not hiresfile:
            return
        sendFTPFile(hiresfile,host='catalog.eol.ucar.edu',path='/pub/incoming/catalog/frappe/',user='******',password='******')
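A hedged invocation sketch for the driver above (instrument name and date are illustrative; running it requires the full dpl/hsrl stack and access to the data archive):

    # Illustrative only -- regenerate half-day archive images and thumbnails for an HSRL,
    # written under <data_path>/YYYY/MM/DD/images/ (or '.' when DEBUG is set).
    import datetime
    makeArchiveImages('bagohsrl', datetime.datetime(2015, 6, 1, 6, 0, 0),
                      range_km=15, reprocess=True)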
Example 20
    def __init__(self, wavelength, particle_parameters):
        """Calculate the size-distribution-weighted p180/4pi from Mie q-factors contained in
        the 'cooperative/config/mie_coefficients.txt' file."""

        #if not particle_parameters['p180_water']['size_distribution'] == 'None':
        if not particle_parameters['size_distribution'] == 'None':
            mie_filename = locate_file('mie_coefficients_0_18000.txt')
            if os.path.isfile(mie_filename):
                [hh, dd] = ru.readascii(mie_filename)
            else:
                print "unable to perform adaptive p180 operation"
                raise RuntimeError("Can't find mie results in '%s'" % mie_filename)
            """
            alpha = particle_parameters['alpha_water']
            gam   = particle_parameters['g_water']
            """

            if 'modified_gamma' in particle_parameters['size_distribution']:
                alpha = particle_parameters['size_distribution'][
                    'modified_gamma']['alpha_water']
                gam = particle_parameters['size_distribution'][
                    'modified_gamma']['g_water']
            elif 'oconnor_gamma' in particle_parameters['size_distribution']:
                alpha = particle_parameters['size_distribution'][
                    'oconnor_gamma']['alpha_water']
            else:
                raise RuntimeError("unknown particle size distribution")

            #convert wavenumber to diameter in microns
            self.D = dd[:, 0] * wavelength * 1e6 / np.pi
            #q mie scattering
            qsca = dd[:, 2]
            #q mie backscatter
            qback = dd[:, 3]

            #p180/4pi vs diameter table
            self.p180_vs_d_table = np.zeros_like(self.D)

            #if particle_parameters['p180_water']['size_distribution'] == 'modified_gamma':
            if 'modified_gamma' in particle_parameters['size_distribution']:
                for i in range(1, len(self.D) - 1):
                    self.p180_vs_d_table[i] = np.sum(self.modified_gamma(self.D,self.D[i],alpha,gam)\
                        * qback)/(4.0*np.pi)\
                        /np.sum(self.modified_gamma(self.D,self.D[i],alpha,gam)*qsca)
            elif 'oconnor_gamma' in particle_parameters['size_distribution']:
                for i in range(len(self.D)):
                    self.p180_vs_d_table[i] = np.sum(self.oconnor_gamma(self.D,self.D[i],alpha)\
                       * qback)/(4.0*np.pi)\
                       /np.sum(self.oconnor_gamma(self.D,self.D[i],alpha)*qsca)
            else:
                raise RuntimeError("unrecognized size distribution type: "
                                   + str(particle_parameters['size_distribution']))

        else:  #constant value of p180/4pi requested
            self.D = None
            self.p180_vs_d_table = particle_parameters['p180_water']['value']
        return
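In equation form, the modified-gamma branch above builds the size-distribution-weighted backscatter phase function table

    \frac{P(180)}{4\pi}(D_i) = \frac{\sum_D n(D;D_i)\, q_{back}(D)}{4\pi\, \sum_D n(D;D_i)\, q_{sca}(D)}

with n(D;D_i) the modified-gamma distribution and the sums taken over the diameters read from the Mie coefficient file (the oconnor_gamma branch is the same with its one-parameter distribution).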
Example 21
def read_mie_files(radar_wavelength):
    """[D, qsca_lidar, qback_lidar, qsca_radar, qback_radar]
                 = read_mie_files(radar_wavelength)
           Read scattering and backscatter efficiencies from Mie files
           and provide them as a function of diameter.
           radar_wavelength in meters (either 3.154e-3 or 8.6e-3).
           D = vector of diameters (microns) at which the efficiencies are provided.
        """
    scattering_model = 'Mie'  # 'Rayleigh' OR  'Mie'

    if scattering_model == 'Mie':  #for mie cross sections
        #reading mie theory for lidar
        mie_filename = locate_file('mie_coefficients_0_18000.txt')
        if os.path.isfile(mie_filename):
            print 'reading  ' + mie_filename
            [hh_lidar, dd_lidar] = ru.readascii(mie_filename)
            lambda_lidar = 0.532  #in microns
        else:
            print "unable to compute radar/lidar backscatter ratio"
            raise RuntimeError, ("Can't find mie results in '", mie_filename,
                                 "'")
        print
        print 'lambda', radar_wavelength
        print
        if radar_wavelength == 3.154e-3:
            radar_mie_filename = locate_file('mie_data_3.2mm_285K.txt')
            lambda_radar = radar_wavelength * 1e6  #in microns
        elif radar_wavelength == 8.6e-3:
            lambda_radar = radar_wavelength * 1e6  #wavelength in m converted to microns
            radar_mie_filename = locate_file('mie_data_8.6mm_285K.txt')

        else:
            print "unable to compute radar/lidar backscatter ratio"
            raise RuntimeError, ("Can't find mie results in '",
                                 radar_mie_filename, "'")
    if scattering_model == 'Mie':
        print 'reading  ' + radar_mie_filename

        [hh_radar, dd_radar] = ru.readascii(radar_mie_filename)

        #convert wavenumber to diameter in microns
        dmax_lidar = dd_lidar[-1, 0] * lambda_lidar / np.pi
        #find largest diameter in microns for this radar wavelength
        dmax_radar = dd_radar[-1, 0] * lambda_radar / np.pi

        #limit max size to smallest max of lidar or radar
        d_max = np.min([dmax_lidar, dmax_radar])
        D = np.arange(d_max, dtype='float')
        #place lidar and radar backscat eff on common scale--1 micron/pt
        #and divide backscatter by 4pi to conform to Deirmenjian normalized phase function
        qsca_lidar = np.interp(D, dd_lidar[:, 0] * lambda_lidar / np.pi,
                               dd_lidar[:, 2])
        qback_lidar = np.interp(D, dd_lidar[:, 0] * lambda_lidar / np.pi,
                                dd_lidar[:, 3])
        qback_lidar = qback_lidar / (4.0 * np.pi)
        qsca_radar = np.interp(D, dd_radar[:, 0] * lambda_radar / np.pi,
                               dd_radar[:, 2])
        qback_radar = np.interp(D, dd_radar[:, 0] * lambda_radar / np.pi,
                                dd_radar[:, 3])
        qback_radar = qback_radar / (4.0 * np.pi)

        if 0:
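            # disabled diagnostic plot: 'size_dist' is not defined in this snippet,
            # so enabling this branch as-is would raise a NameError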
            import matplotlib.pylab as plt

            plt.plot(D, size_dist.area_weighted_number(D, D[200], 'water'))
            plt.ylabel('number density')
            plt.xlabel('Diameter (microns)')
            ax = plt.gca()
            ax.set_xscale('log')
            ax.grid(True)

#for rayleigh radar cross section and geometric lidar with p180/4pi=0.65
    if scattering_model == 'Rayleigh':
        D = np.arange(10000, dtype='float')
        k_sq_water, k_sq_ice = K_squared(radar_wavelength)
        qsca_lidar = 2.0 * np.ones_like(D)
        qback_lidar = 0.065 * qsca_lidar
        #qsca_radar from van de Hulst page 70
        qsca_radar = (8.0 / 3.0) * np.pi**4 * k_sq_water * (
            D * 1e-6 / radar_wavelength)**4
        qback_radar = 3 * qsca_radar / (8.0 * np.pi)

    return D, qsca_lidar, qback_lidar, qsca_radar, qback_radar
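A hedged usage sketch for read_mie_files; it assumes the Mie coefficient text files named above can be found by locate_file(), and the index used is illustrative:

# all five returned arrays share the same 1-micron-per-point diameter grid D
D, qsca_lidar, qback_lidar, qsca_radar, qback_radar = read_mie_files(3.154e-3)

i = 100                                   # D[i] = 100 microns on the common grid
ratio = qback_radar[i] / qback_lidar[i]   # radar/lidar backscatter efficiency ratio
print(ratio)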
Example n. 22
0
def write_cfradial(output_filename,
                   start_dt,
                   minutes,
                   timeres_s=5,
                   altres_m=60,
                   maxtimeslice_td=datetime.timedelta(seconds=30 * 60),
                   instrument='gvhsrl',
                   min_alt_m=0,
                   max_alt_m=5000,
                   store_calibrations=False):
    """ writes HSRL data in CfRadial netcdf format

          output_filename        = where to write the data
          start_dt               = datetime.datetime object, first time to retrieve
          minutes                = how many minutes to process
          timeres_s              = time resolution in seconds (native would be 2.5)
          altres_m               = altitude resolution in meters
          maxtimeslice_td        = datetime.timedelta object limiting how much data is processed at once (safe is 1 or 2 hours)
          instrument             = hsrl id string (eg. 'ahsrl','gvhsrl','nshsrl','mf2hsrl')
          min_alt_m              = minimum altitude in meters to display
          max_alt_m              = maximum altitude in meters to display
          store_calibrations     = if True, also write calibration variables to the netcdf
    """

    cdl = locate_file('hsrl_cfradial.cdl', forModule=lgtb)
    print 'CDL = ', cdl
    timeres_td = datetime.timedelta(seconds=timeres_s)

    netcdf = Dataset(output_filename, 'w', clobber=True)
    delta = datetime.timedelta(minutes=minutes)
    timeres_delta = datetime.timedelta(seconds=timeres_s)
    end_dt = start_dt + delta

    gen = dpl_hsrl(instrument)

    if store_calibrations:  # to store calibrations, newer actors are needed, as well as the precall methods (FIXME better design)
        import maestro.netcdf_precall as npc
        args = []
        kwargs = dict(output=netcdf,
                      template=cdl,
                      usecfradial=True,
                      basetime=start_dt)
        x = npc.addConstantsToParms(npc.addCalibrationsToNetCDF())
        hsrlnar = gen(start_dt,
                      end_dt,
                      timeres_timedelta=timeres_delta,
                      min_alt_m=min_alt_m,
                      max_alt_m=max_alt_m,
                      altres_m=altres_m)
        x(hsrlnar, args, kwargs)
        nar = artists.dpl_netcdf_artist(hsrlnar, *args, **kwargs)
        #framestream,template,outputfilename=None,format=None,usecfradial=None,selected_bindings=None,output=None,forModule=None,withUnlimited=None,basetime=None,addAttributes={}):
        for x in nar:
            pass
    else:
        v = None
        try:
            # store each lidar record
            for tzg in gen(start_dt,
                           end_dt,
                           timeres_timedelta=timeres_delta,
                           min_alt_m=min_alt_m,
                           max_alt_m=max_alt_m,
                           altres_m=altres_m):
                if v == None:
                    v = cfr.DplCreateCfradial(cdl, netcdf, tzg)
                v.append_data(tzg)

            if v is not None:
                v.close()

        except RuntimeError, msg:
            print msg
            traceback.print_exc()
            print 'write_cfradial: could not process data for %s starting at %s' % \
                  (instrument, start_dt.strftime('%Y-%m-%d %H:%M:%S'))
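A hedged usage sketch for write_cfradial; the output path, start time, and duration are illustrative, and a configured gvhsrl data archive is assumed to be reachable by dpl_hsrl:

import datetime

write_cfradial('gvhsrl_example.cfradial.nc',          # illustrative output path
               datetime.datetime(2015, 4, 1, 0, 0),   # illustrative start time
               minutes=30,
               timeres_s=5,
               altres_m=60,
               instrument='gvhsrl',
               min_alt_m=0,
               max_alt_m=5000,
               store_calibrations=False)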
Example n. 23
0
    def __init__(calvals,
                 filename=None,
                 instrument=None,
                 systemOnly=True,
                 varlist=None,
                 level=0):
        if filename is None:
            filename = 'calvals_' + instrument + '.txt'
            filename = locate_file(filename,
                                   systemOnly=systemOnly,
                                   level=1 +
                                   level)  #level 1 means find based on caller
        print 'cal_file_reader: opening ', filename
        fileid = open(filename)
        # fileid = open_config(filename)
        import lg_base.core.read_utilities as hru
        name = None
        date_value = []
        cal_vec = np.ndarray(3)
        realunits = None
        for line in fileid:
            if '#' in line:
                line = line[:line.find('#')]  #strip any comment suffix
                if len(line) == 0:
                    continue
            if line[0] != ' ':  #header line
                #if this is not the first time through
                if name:
                    if vars(calvals).has_key(name):
                        raise RuntimeError, 'Error - duplicate entry for %s in %s' % (
                            name, filename)
                    setattr(calvals, name, date_value)
                date_value = []
                index = line.find('(')
                index2 = line.find(')')
                if index >= 0:
                    name = st.rstrip(st.lstrip(line[:index]))
                    units = st.rstrip(st.lstrip(line[index + 1:index2]))
                    if len(st.rstrip(st.lstrip(line[index2 + 1:]))) > 0:
                        raise RuntimeError, "Format error. Variable has excess content after units. %s in %s" % (
                            name, filename)
                else:
                    if index2 >= 0:
                        raise RuntimeError, "Format error. Variable names can't have parentheses. %s in %s" % (
                            name, filename)
                    name = st.rstrip(st.lstrip(line))
                    units = None
                if varlist is not None and name not in varlist:
                    name = None
                realunits = units
                k = 0
            elif len(st.lstrip(line)) > 0:  #record line
                #look for required comma after effective date string
                units = realunits
                try:
                    index = line.index(',')
                except ValueError:
                    print ' '
                    print 'ERROR ***************missing comma in calvals file **********'
                    print line
                    print ' '
                    raise RuntimeError(
                        'Missing comma in calvals file on line: %s' % (line))

                e_date = hru.convert_date_str(
                    line[:index], twodigityearok_fubar=True)['datetime']

                line = line[(index + 1):]
                #check for string value as return
                if line.find("'") >= 0 or line.find('"') >= 0:
                    string_var = st.rstrip(st.lstrip(line))
                    string_var = st.replace(string_var, "'", "")
                    string_var = st.replace(string_var, '"', '')
                    date_value.append(list())
                    date_value[k].append((e_date, string_var, None))
                #check for calval vector
                elif line.find('[') >= 0 and line.find(']') >= 0:
                    index = line.index('[')
                    index2 = line.index(']')
                    tmp_vec = eval(line[(index + 1):index2])
                    line = line[(index2 + 1):]
                    if line[0] == '/':
                        divisor = float(eval(line[1:]))
                        cal_vec = np.array(tmp_vec) / divisor
                    elif line[0] == '*':
                        multiplier = float(eval(line[1:]))
                        cal_vec = np.array(tmp_vec) * multiplier
                    elif isinstance(tmp_vec, tuple):
                        cal_vec = np.array(tmp_vec)
                    else:  # special case for scalars - don't save as array
                        cal_vec = tmp_vec
                    date_value.append(list())
                    if units is not None and units == "deg W":
                        cal_vec = -cal_vec
                        units = "deg E"
                    date_value[k].append((e_date, cal_vec, units))
                else:
                    print ' '
                    print 'Error*************calvals syntax error****************'
                    print line
                    print ' '
                    raise RuntimeError('Calvals syntax error on line: %s' %
                                       (line))
                k = k + 1
        if name is not None:
            if hasattr(calvals, name):
                raise RuntimeError, 'Error - duplicate entry for %s in %s' % (
                    name, filename)
            setattr(calvals, name, date_value)
        # validate all entries contain descending times
        for (name, value) in vars(calvals).items():
            cur_time = datetime(2200, 1, 1, 0, 0, 0)
            for i in range(len(value)):
                if cur_time < value[i][0][0]:
                    print 'cal_file_reader for %s: %s < %s' % (name, cur_time,
                                                               value[i][0][0])
                    raise RuntimeError, 'Error - non-descending time values in %s for %s' % (
                        filename, name)
                cur_time = value[i][0][0]
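For reference, a hedged sketch of the calvals text layout this reader appears to expect, inferred from the parsing logic above rather than from project documentation: header lines start in column 0 and may carry a unit in parentheses, indented record lines pair an effective-date string with either a quoted string or a bracketed vector (optionally scaled by a trailing '*' or '/' factor), '#' starts a comment, and record dates must run from newest to oldest. Variable names, dates, and values below are illustrative, and the date form is assumed to be one accepted by hru.convert_date_str:

# hypothetical fragment of calvals_gvhsrl.txt
polarization_cross_talk
   1-feb-12 , [0.015]
   1-jan-08 , [0.02]
lidar_altitude (m)
   1-jan-08 , [1100.0]
sounding_id
   1-jan-08 , 'NWS Denver'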