Example #1
    def write_oco_ascii(self, filename=None):
        if not self.model:
            self.create_model()

        fn_date_str = self.model_dt.strftime("%Y%m%d%H%M")
        id_date_str = self.model_dt.strftime("%Y-%m-%dT%H:%M:%S")
        if not filename:
            filename = "model_%s.dat" % (fn_date_str)

        nlev = self.model.pressure.data.shape[0]

        # Convert pressure from mbar to Pascals
        press_mbar = self.model.pressure
        press_pa = press_mbar._replace(data=press_mbar.data * 100.0)
        data_in_order = (press_pa, self.model.height, self.model.temperature,
                         self.model.h2o_vmr)
        data_arr = zeros((nlev, len(data_in_order)), dtype=float)

        for idx in range(len(data_in_order)):
            # Reverse data to be increasing pressure order
            data_arr[:, idx] = data_in_order[idx].data[::-1]

        # Import here so this code does not depend on the full_physics module unless this method is used
        from full_physics.oco_matrix import OcoMatrix

        out_mat = OcoMatrix()

        out_mat.file_id = "Atmosphere Model Created from NCEP data interpolated to latitude: %f, longitude: %f on %s" % (
            self.site_lat, self.site_lon, id_date_str)

        out_mat.units = []
        out_mat.labels = []
        for val in data_in_order:
            out_mat.units.append(val.units)
            if val.name == "Temperature":
                out_mat.labels.append("T")
            else:
                out_mat.labels.append(val.name)

        out_mat.header["Surface_Pressure"] = self.model.surface_pressure.data
        out_mat.header[
            "Surface_Pressure_Units"] = self.model.surface_pressure.units

        out_mat.data = data_arr

        logger.debug("Writing to ASCII file: %s" % filename)
        out_mat.write(filename)
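Note that press_mbar._replace(...) above relies on each model quantity (pressure, height, temperature, h2o_vmr) being a namedtuple-like record with name, units and data fields. A minimal sketch of that assumed structure (the ModelField name and values below are hypothetical, not taken from the source):

# Sketch only: assumed shape of the model fields consumed by write_oco_ascii()
from collections import namedtuple
import numpy as np

ModelField = namedtuple("ModelField", ["name", "units", "data"])

pressure_mbar = ModelField(name="Pressure", units="mbar", data=np.linspace(1000.0, 10.0, 17))
# _replace() returns a copy with the given field swapped out, leaving the original untouched
pressure_pa = pressure_mbar._replace(data=pressure_mbar.data * 100.0)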
Example #2
    def create_psurf_apriori_file(self, spectrum_filename,
                                  psurf_output_filename):
        base_spec_name = os.path.basename(spectrum_filename)

        # Use grep because it's faster than doing it ourselves
        grep_cmd = "grep -E " + base_spec_name + " " + self.runlog_filename
        matched_line = os.popen(grep_cmd).readline()

        if not matched_line:
            raise IOError('Could not find spectrum name: %s in run log file: %s' % (base_spec_name, self.runlog_filename))

        try:
            matched_columns = matched_line.split()
            psurf_val = float(matched_columns[self.pout_col_idx]) * self.convert_factor
        except (IndexError, ValueError):
            raise ValueError('Failed to parse psurf value from column %d of runlog line: %s' % (self.pout_col_idx, matched_line))
   
        out_obj = OcoMatrix()
    
        out_obj.data = numpy.zeros((1,1), dtype=float)
        out_obj.data[0,0] = psurf_val

        out_obj.file_id = 'psurf value extracted for spectrum named: %s from runlog file: %s' % (base_spec_name, self.runlog_filename)
        out_obj.labels = ['PSURF']
        
        out_obj.write(psurf_output_filename)
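The grep shell-out above fetches the first runlog line containing the spectrum's basename; a roughly equivalent pure-Python lookup (a sketch with a hypothetical helper name, not part of the original class) would be:

# Sketch only: pure-Python replacement for the os.popen("grep ...") call
def find_runlog_line(runlog_filename, base_spec_name):
    with open(runlog_filename) as runlog:
        for line in runlog:
            if base_spec_name in line:
                return line
    return ""

Example #3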
def solver_old_method():
    """Write a temporary dispersion apriori file with create_scene_dispersion_file,
    read it back through OcoMatrix, and return the transposed dispersion coefficients."""
    sys.path.append("../operations/populator/modules/")
    from load_disp_apriori import create_scene_dispersion_file
    from full_physics.oco_matrix import OcoMatrix

    latitude = lua_config.l1b.latitude(0).value
    sza_r = math.radians(lua_config.l1b.solar_zenith(0).value)
    saz_r = math.radians(lua_config.l1b.solar_azimuth(0).value)
    time_struct = lua_config.l1b.time().timetuple()
    aband_data = lua_config.l1b.radiance(0).data()
    apriori_out_file = "./tmp_disp_solve.dat"
    create_scene_dispersion_file(lua_config.sid_string, latitude, sza_r, saz_r,
                                 time_struct, aband_data, dispersion_coefs,
                                 apriori_out_file)
    # Read the generated file back, dropping the first column and transposing the coefficients
    result = np.transpose(OcoMatrix(apriori_out_file).data[:, 1:])
    os.remove(apriori_out_file)
    return result
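Example #4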
    def write_oco_ascii(self, filename=None):
        if not self.model:
            self.create_model()

        fn_date_str = self.model_dt.strftime("%Y%m%d%H%M")
        id_date_str = self.model_dt.strftime("%Y-%m-%dT%H:%M:%S")
        if not filename:
            filename = "model_%s.dat" % (fn_date_str)

        nlev = self.model.pressure.data.shape[0]

        # Convert pressure from mbar to Pascals
        press_mbar = self.model.pressure 
        press_pa = press_mbar._replace(data=press_mbar.data * 100.0)
        data_in_order = (press_pa, self.model.height, self.model.temperature, self.model.h2o_vmr)
        data_arr = zeros((nlev, len(data_in_order)), dtype=float)

        for idx in range(len(data_in_order)):
            # Reverse data to be increasing pressure order
            data_arr[:, idx] = data_in_order[idx].data[::-1]

        # Import here so this code does not depend on the full_physics module unless this method is used
        from full_physics.oco_matrix import OcoMatrix

        out_mat = OcoMatrix()

        out_mat.file_id = "Atmosphere Model Created from NCEP data interpolated to latitude: %f, longitude: %f on %s" % (self.site_lat, self.site_lon, id_date_str)

        out_mat.units = [ v.units for v in data_in_order ]
        out_mat.labels = [ v.name for v in data_in_order ]

        out_mat.header["Surface_Pressure"] = self.model.surface_pressure.data
        out_mat.header["Surface_Pressure_Units"] = self.model.surface_pressure.units

        out_mat.data = data_arr

        logger.debug("Writing to ASCII file: %s" % filename)
        out_mat.write(filename)
Example #5
    def set_input_config_values(self,
                                sounding_id_file,
                                sounding_id_sect,
                                input_config_filename,
                                input_file_list=[],
                                **kwargs):
        '''This writes the input configuration values to the input configuration
        file (i.e., the sdos_input_list.dat file)'''

        keyword_defaults = {
            'file_id': 'Scalar Retrieval Outputs',
            'exe_path': None,
            'exe_version': None,
            'data_version': None,
            'release_version': None,
            'comments': '',
            'algorithm_descriptor': None,
            'algorithm_maturity': None,
            'l2_input_path': None,
            'number_soundings': None,
        }
        # Load sounding id list so we can leave a count in the config file we produce
        sounding_id_list = self.read_id_list_file(sounding_id_file,
                                                  sounding_id_sect)

        file_keywords = {}
        file_keywords.update(keyword_defaults)
        file_keywords.update(kwargs)

        file_keywords['number_soundings'] = len(sounding_id_list)

        # Try getting versions from the binary file itself first
        exe_version = None
        data_version = None
        if 'exe_path' in file_keywords and file_keywords['exe_path'] != None:
            try:
                ver_ret = binary_version(file_keywords['exe_path'])
            except OSError as exc:
                raise OSError(
                    "Could not execute L2 binary: %s due to error: %s" %
                    (file_keywords['exe_path'], exc))

            if ver_ret:
                file_keywords['release_version'] = ver_ret[0]
                exe_version = ver_ret[1]
                self.logger.debug(
                    'Retrieved release_version "%s" from binary %s' %
                    (file_keywords['release_version'],
                     file_keywords['exe_path']))
                self.logger.debug('Retrieved exe_version "%s" from binary %s' %
                                  (exe_version, file_keywords['exe_path']))

                data_version = ver_ret[2]
                if data_version != None:
                    self.logger.debug(
                        'Retrieved data_version "%s" from binary %s' %
                        (data_version, file_keywords['exe_path']))

            # If the binary doesn't have any version information, then try looking for a CM directory
            # where the executable lives
            if exe_version == None:
                exe_dir = os.path.dirname(file_keywords['exe_path'])
                exe_version = source_version(exe_dir)
                if exe_version != None:
                    self.logger.debug(
                        'Retrieved exe_version "%s" from binary containing directory %s'
                        % (exe_version, exe_dir))

        # If the binary is not in a source-controlled directory, try src_path, which probably
        # came from an environment variable
        if exe_version == None and 'src_path' in file_keywords and file_keywords[
                'src_path'] != None:
            exe_version = source_version(file_keywords['src_path'])
            if exe_version != None:
                self.logger.debug(
                    'Retrieved exe_version "%s" from source directory %s' %
                    (exe_version, file_keywords['src_path']))

        if exe_version:
            file_keywords['exe_version'] = exe_version
        else:
            self.logger.error(
                "Could not determine exe_version from executable: %s or source path: %s"
                % (file_keywords['exe_path'], file_keywords['src_path']))

        # If there was no binary version extracted from the binary then search for it from
        # the data_path variable
        if data_version != None:
            file_keywords['data_version'] = data_version
        elif 'data_path' in file_keywords and file_keywords[
                'data_path'] != None:
            data_version = source_version(file_keywords['data_path'])
            self.logger.debug(
                'Retrieved data_version "%s" from %s' %
                (data_version, file_keywords['data_path']))

        if data_version:
            file_keywords['data_version'] = data_version
        else:
            self.logger.error(
                "Could not determine data_version from path: %s" %
                file_keywords['data_path'])

        if 'L2_INPUT_PATH' in os.environ:
            file_keywords['l2_input_path'] = os.environ['L2_INPUT_PATH']

        self.logger.debug('Writing input file config file: %s' %
                          input_config_filename)
        out_mat_obj = OcoMatrix()

        # Set items into the input config file from values specified in the configuration file
        for head_key_name, head_key_value in file_keywords.items():
            if hasattr(out_mat_obj, head_key_name):
                self.logger.debug('Set %s as an attribute' % head_key_name)

                prev_value = getattr(out_mat_obj, head_key_name)
                setattr(out_mat_obj, head_key_name, head_key_value)
            else:
                self.logger.debug('Set %s into header' % head_key_name)

                if isinstance(head_key_value, six.binary_type):
                    head_key_value = head_key_value.decode('UTF-8')

                if isinstance(
                        head_key_value,
                        six.string_types) and head_key_value.find(' ') >= 0:
                    out_mat_obj.header[head_key_name] = '"%s"' % head_key_value
                elif head_key_value == None:
                    out_mat_obj.header[head_key_name] = 'VALUE NOT SET'
                else:
                    out_mat_obj.header[head_key_name] = '%s' % head_key_value

        out_mat_obj.data = [
            fn for fn in input_file_list if fn != None and len(fn) > 0
        ]
        out_mat_obj.write(input_config_filename, auto_size_cols=False)
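The header-writing loop above quotes string values containing spaces and substitutes a placeholder for None. A standalone sketch of that rule (using plain str where the original checks six.string_types):

# Sketch only: the header value formatting applied in the loop above
def format_header_value(value):
    if isinstance(value, str) and ' ' in value:
        return '"%s"' % value
    elif value is None:
        return 'VALUE NOT SET'
    else:
        return '%s' % value

print(format_header_value('Scalar Retrieval Outputs'))  # -> "Scalar Retrieval Outputs"
print(format_header_value(None))                        # -> VALUE NOT SET
print(format_header_value(42))                          # -> 42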
Example #6
def reformat_gfit_atmosphere(mav_file, out_file, next_spec_srch=None):

    print('Reading mav data from %s' % mav_file)

    spec_line_start = 1

    mav_data = []
    mav_fobj = open(mav_file, "r")
    file_line_idx = 0

    if next_spec_srch == None:
        found_spec = True
    else:
        found_spec = False
    for mav_line in mav_fobj.readlines():
        line_parts = mav_line.split()
        mav_data.append(line_parts)

        if mav_line.find('Next Spectrum:') >= 0 and next_spec_srch != None:
            if re.search(next_spec_srch, mav_line):
                spec_line_start = file_line_idx
                found_spec = True

        file_line_idx += 1

    if not found_spec:
        raise ValueError(
            'Could not find next spectrum search string: %s in mav file: %s' %
            (next_spec_srch, mav_file))

    print('Processing for', ' '.join(mav_data[spec_line_start]))

    mav_size_row = spec_line_start + 1
    mav_header_row = mav_size_row + 2

    try:
        (num_skip, num_cols,
         num_rows) = [int(val) for val in mav_data[mav_size_row]]
    except (ValueError, IndexError):
        # No parseable size line; fall back to treating the whole file as the data block
        mav_header_row = 0
        num_skip = -2
        num_cols = len(mav_data[0])
        num_rows = len(mav_data)

    print()

    print("Skip: %d, Cols %d, Rows: %d" % (num_skip, num_cols, num_rows))

    mav_beg_row = mav_size_row + num_skip + 2
    mav_end_row = mav_beg_row + num_rows - 3

    mav_all_cols = mav_data[mav_header_row]

    print("Column names:", mav_all_cols)

    out_col_idx = 0
    output_data_matrix = numpy.zeros(
        (mav_end_row - mav_beg_row + 1, len(all_col_names)), dtype=float)

    for (curr_mav_col, scale) in mav_col_extract:
        print('Processing column:', curr_mav_col)
        mav_col_idx = mav_all_cols.index(curr_mav_col)
        row_idx = mav_end_row - mav_beg_row

        for mav_row_data in mav_data[mav_beg_row:mav_end_row + 1]:
            new_col_data = float(mav_row_data[mav_col_idx]) * float(scale)
            output_data_matrix[row_idx, out_col_idx] = output_data_matrix[
                row_idx, out_col_idx] + new_col_data
            row_idx -= 1

        out_col_idx += 1

    print('Writing output file %s' % out_file)
    out_mat_obj = OcoMatrix()
    out_mat_obj.file_id = 'GFIT Atmospheric State modified from: %s' % (
        mav_file)
    out_mat_obj.dims = [len(output_data_matrix), len(all_col_names)]
    out_mat_obj.labels = all_col_names
    out_mat_obj.units = all_unit_names
    out_mat_obj.data = output_data_matrix
    out_mat_obj.write(out_file)
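The function above relies on three module-level globals that the example does not show; a sketch of their assumed shape (the column names, units and scale factors below are placeholders, not from the source):

# Sketch only: mav_col_extract pairs a .mav column name with a scale factor,
# while all_col_names / all_unit_names label and annotate the output columns.
mav_col_extract = [("Pres", 1.0), ("Temp", 1.0), ("1h2o", 1.0)]
all_col_names = ["Pressure", "Temperature", "H2O"]
all_unit_names = ["mbar", "Kelvin", "VMR"]

Example #7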
def reformat_gfit_atmosphere(mav_file, out_file, next_spec_srch=None):

    print('Reading mav data from %s' % mav_file)

    spec_line_start = 1

    mav_data = []
    mav_fobj = open(mav_file, "r")
    file_line_idx = 0

    if next_spec_srch == None:
        found_spec = True
    else:
        found_spec = False
    for mav_line in mav_fobj.readlines():
        line_parts = mav_line.split()
        mav_data.append( line_parts )

        if mav_line.find('Next Spectrum:') >= 0 and next_spec_srch != None:
            if re.search(next_spec_srch, mav_line):
                spec_line_start = file_line_idx
                found_spec = True
                
        file_line_idx += 1

    if not found_spec:
        raise ValueError('Could not find next spectrum search string: %s in mav file: %s' % (next_spec_srch, mav_file))

    print('Processing for', ' '.join(mav_data[spec_line_start]))

    mav_size_row   = spec_line_start + 1
    mav_header_row = mav_size_row + 2

    try:
        (num_skip, num_cols, num_rows) = [int(val) for val in mav_data[mav_size_row]]
    except (ValueError, IndexError):
        # No parseable size line; fall back to treating the whole file as the data block
        mav_header_row = 0
        num_skip = -2
        num_cols = len(mav_data[0])
        num_rows = len(mav_data)

    print()

    print "Skip: %d, Cols %d, Rows: %d" % (num_skip, num_cols, num_rows)

    mav_beg_row = mav_size_row + num_skip + 2
    mav_end_row = mav_beg_row + num_rows - 3

    mav_all_cols = mav_data[mav_header_row]

    print "Column names:", mav_all_cols

    out_col_idx = 0
    output_data_matrix = numpy.zeros((mav_end_row-mav_beg_row+1, len(all_col_names)), dtype=float)

    for (curr_mav_col, scale) in mav_col_extract:
        print('Processing column:', curr_mav_col)
        mav_col_idx = mav_all_cols.index(curr_mav_col)
        row_idx = mav_end_row-mav_beg_row

        for mav_row_data in mav_data[mav_beg_row:mav_end_row+1]:
            new_col_data = float(mav_row_data[mav_col_idx]) * float(scale)
            output_data_matrix[row_idx, out_col_idx] = output_data_matrix[row_idx, out_col_idx] + new_col_data
            row_idx -= 1

        out_col_idx += 1

    print('Writing output file %s' % out_file)
    out_mat_obj = OcoMatrix()
    out_mat_obj.file_id = 'GFIT Atmospheric State modified from: %s' % (mav_file)
    out_mat_obj.dims = [len(output_data_matrix), len(all_col_names)]
    out_mat_obj.labels = all_col_names
    out_mat_obj.units = all_unit_names
    out_mat_obj.data = output_data_matrix
    out_mat_obj.write(out_file)
Example #8
    def set_input_config_values(self, sounding_id_file, sounding_id_sect, input_config_filename, input_file_list=[], **kwargs):
        '''This writes the input configuration values to the input configuration
        file (i.e., the sdos_input_list.dat file)'''

        keyword_defaults = {
            'file_id':                   'Scalar Retrieval Outputs',
            'exe_path':                  None,
            'exe_version':               None,
            'data_version':              None,
            'release_version':           None,
            'comments':                  '',
            'algorithm_descriptor':      None,
            'algorithm_maturity':        None,
            'l2_input_path':             None,
            'number_soundings':          None,
            }
        # Load sounding id list so we can leave a count in the config file we produce
        sounding_id_list = self.read_id_list_file(sounding_id_file, sounding_id_sect)

        file_keywords = {}
        file_keywords.update(keyword_defaults)
        file_keywords.update(kwargs)

        file_keywords['number_soundings'] = len(sounding_id_list)

        # Try getting versions from the binary file itself first
        exe_version = None
        data_version = None
        if 'exe_path' in file_keywords and file_keywords['exe_path'] != None:
            try:
                ver_ret = binary_version(file_keywords['exe_path'])
            except OSError as exc:
                raise OSError("Could not execute L2 binary: %s due to error: %s" % (file_keywords['exe_path'], exc))

            if ver_ret:
                file_keywords['release_version'] = ver_ret[0]
                exe_version = ver_ret[1]
                self.logger.debug('Retrieved release_version "%s" from binary %s' % (file_keywords['release_version'], file_keywords['exe_path']))
                self.logger.debug('Retrieved exe_version "%s" from binary %s' % (exe_version, file_keywords['exe_path']))

                data_version = ver_ret[2]
                if data_version != None:
                    self.logger.debug('Retrieved data_version "%s" from binary %s' % (data_version, file_keywords['exe_path']))

            # If the binary doesn't have any version information, then try looking for a CM directory
            # where the executable lives
            if exe_version == None:
                exe_dir = os.path.dirname(file_keywords['exe_path'])
                exe_version = source_version(exe_dir)
                if exe_version != None:
                    self.logger.debug('Retrieved exe_version "%s" from binary containing directory %s' % (exe_version, exe_dir))

        # If the binary is not in a source-controlled directory, try src_path, which probably
        # came from an environment variable
        if exe_version == None and 'src_path' in file_keywords and file_keywords['src_path'] != None:
            exe_version = source_version(file_keywords['src_path'])
            if exe_version != None:
                self.logger.debug('Retrieved exe_version "%s" from source directory %s' % (exe_version, file_keywords['src_path']))

        if exe_version:
            file_keywords['exe_version'] = exe_version
        else:
            self.logger.error("Could not determine exe_version from executable: %s or source path: %s" % (file_keywords['exe_path'], file_keywords['src_path']))

        # If there was no binary version extracted from the binary then search for it from
        # the data_path variable
        if data_version != None:
            file_keywords['data_version'] = data_version
        elif 'data_path' in file_keywords and file_keywords['data_path'] != None:
            data_version = source_version(file_keywords['data_path'])
            self.logger.debug('Retrieved data_version "%s" from %s' % (data_version, file_keywords['data_path']))

        if data_version:
            file_keywords['data_version'] = data_version
        else:
            self.logger.error("Could not determine data_version from path: %s" % file_keywords['data_path'])

        if 'L2_INPUT_PATH' in os.environ:
            file_keywords['l2_input_path'] = os.environ['L2_INPUT_PATH']

        self.logger.debug('Writing input file config file: %s' % input_config_filename)
        out_mat_obj = OcoMatrix()

        # Set items into the input config file from values specified in the configuration file
        for head_key_name, head_key_value in file_keywords.items():
            if hasattr(out_mat_obj, head_key_name):
                self.logger.debug('Set %s as an attribute' % head_key_name)

                prev_value = getattr(out_mat_obj, head_key_name)
                setattr(out_mat_obj, head_key_name, head_key_value)
            else:
                self.logger.debug('Set %s into header' % head_key_name)

                if isinstance(head_key_value, six.string_types) and head_key_value.find(' ') >= 0:
                    out_mat_obj.header[head_key_name] = '"%s"' % head_key_value
                elif head_key_value == None:
                    out_mat_obj.header[head_key_name] = 'VALUE NOT SET'
                else:
                    out_mat_obj.header[head_key_name] = '%s' % head_key_value

        out_mat_obj.data = [fn for fn in input_file_list if fn != None and len(fn) > 0]
        out_mat_obj.write(input_config_filename, auto_size_cols=False)