def Perform_LINK(source, destination, options):
    link_relative = evaluate_bool_str(options.get('link_relative'), False)
    remove_broken_link = evaluate_bool_str(options.get('remove_broken_link'), False)
    
    if link_relative:
        source = os.path.relpath(source, os.path.dirname(destination))
        
    os.symlink(source, destination)

    # Check for broken link: os.path.exists() follows the symlink, so it is
    # False when the link target does not exist
    if not os.path.exists(destination):
        # Remove broken symbolic link if requested
        if remove_broken_link:
            os.remove(destination)
        raise IOError('Symbolic link to %s named %s was not created or is broken' % (source, destination))
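
# A minimal standalone sketch (standard library only) of the relative-link and
# broken-link checks used by Perform_LINK above; the temporary paths are
# illustrative and not part of the original module.
import os
import tempfile

def _relative_link_sketch():
    work_dir = tempfile.mkdtemp()
    target = os.path.join(work_dir, 'data', 'input.txt')      # never created
    link_name = os.path.join(work_dir, 'runs', 'input.txt')
    os.makedirs(os.path.dirname(link_name))

    # Same relative-path computation as the link_relative option above
    rel_target = os.path.relpath(target, os.path.dirname(link_name))
    os.symlink(rel_target, link_name)

    # os.path.lexists() sees the link itself, os.path.exists() follows it, so a
    # missing target shows up as lexists=True, exists=False (a broken link)
    return os.path.lexists(link_name), os.path.exists(link_name)
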
def Process_File(sourceFilename, destFilename, fileKeywords, moduleSections, valuesDict, mapDict):
   logger = logging.getLogger(os.path.basename(__file__))

   if len(moduleSections) > 1:
       raise RuntimeError('Only one extraction block allowed')

   prof_file    = Apply_Template(moduleSections[0].Get_Keyword_Value('prof_file'), valuesDict, mapDict=mapDict)
   log_file     = Apply_Template(moduleSections[0].Get_Keyword_Value('log_file'), valuesDict, mapDict=mapDict)
   l1b_file     = Apply_Template(moduleSections[0].Get_Keyword_Value('l1b_file'), valuesDict, mapDict=mapDict)
   resample_to  = Apply_Template(moduleSections[0].Get_Keyword_Value('resample_to'), valuesDict, mapDict=mapDict)
   verbose      = evaluate_bool_str(moduleSections[0].Get_Keyword_Value('verbose'))
   id_list_file = Apply_Template(moduleSections[0].Get_Keyword_Value('id_list_file'), valuesDict, mapDict=mapDict)
   id_section   = Apply_Template(moduleSections[0].Get_Keyword_Value('id_section'), valuesDict, mapDict=mapDict)

   if not os.path.isdir(destFilename):
      raise IOError('destFilename %s must be a directory, not a file' % destFilename)

   if id_list_file != None and len(id_list_file) > 0:
      sounding_ids = [ long(id_val) for id_val in Read_Id_List_File(id_list_file, id_section) ]
   else:
      sounding_ids = None

   if l1b_file == None:
      raise ValueError('l1b_file must be defined')

   if log_file == None:
      logger.debug('Extracting orbit simulator data from %s into %s' % (prof_file, destFilename))
   else:
      logger.debug('Extracting orbit simulator data from %s and %s into %s' % (prof_file, log_file, destFilename))
   
   extract_orbit_sim_data(prof_file, l1b_file, destFilename, log_file=log_file, resample_to=resample_to, sounding_id_list=sounding_ids, verbose=verbose)
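
# Read_Id_List_File is provided by the surrounding framework; this is only a
# plausible minimal sketch (one sounding id per line, '#' comments skipped,
# id_section ignored) to illustrate the id_list_file handling above.
def _read_id_list_sketch(id_list_file):
    sounding_ids = []
    with open(id_list_file) as id_fobj:
        for id_line in id_fobj:
            id_line = id_line.strip()
            if len(id_line) > 0 and not id_line.startswith('#'):
                sounding_ids.append(int(id_line.split()[0]))
    return sounding_ids
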
def Should_Process(check_section, valuesDict, mapDict, default=True):
    logger = logging.getLogger(sys._getframe(0).f_code.co_name)

    do_process = default
    try:
        only_if_tmpl = check_section.Get_Keyword_Value("only_if")
        only_if_val = Apply_Template(only_if_tmpl, valuesDict, mapDict=mapDict)
    except TemplateError:
        only_if_val = False

    try:
        not_if_tmpl = check_section.Get_Keyword_Value("not_if")
        not_if_val = Apply_Template(not_if_tmpl, valuesDict, mapDict=mapDict)
    except TemplateError:
        not_if_val = False

    # not_if takes precedence over only_if
    if not_if_val != None:
        if not getattr(not_if_val, "__iter__", False):
            not_if_val = [not_if_val]

        for curr_val in not_if_val:
            do_process = not evaluate_bool_str(str(curr_val), default=True)
            if not do_process:
                break

        logger.debug(
            'Should process %s = %s : not_if string: "%s" evaluates: %s'
            % (check_section.leaf[0], do_process, not_if_tmpl, not_if_val)
        )

    # If only_if_val is defined then make sure it evaluates to true
    if do_process and only_if_val != None:
        if not getattr(only_if_val, "__iter__", False):
            only_if_val = [only_if_val]

        for curr_val in only_if_val:
            do_process = evaluate_bool_str(str(curr_val), default=False)
            if not do_process:
                break

        logger.debug(
            'Should process %s = %s : only_if string: "%s" evaluates: %s'
            % (check_section.leaf[0], do_process, only_if_tmpl, only_if_val)
        )

    return do_process
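
# evaluate_bool_str comes from the surrounding framework; the accepted strings
# below are an assumption, shown only to illustrate how the not_if/only_if
# values above collapse to booleans (not_if taking precedence over only_if).
def _evaluate_bool_str_sketch(bool_str, default=False):
    if bool_str is None or len(str(bool_str).strip()) == 0:
        return default
    return str(bool_str).strip().lower() in ('true', 'yes', 'on', '1')

# Example: a not_if value of 'true' blocks processing, while an empty value
# falls back to the supplied default.
assert _evaluate_bool_str_sketch('true') == True
assert _evaluate_bool_str_sketch('', default=True) == True
assert _evaluate_bool_str_sketch('false', default=True) == False
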
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict):
    logger = logging.getLogger(os.path.basename(__file__))    

    optionDict = copy.copy(valuesDict)
    for keyName in fileKeywords.keys():
        optionDict[keyName] = fileKeywords[keyName]

    if source != None:
        logger.warning('source ignored by EXECUTE module')

    if destination != None:
        logger.warning('destination ignored by EXECUTE module')

    for executeSect in moduleSections:
        binary  = Apply_Template(executeSect.Get_Keyword_Value('binary'), valuesDict, mapDict=mapDict)
        options = Apply_Template(executeSect.Get_Keyword_Value('options'), optionDict, mapDict=mapDict)
        chdir   = Apply_Template(executeSect.Get_Keyword_Value('chdir'), optionDict, mapDict=mapDict)
        quiet   = evaluate_bool_str(executeSect.Get_Keyword_Value('quiet'))

        if binary == None or len(binary) == 0:
            raise ValueError("No binary name specified")

        if type(options) is ListType:
            options = ' '.join(options)

        old_dir = os.getcwd()
        if chdir != None:
            if not os.path.exists(chdir):
                raise IOError('Could not change to dir: %s' % chdir)
            os.chdir(chdir)

        envSects = executeSect.Get_Section('EXECUTE->ENVIRONMENT')
        if envSects != None:
            logger.debug('Setting environment variables:')
            for currEnvSect in envSects:
                for keyName in currEnvSect.Get_All_Keyword_Names():
                    os.environ[keyName] = Apply_Template(currEnvSect.Get_Keyword_Value(keyName), valuesDict, mapDict=mapDict)
                    logger.debug('%s = %s' % (keyName, os.environ[keyName]))

        if options != None:
            run_command = '%s %s' % (binary, options)
        else:
            run_command = '%s' % (binary)

        logger.debug('Executing command: %s' % run_command)

        run_obj = os.popen(run_command)
        if not quiet:
            for run_line in run_obj.readlines():
                logger.debug(run_line.strip())
        run_obj.close()

        os.chdir(old_dir)
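
# A hedged sketch of the command-execution step above using the standard
# subprocess module instead of os.popen; the command passed in is an arbitrary
# placeholder chosen by the caller, not anything defined by the EXECUTE module.
import logging
import subprocess

def _run_and_log_sketch(run_command, quiet=False):
    logger = logging.getLogger('EXECUTE_sketch')
    run_obj = subprocess.Popen(run_command, shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    for run_line in run_obj.stdout:
        if not quiet:
            logger.debug(run_line.strip())
    return run_obj.wait()

# Example: _run_and_log_sketch('echo hello')
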
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict):

   if len(moduleSections) > 1:
       raise RuntimeError('Only one resample levels block allowed')

   resample_to = Apply_Template(moduleSections[0].Get_Keyword_Value('resample_to'), valuesDict, mapDict=mapDict)
   extrapolate = evaluate_bool_str(moduleSections[0].Get_Keyword_Value('extrapolate'))

   if resample_to == None or len(resample_to) == 0:
       raise ValueError('resample_to keyword not specified')

   resample_levels(source, destination, resample_to, val_extrapolate=extrapolate)
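
# resample_levels is defined elsewhere in the pipeline; the sketch below only
# illustrates the general idea of resampling a profile onto a new set of levels
# with numpy.interp, under assumed (not confirmed) semantics and without the
# extrapolation option.
import numpy

def _resample_profile_sketch(src_levels, src_values, dst_levels):
    # numpy.interp expects the source sample points to be increasing
    return numpy.interp(dst_levels, src_levels, src_values)

# Example: _resample_profile_sketch([0.0, 1.0], [10.0, 20.0], [0.25, 0.5])
# -> array([12.5, 15.0])
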
def Process_Operation_Section(fileOpSection,
                              baseSourceDir=None,
                              valuesDict=None,
                              mapDict=None):
    logger = logging.getLogger(sys._getframe(0).f_code.co_name)

    operation_options = fileOpSection.Get_Keywords_Dict()
    for curr_key, curr_val in operation_options.items():
        operation_options[curr_key] = Apply_Template(curr_val,
                                                     valuesDict,
                                                     mapDict=mapDict)

    fail_on_error = evaluate_bool_str(operation_options.get('fail_on_error'),
                                      True)
    skip_if_exists = operation_options.get('skip_if_exists')
    remove_existing = evaluate_bool_str(
        operation_options.get('remove_existing'), False)

    if skip_if_exists != None and len(skip_if_exists) > 0:
        skipExistFiles = Expand_Filename(skip_if_exists)
        if os.path.exists(skipExistFiles[0]):
            logger.debug('Skipping %s section because this file exists: %s' %
                         (fileOpSection.leaf[0], skipExistFiles[0]))
            return

    if not Should_Process(fileOpSection, valuesDict, mapDict):
        logger.debug(
            'Skipping %s section because of template evaluation result' %
            (fileOpSection.leaf[0]))
        return

    for fileAction in fileOpSection.Get_All_Section_Nodes():

        actionName = fileAction.leaf[0].upper()
        action_matrix_data = fileAction.Get_Matrix_Data()

        # Allow for a replacement map inside of a file operation section.
        # Note that the position of the MAP within the section can matter!
        if actionName == 'MAP':
            mapDict = Get_Map_Values(fileAction, existing=mapDict)

        elif fileOpSection.leaf[0] != actionName:
            try:
                actionFunc = eval('Perform_%s' % actionName)
            except NameError:
                raise NameError(
                    'Could not find function to handle %s operation %s' %
                    (fileOpSection.leaf[0], actionName))

            for templateLine in action_matrix_data:
                if hasattr(templateLine, '__iter__'):
                    sources = Expand_Filename(
                        Apply_Template(templateLine[0],
                                       valuesDict,
                                       mapDict=mapDict), baseSourceDir)
                    destinations = Expand_Filename(
                        Apply_Template(templateLine[1],
                                       valuesDict,
                                       mapDict=mapDict))
                else:
                    sources = (None, )
                    destinations = Expand_Filename(
                        Apply_Template(templateLine,
                                       valuesDict,
                                       mapDict=mapDict))

                for curr_source, curr_destination in zip(
                        sources, destinations):

                    try:
                        if os.path.exists(
                                curr_destination) and remove_existing:
                            logger.debug('Removing existing destination: %s' %
                                         curr_destination)
                            Perform_DELETE(curr_destination, operation_options)

                        if curr_source == None:
                            logger.debug(
                                'Performing disk operation %s for section %s with file: %s'
                                % (actionName, fileOpSection.leaf[0],
                                   curr_destination))
                            actionFunc(curr_destination, operation_options)
                        else:
                            logger.debug(
                                'Performing disk operation %s for section %s with source: %s, destination: %s'
                                % (actionName, fileOpSection.leaf[0],
                                   curr_source, curr_destination))
                            actionFunc(curr_source, curr_destination,
                                       operation_options)

                    except:
                        err_msg = 'Could not process disk operation: %s for section: %s with source: %s, destination: %s' % (
                            actionName, fileOpSection.leaf[0], curr_source,
                            curr_destination)
                        if fail_on_error:
                            logger.error(err_msg)
                            raise
                        else:
                            logger.debug(err_msg)
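
# Process_Operation_Section above resolves 'Perform_<ACTION>' handlers with
# eval(); this is an equivalent minimal sketch of that dispatch using a
# globals() lookup, shown for illustration only.
def _dispatch_action_sketch(action_name, *args):
    handler_name = 'Perform_%s' % action_name.upper()
    action_func = globals().get(handler_name)
    if action_func is None:
        raise NameError('Could not find function to handle operation %s' % action_name)
    return action_func(*args)

# Example: _dispatch_action_sketch('LINK', source_path, destination_path, options)
# would call Perform_LINK(source_path, destination_path, options).
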
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict):
   logger = logging.getLogger(os.path.basename(__file__))

   if len(moduleSections) > 1:
       raise RuntimeError('Only one value map creation allowed per FILE')

   if str(source) == str(destination):
      raise IOError('source and destination must be different; will not overwrite the source file')

   list_file    = Apply_Template(moduleSections[0].Get_Keyword_Value('list_file'), valuesDict, mapDict=mapDict)
   data_file    = Apply_Template(moduleSections[0].Get_Keyword_Value('data_file'), valuesDict, mapDict=mapDict)
   data_col     = Apply_Template(moduleSections[0].Get_Keyword_Value('data_column'), valuesDict, mapDict=mapDict)
   section      = Apply_Template(moduleSections[0].Get_Keyword_Value('section'), valuesDict, mapDict=mapDict)
   static_value = Apply_Template(moduleSections[0].Get_Keyword_Value('static_value'), valuesDict, mapDict=mapDict)
   is_log_file  = evaluate_bool_str(moduleSections[0].Get_Keyword_Value('is_log_file'))
   l1b_file     = Apply_Template(moduleSections[0].Get_Keyword_Value('l1b_file'), valuesDict, mapDict=mapDict)
   modify       = moduleSections[0].Get_Keyword_Value('modify')

   max_range_val = None
   range_values = {}
   for range_sect in moduleSections[0].Get_Section('->RANGES'):
      logger.debug('Using range section')

      for range_spec in range_sect.Get_Matrix_Data():
         (range_name, range_str) = range_spec

         if range_str.find(',') > 0:
            curr_range = [ float(val) for val in range_str.split(',') ]
         else:
            curr_range = [ float(val) for val in range_str.split() ]

         if max_range_val == None:
            max_range_val = max(curr_range)
         else:
            max_range_val = max(max_range_val, max(curr_range))

         range_values[range_name] = curr_range

   id_list = Read_Id_List_File(list_file, section, valuesDict=valuesDict, mapDict=mapDict)

   data_values = []
   if data_file != None:
      
      if len(data_file) == 0 or not os.path.exists(data_file):
         raise IOError('Could not read data_file')

      if is_log_file:
         if l1b_file == None or len(l1b_file) == 0:
            raise ValueError('Need L1B file specified for using log file as source of data')
         if not os.path.exists(l1b_file):
            raise IOError('L1B file specified does not exist: %s' % l1b_file)
         
         log_file_obj = Orbit_Sim.Log_File(data_file)
         col_index = log_file_obj.get_column_index(data_col)

         if not type(col_index) is ListType:
            col_index = [ col_index ]

         h5_obj = h5py.File(l1b_file, 'r')
         snd_id_matrix = h5_obj[SOUNDING_ID_GROUP][SOUNDING_ID_DATASET]
         frame_id_arr  = h5_obj[FRAME_ID_GROUP][FRAME_ID_DATASET]

         for curr_sounding in id_list:
            curr_frame_id = int(str(curr_sounding)[0:-1])
            frame_index = bisect.bisect_left(frame_id_arr, curr_frame_id)

            for snd_index in range(snd_id_matrix.shape[1]):
               if snd_id_matrix[frame_index, snd_index] == int(curr_sounding):
                  break

            if snd_id_matrix[frame_index, snd_index] != int(curr_sounding):
                raise ValueError('did not find correct sounding id: %d at index: %s in hdf file: %s, instead found: %d' % (curr_sounding, (frame_index, snd_index), l1b_file, snd_id_matrix[frame_index, snd_index]))
             
            curr_log_val = 0.0
            for curr_val_idx in col_index:
               curr_log_val += log_file_obj.data[frame_index, snd_index, curr_val_idx]

            data_values.append(curr_log_val)

      else:
          if data_col == None:
             data_col = 0
          else:
             data_col = int(data_col)

          logger.debug('Reading mapped values from column %d of file %s' % (data_col, data_file))
          data_fobj = open(data_file)
          for data_line in data_fobj.readlines():
               data_line = data_line.strip()
               if len(data_line) > 0 and data_line.find('#') != 0:
                   line_parts = data_line.split()

                   if len(line_parts)-1 < data_col:
                       raise IOError('data file %s does not have column %d' % (data_file, data_col))
                   data_values.append(line_parts[data_col])

   if static_value != None:
      logger.debug('Setting mapped value to static value: %s' % static_value)
      for idx in range(len(id_list) - len(data_values)):
         data_values.append(static_value)

   if len(id_list) != len(data_values):
       raise IOError('Length of id list %d from file %s does not match length of data values %d from %s' % (len(id_list), list_file,  len(data_values), data_file))

   mapValues = None
   mapSects = moduleSections[0].Get_Section('->MAP')
   if mapSects != None and len(mapSects) > 0:
      mapValues = Get_Map_Values(mapSects, valuesDict)

   logger.debug('Writing map file: %s' % destination)

   if type(destination) is str:
      dstFileObj = open(destination, 'w')
   elif hasattr(destination, 'write'):
      dstFileObj = destination
   else:
      raise Exception('Unrecognized destination object: %s' % destination)


   if modify != None and len(modify) > 0:
      modifyDict = copy.copy(valuesDict)
      
   for (id_val, data_val) in zip(id_list, data_values):
      if modify != None and len(modify) > 0:
         modifyDict['original'] = str(data_val)
         modify_expr = Apply_Template(modify, modifyDict, mapDict=mapDict)
         data_val = eval(modify_expr)

      if len(range_values) > 0:
         found_range_value = False
         for (curr_name, curr_values) in range_values.items():
            beg_val = curr_values[0]
            end_val = curr_values[1]

            if float(data_val) >= beg_val and float(data_val) < end_val:
               data_val = curr_name
               found_range_value = True
               break

         if not found_range_value:
            raise LookupError('RANGE values specified but none matched for value: %s' % data_val)
      
      if mapValues != None and (str(data_val) in mapValues[DEFAULT_MAP_NAME]):
         print >>dstFileObj, id_val, mapValues[DEFAULT_MAP_NAME][str(data_val)]
      else:
         print >>dstFileObj, id_val, str(data_val)

   if type(destination) is str:
      dstFileObj.close()
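
# A minimal standalone sketch of the RANGES bucketing applied above: a numeric
# value is replaced by the name of the first [begin, end) interval that
# contains it, mirroring the range_values handling in Process_File.
def _bucket_value_sketch(data_val, range_values):
    for range_name, curr_values in range_values.items():
        beg_val, end_val = curr_values[0], curr_values[1]
        if beg_val <= float(data_val) < end_val:
            return range_name
    raise LookupError('RANGE values specified but none matched for value: %s' % data_val)

# Example: _bucket_value_sketch(0.4, {'low': [0.0, 0.5], 'high': [0.5, 1.0]}) -> 'low'
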
def Get_Constant_Values(constSections,
                        existingDict={},
                        mapDict={},
                        templateDict=None):
    logger = logging.getLogger(sys._getframe(0).f_code.co_name)

    # Use existing dict as template when template dict is not defined
    # Kept for backwards compatibility, where existingDict was used
    # in keyword applications
    if templateDict == None:
        templateDict = existingDict

    loaded_constants = []
    for const_sect in constSections:
        sect_name = const_sect.leaf[0]

        for const_child in const_sect.children:

            if const_child.type == 'assignment':
                const_name = const_child.leaf
                const_val = Apply_Template(
                    const_sect.Get_Keyword_Value(const_name),
                    templateDict,
                    mapDict=mapDict)

                if type(const_val) is ListType:
                    raise ValueError(
                        'constant %s defined more than once or is a list with value: %s'
                        % (const_name, const_val))

                loaded_constants.append(const_name)
                existingDict[const_name] = const_val

            elif const_child.type == 'section' and const_child.leaf[
                    0] == 'EXTRACT':
                extract_filename = Apply_Template(
                    const_child.Get_Keyword_Value('filename'),
                    templateDict,
                    mapDict=mapDict)
                allow_missing = evaluate_bool_str(
                    Apply_Template(
                        const_child.Get_Keyword_Value('allow_missing'),
                        templateDict,
                        mapDict=mapDict), False)

                keyword_sect_list = const_child.Get_Section(
                    'EXTRACT->KEYWORDS')

                if extract_filename == None or len(extract_filename) == 0:
                    raise ValueError(
                        'filename must be specified for %s->EXTRACT section' %
                        sect_name)

                if keyword_sect_list == None or len(keyword_sect_list) == 0:
                    raise ValueError(
                        'KEYWORDS section must be specified for %s->EXTRACT section'
                        % sect_name)

                if not os.path.exists(extract_filename):
                    raise ValueError(
                        'filename specified for %s->EXTRACT section does not exist: %s'
                        % (sect_name, extract_filename))

                logger.debug('Reading constant keyword values from file: %s' %
                             extract_filename)
                keyFileObj = L2_Input.Input_File(extract_filename)

                for keyword_sect in keyword_sect_list:
                    wanted_consts = keyword_sect.Get_All_Keyword_Names()
                    for new_const_name in wanted_consts:
                        search_path = Apply_Template(
                            keyword_sect.Get_Keyword_Value(new_const_name),
                            templateDict,
                            mapDict=mapDict)
                        logger.debug('Loading %s from keyword file as %s' %
                                     (search_path, new_const_name))
                        search_sect_name = '->'.join(
                            search_path.split('->')[0:-1])
                        search_key_name = search_path.split('->')[-1]

                        search_sect_obj = keyFileObj.Get_Section(
                            search_sect_name)

                        if search_sect_obj == None or len(
                                search_sect_obj) == 0:
                            raise IOError(
                                'Could not find section: %s in file: %s' %
                                (search_sect_name, extract_filename))

                        new_const_value = [
                            sect.Get_Keyword_Value(search_key_name)
                            for sect in search_sect_obj
                        ]

                        if new_const_value == None or len(
                                new_const_value) == 0:
                            if allow_missing:
                                new_const_value = ""
                            else:
                                raise ValueError(
                                    'Could not find keyword: %s in section: %s in file: %s'
                                    % (search_key_name, search_sect_name,
                                       extract_filename))
                        elif len(new_const_value) == 1:
                            new_const_value = new_const_value[0]

                        loaded_constants.append(new_const_name)
                        existingDict[new_const_name] = new_const_value

    if len(loaded_constants) > 0:
        logger.debug('Loaded values: %s' % ', '.join(loaded_constants))

    return existingDict
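
# A small sketch of how the 'SECTION->SUBSECTION->keyword' search paths used by
# the EXTRACT handling above split into a section path and a keyword name.
def _split_search_path_sketch(search_path):
    path_parts = search_path.split('->')
    return '->'.join(path_parts[0:-1]), path_parts[-1]

# Example: _split_search_path_sketch('CONTROL->RUN->input_file')
# -> ('CONTROL->RUN', 'input_file')
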
def Get_Map_Values(mapSectionsList, subDict=None, existing={}):
    logger = logging.getLogger(sys._getframe(0).f_code.co_name)

    mapData = copy.copy(existing)

    if type(mapSectionsList) is not ListType:
        mapSectionsList = [mapSectionsList]

    for mapSection in mapSectionsList:
        mapName = mapSection.Get_Keyword_Value('name')
        if mapName == None or mapName == '':
            mapName = DEFAULT_MAP_NAME

        if not mapName in mapData.keys():
            if type(mapName) is ListType:
                raise ValueError(
                    'A named MAP section must contain map values in a VALUES subsection'
                )
            mapData[mapName] = {}

        required = evaluate_bool_str(mapSection.Get_Keyword_Value('required'),
                                     True)
        if subDict != None:
            mapFilename = Apply_Template(
                mapSection.Get_Keyword_Value('from_file'), subDict)
            sectionName = Apply_Template(
                mapSection.Get_Keyword_Value('section'), subDict)
        else:
            mapFilename = mapSection.Get_Keyword_Value('from_file')
            sectionName = mapSection.Get_Keyword_Value('section')

        if mapFilename != None and len(mapFilename) > 0:
            mapFilename = Expand_Filename(mapFilename)[0]

            if not os.path.exists(mapFilename):
                if required:
                    raise IOError("MAP source file '%s' does not exist" %
                                  (mapFilename))
                else:
                    continue

            if sectionName == None:
                logger.debug('Loading MAP %s contents from file: %s' %
                             (mapName, mapFilename))
                mapFileData = L2_Input.Input_File(mapFilename)
                mapSection = mapFileData.rootNode
                mapValues = mapSection.Get_Matrix_Data()
            else:
                logger.debug(
                    'Loading MAP %s section as %s contents from file: %s' %
                    (sectionName, mapName, mapFilename))
                fileObj = L2_Input.Input_File(mapFilename)

                foundSects = fileObj.Get_Section(sectionName)

                if foundSects == None or len(foundSects) == 0:
                    raise IOError('Could not find section %s in file: %s' %
                                  (sectionName, mapFilename))

                mapValues = []
                for currFileSect in foundSects:
                    for sectKeyName in currFileSect.Get_All_Keyword_Names():
                        sectKeyVal = currFileSect.Get_Keyword_Value(
                            sectKeyName)
                        mapValues.append([str(sectKeyName), str(sectKeyVal)])

        else:
            if mapName == DEFAULT_MAP_NAME:
                valuesSect = [mapSection]
            else:
                valuesSect = mapSection.Get_Section('MAP->VALUES')

            if len(valuesSect) == 0:
                raise ValueError(
                    'Could not find map values in either main MAP section or VALUES sub section'
                )
            if len(valuesSect) > 1:
                raise ValueError(
                    'Too many VALUES sub sections for MAP section')

            mapValues = valuesSect[0].Get_Matrix_Data()

        if mapValues != None:
            for mapRow in mapValues:
                mapKey = mapRow[0]
                mapValue = mapRow[1:]

                if len(mapValue) == 1:
                    mapValue = mapValue[0]

                mapData[mapName][mapKey] = mapValue

    return mapData
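
# A minimal sketch of the matrix-to-map conversion done at the end of
# Get_Map_Values: each row maps its first column to the remaining columns,
# collapsing single-value rows to a scalar.
def _rows_to_map_sketch(map_rows):
    map_dict = {}
    for map_row in map_rows:
        map_key, map_value = map_row[0], map_row[1:]
        if len(map_value) == 1:
            map_value = map_value[0]
        map_dict[map_key] = map_value
    return map_dict

# Example: _rows_to_map_sketch([['old_name', 'new_name'], ['a', '1', '2']])
# -> {'old_name': 'new_name', 'a': ['1', '2']}
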
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict,
                 mapDict):
    logger = logging.getLogger(os.path.basename(__file__))

    # Load existing file
    matrix_obj = OCO_Matrix(source)

    for modifySect in moduleSections:

        # Add ability to specify cols individually or use a * to go to the end
        columns = Apply_Template(modifySect.Get_Keyword_Value('columns'),
                                 valuesDict,
                                 mapDict=mapDict)
        rows = Apply_Template(modifySect.Get_Keyword_Value('rows'),
                              valuesDict,
                              mapDict=mapDict)
        modify = modifySect.Get_Keyword_Value('modify')
        delete = evaluate_bool_str(modifySect.Get_Keyword_Value('delete'))
        add_column = evaluate_bool_str(
            modifySect.Get_Keyword_Value('add_column'))

        if columns != None:
            try:
                columns = index_range_list(columns,
                                           max_value=matrix_obj.dims[1])
            except:
                if not type(columns) is ListType:
                    col_name_list = [columns]
                else:
                    col_name_list = columns

                columns = []
                for curr_name in col_name_list:
                    if curr_name.lower() not in matrix_obj.labels_lower:
                        if add_column:
                            matrix_obj.add_column(curr_name)
                            columns.append(matrix_obj.dims[1] - 1)
                            # Newly added column is already indexed, skip the lookup below
                            continue
                        else:
                            raise IOError(
                                'Column named %s not found in file: %s' %
                                (curr_name, source))

                    columns.append(
                        matrix_obj.labels_lower.index(curr_name.lower()))
        else:
            columns = range(matrix_obj.dims[1])

        if rows != None:
            rows = index_range_list(rows, max_value=matrix_obj.dims[0])
        else:
            rows = range(matrix_obj.dims[0])

        if delete and modify != None:
            raise ValueError(
                'delete and modify keywords can not be specified together')

        if delete:
            if len(columns) > matrix_obj.dims[1]:
                raise IOError(
                    'More columns to be deleted %d than exist %d in input file %s'
                    % (len(columns), matrix_obj.dims[1], source))

            new_data = numpy.zeros(
                (matrix_obj.dims[0], matrix_obj.dims[1] - len(columns)),
                dtype=numpy.double)
            new_labels = []
            new_units = []

            new_col_idx = 0
            for old_col_idx in range(matrix_obj.dims[1]):
                if old_col_idx not in columns:
                    new_labels.append(matrix_obj.labels[old_col_idx])
                    new_units.append(matrix_obj.units[old_col_idx])

                    new_data[:, new_col_idx] = matrix_obj.data[:, old_col_idx]

                    new_col_idx += 1

            matrix_obj.data = new_data
            matrix_obj.labels = new_labels
            matrix_obj.units = new_units

        if modify != None and len(modify) > 0:

            modifyDict = copy_module.copy(valuesDict)

            Get_Constant_Values(modifySect.Get_Section('->CONSTANTS'),
                                modifyDict)

            for row_idx in rows:
                for col_idx in columns:
                    modifyDict['original'] = str(
                        matrix_obj.data[row_idx][col_idx])

                    modify_str = Apply_Template(modify,
                                                modifyDict,
                                                mapDict=mapDict)

                    try:
                        matrix_obj.data[row_idx][col_idx] = eval(modify_str)
                    except:
                        raise RuntimeError(
                            'Error evaluating modify string: "%s"' %
                            modify_str)

    matrix_obj.write(destination, auto_size_cols=False)
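
# index_range_list is provided by the surrounding framework; this sketch
# assumes it accepts comma-separated indices and 'start-end' ranges, and only
# illustrates one plausible behaviour for the rows/columns keywords above.
def _index_range_list_sketch(spec, max_value=None):
    indices = []
    for spec_part in str(spec).split(','):
        if '-' in spec_part:
            beg_idx, end_idx = spec_part.split('-')
            indices.extend(range(int(beg_idx), int(end_idx) + 1))
        else:
            indices.append(int(spec_part))
    if max_value is not None:
        indices = [idx for idx in indices if idx < max_value]
    return indices

# Example: _index_range_list_sketch('0,2-4') -> [0, 2, 3, 4]
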
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict,
                 mapDict):
    logger = logging.getLogger(os.path.basename(__file__))

    if len(moduleSections) > 1:
        raise RuntimeError('Only one of this module per FILE')

    verbose = evaluate_bool_str(moduleSections[0].Get_Keyword_Value('verbose'),
                                False)
    max_dirs = moduleSections[0].Get_Keyword_Value('max_dirs')

    if max_dirs != None:
        max_dirs = int(max_dirs)

    run_spec_obj = L2_Input.Input_File(source)
    run_dirs = run_spec_obj.Get_Matrix_Data()

    # No directories to search for scrubbing
    if run_dirs == None:
        logger.info('No run directories available for searching for scrubbing')
        return

    sys.stdout.write('%s: Searching run directories for unreferenced files: ' %
                     os.path.basename(__file__))

    ref_unique_filenames = {}
    dir_count = 0
    for curr_dir in run_dirs:
        if max_dirs != None and dir_count >= max_dirs:
            break

        run_filename = '%s/%s' % (curr_dir, run_basename)
        run_file_obj = L2_Input.Input_File(run_filename)
        control_sect = run_file_obj.Get_Section('CONTROL')
        if control_sect == None or len(control_sect) == 0:
            raise IOError('No CONTROL section in %s' % run_filename)

        try:
            input_file = '%s/%s' % (
                curr_dir, control_sect[0].Get_Keyword_Value('input_file'))
        except:
            raise LookupError(
                'Could not find input_file keyword in CONTROL section of file: %s'
                % run_filename)
        inp_file_obj = L2_Input.Input_File(input_file)

        run_file_list = get_obj_referenced_filenames(run_file_obj, curr_dir)
        inp_file_list = get_obj_referenced_filenames(inp_file_obj, curr_dir)

        for ref_filename in (run_file_list + inp_file_list):
            if ref_unique_filenames.has_key(ref_filename):
                ref_unique_filenames[ref_filename] += 1
            else:
                ref_unique_filenames[ref_filename] = 0

        # Progress marks since this loop takes a while
        sys.stdout.write('.')
        sys.stdout.flush()

        dir_count += 1

    # Separate the progress marks from following output
    sys.stdout.write('\n')

    if type(destination) is str:
        dest_filenames = get_all_filenames(destination)
    else:
        dest_filenames = get_all_filenames(destination.filename)

    for static_filename in dest_filenames:
        if not ref_unique_filenames.has_key(static_filename):
            try:
                os.remove(static_filename)
                if verbose:
                    logger.debug('Deleted: %s' % static_filename)
            except:
                logger.error('Could not delete: %s' % static_filename)
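
# get_all_filenames is part of the surrounding framework; a plausible
# standalone sketch using os.walk is shown here only to illustrate the
# referenced-vs-present comparison done during scrubbing above.
import os

def _get_all_filenames_sketch(base_dir):
    found_filenames = []
    for dir_path, dir_names, file_names in os.walk(base_dir):
        for curr_name in file_names:
            found_filenames.append(os.path.join(dir_path, curr_name))
    return found_filenames
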
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict,
                 mapDict, buffer_objs):
    logger = logging.getLogger(os.path.basename(__file__))

    logger.debug('Creating aerosol blocks for file: %s' % source)

    fileObj = L2_Input.Input_File(source)

    param_def_sec = fileObj.Get_Section('PARAMETER_DEFINITION')
    if len(param_def_sec) == 0:
        logger.error('%s has sections: %s' %
                     (source, fileObj.Get_All_Section_Names()))
        raise IOError(
            'Could not find PARAMETER_DEFINITION section in source: %s' %
            source)

    orig_aero_secs = param_def_sec[0].Get_Section('->AEROSOL')
    orig_aero_sec_names = [
        curr_aer_sec.Get_Keyword_Value('name').upper()
        for curr_aer_sec in orig_aero_secs
    ]

    try:
        aero_block_prototype = orig_aero_secs[0]
    except IndexError:
        # For now just raise the error; the fallback block below is left in place but never reached
        raise IndexError(
            'Could not find aerosol block to use as prototype in file: %s' %
            source)

        logger.warning(
            'No aerosol block found as prototype, trying to create a new section'
        )

        aero_block_prototype = L2_Input.Section('AEROSOL')
        aero_block_prototype.Set_Keyword_Value('name', None)
        aero_block_prototype.Set_Keyword_Value('a_priori', None)
        aero_block_prototype.Set_Keyword_Value('covariance', None)
        aero_block_prototype.Set_Keyword_Value('mie_file', None)
        aero_block_prototype.Set_Keyword_Value('moment_file', None)
        aero_block_prototype.Set_Keyword_Value('retrieval_indicies', None)

    for curr_sect_index, curr_section_obj in enumerate(moduleSections):
        profile_names = Apply_Template(
            curr_section_obj.Get_Keyword_Value('profile_names'),
            valuesDict,
            mapDict=mapDict)
        type_names = Apply_Template(
            curr_section_obj.Get_Keyword_Value('type_names'),
            valuesDict,
            mapDict=mapDict)

        aerosol_file = Apply_Template(
            curr_section_obj.Get_Keyword_Value('from_file'),
            valuesDict,
            mapDict=mapDict)
        set_retrieval_vector = evaluate_bool_str(
            curr_section_obj.Get_Keyword_Value('set_retrieval_vector'))

        if curr_sect_index == 0:
            remove_existing_blocks = True
        else:
            remove_existing_blocks = False

        if aerosol_file != None and (profile_names == None
                                     or len(profile_names) == 0):
            if buffer_objs.has_key(aerosol_file):
                aerosol_obj = buffer_objs[aerosol_file]
                aerosol_obj.seek(0)
            else:
                aerosol_obj = aerosol_file

            profile_names = []
            type_names = []

            logger.debug('Using aerosol file for profile names: %s' %
                         aerosol_obj)

            mat_obj = OCO_Matrix(aerosol_obj)
            for lbl_name in mat_obj.labels_lower:
                if lbl_name != 'pressure':
                    profile_names.append(lbl_name.upper())
                    type_names.append(lbl_name.lower())
        else:
            if profile_names == None or len(profile_names) == 0:
                raise AttributeError('profile_names needs to be defined')
            elif not type(profile_names) is ListType:
                profile_names = profile_names.split()

            if type_names == None or len(type_names) == 0:
                raise AttributeError('type_names needs to be defined')
            elif not type(type_names) is ListType:
                type_names = type_names.split()

        logger.debug('Using profile names: %s' % (', '.join(profile_names)))
        logger.debug('Using type names: %s' % (', '.join(type_names)))

        if remove_existing_blocks:
            param_def_sec[0].Set_Keyword_Value('aerosol_types', profile_names)
        else:
            existing_types = param_def_sec[0].Get_Keyword_Value(
                'aerosol_types')

            if hasattr(existing_types, '__iter__'):
                existing_types += profile_names
            else:
                existing_types += ' ' + ' '.join(profile_names)

            param_def_sec[0].Set_Keyword_Value('aerosol_types', existing_types)

        if set_retrieval_vector:
            retrieval_vector = param_def_sec[0].Get_Keyword_Value(
                'retrieval_vector')

            if retrieval_vector == None:
                raise IOError(
                    'Could not find retrieval_vector keyword for PARAMETER_DEFINITION in file: %s'
                    % source)

            if type(retrieval_vector) is not ListType:
                retrieval_vector = retrieval_vector.split()

            retrieval_vector = [rv_str.upper() for rv_str in retrieval_vector]

            # Remove existing aerosol names from retrieval vector
            for curr_aer_name in orig_aero_sec_names:
                if curr_aer_name in retrieval_vector:
                    retrieval_vector.remove(curr_aer_name)

            for curr_prof_name in profile_names:
                retrieval_vector.append(curr_prof_name)

            param_def_sec[0].Set_Keyword_Value('retrieval_vector',
                                               retrieval_vector)

        # Delete from param list aerosol types
        if remove_existing_blocks:
            for as_del in orig_aero_secs:
                param_def_sec[0].children.remove(as_del)

        for (curr_prof_name, curr_type_name) in zip(profile_names, type_names):
            new_aero = copy.deepcopy(aero_block_prototype)

            new_aero.Set_Keyword_Value('name', curr_prof_name)

            mie = new_aero.Get_Keyword_Value('mie_file')
            mie_dn = os.path.dirname(mie)
            new_aero.Set_Keyword_Value('mie_file',
                                       mie_dn + '/' + curr_type_name + '.mie')

            mom = new_aero.Get_Keyword_Value('moment_file')
            mom_dn = os.path.dirname(mom)
            new_aero.Set_Keyword_Value('moment_file',
                                       mom_dn + '/' + curr_type_name + '.mom')

            param_def_sec[0].children.append(new_aero)

    logger.debug('Writing aerosol changes to file: %s' % destination)
    fileObj.Write(destination)
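
# A small sketch of the per-type filename substitution performed when cloning
# the aerosol prototype block above: only the basename changes, the directory
# of the prototype's mie_file/moment_file is kept.
import os

def _retarget_aerosol_file_sketch(prototype_path, type_name, extension):
    return os.path.join(os.path.dirname(prototype_path), type_name + extension)

# Example: _retarget_aerosol_file_sketch('/data/aerosol/proto.mie', 'water', '.mie')
# -> '/data/aerosol/water.mie'
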
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict):
    logger = logging.getLogger(os.path.basename(__file__))
    
    logger.debug('')
    logger.debug('Reading source file: %s' % source)
    modFileObj = L2_Input.Input_File(source)
    
    for pick in moduleSections:
        section  = Apply_Template(pick.Get_Keyword_Value('section'), valuesDict, mapDict=mapDict)
        keyword  = Apply_Template(pick.Get_Keyword_Value('keyword'), valuesDict, mapDict=mapDict)

        # Resolve template later when can add to the values dictionary
        template = pick.Get_Keyword_Value('template', addWhiteSpace=True)

        which_line    = Apply_Template(pick.Get_Keyword_Value('which_line'), valuesDict, mapDict=mapDict)
        which_section = Apply_Template(pick.Get_Keyword_Value('which_section'), valuesDict, mapDict=mapDict)
        which_keyword = Apply_Template(pick.Get_Keyword_Value('which_keyword'), valuesDict, mapDict=mapDict)

        ignore_missing = Apply_Template(pick.Get_Keyword_Value('ignore_missing'), valuesDict, mapDict=mapDict)
        index_format   = Apply_Template(pick.Get_Keyword_Value('index_format'), valuesDict, mapDict=mapDict)

        delete   = Apply_Template(pick.Get_Keyword_Value('delete'), valuesDict, mapDict=mapDict)
        indent   = Apply_Template(pick.Get_Keyword_Value('indent'), valuesDict, mapDict=mapDict)
        unique   = evaluate_bool_str(Apply_Template(pick.Get_Keyword_Value('unique'), valuesDict, mapDict=mapDict), False)

        if indent == None:
            indent = 0

        if keyword != None and type(keyword) is not ListType:
            keyword = [keyword]

        if which_line != None:
            if type(which_line) is ListType or not which_line.isdigit():
                raise ValueError('which_line must be a scalar integer')
            else:
                which_line = int(which_line)

        if (template == None or len(template) == 0) and (delete == None or len(delete) == 0):
            raise ValueError('template must be defined for PICK')

        if section != None:
            if keyword != None:
                if delete != None and len(delete) > 0:
                    logger.debug('Deleting keyword %s->%s' % (section, keyword))
                else:
                    logger.debug('Modifying keyword %s->%s' % (section, keyword))
            elif delete != None and len(delete) > 0:
                logger.debug('Deleting section %s' % section)
        else:
            if keyword != None:
                if delete != None and len(delete) > 0:
                    logger.debug('Deleting keyword %s' % keyword)
                else:
                    logger.debug('Modifying keyword %s' % keyword)

            elif delete != None and len(delete) > 0:
                logger.debug('Deleting lines from root file section: %s' % delete)

        # Find the section to modify
        if section == None:
            modSect = [ modFileObj.rootNode ]
        else:
            if type(section) is ListType:
                modSect = []
                for curr_sect_name in section:
                    for found_sect in modFileObj.rootNode.Get_Section(curr_sect_name):
                        modSect.append(found_sect)
            else:
                modSect = modFileObj.rootNode.Get_Section(section)

        if len(modSect) == 0:
            modSect = [L2_Input.Section(leaf=section)]

            if which_line != None:
                modFileObj.children.insert(which_line, modSect[0])
            else:
                modFileObj.children.append(modSect[0])

        # If which is defined then filter sections to modify
        if keyword != None and which_section != None:

            try:
                section_indexes = index_range_list(which_section)
            except:
                section_indexes = []
                curr_index = 0
                for testSect in modSect:
                    sectName = testSect.Get_Keyword_Value('name')
                    if sectName == which_section:
                        section_indexes.append(curr_index)
                    curr_index += 1
                if len(section_indexes) == 0 and not ignore_missing:
                    raise IOError('Could not find section named: %s with name keyword: %s in file %s' % (section, which_section, source))
                
            sectChoices = []
            for w_idx in section_indexes:
                try:
                    sectChoices.append( modSect[w_idx] )
                except:
                    raise IOError("Section index: %d not found for keyword: %s, section: %s in file %s" % (w_idx, keyword, section, source))
            modSect = sectChoices

        # Finally modify all chosen sections and chosen keywords
        for pickSect in modSect:
            pickValDict = copy.copy(valuesDict)

            if keyword != None:
                for curr_keyname in keyword:
                    keyword_val = pickSect.Get_Keyword_Value(curr_keyname)
                    if type(keyword_val) is ListType:
                        pickValDict[curr_keyname] = ' '.join(keyword_val)
                    elif keyword_val != None:
                        pickValDict[curr_keyname] = keyword_val

            newValue = Apply_Template(template, pickValDict, mapDict=mapDict)

            if delete != None and len(delete) > 0:

                try:
                    delete = index_range_list(delete)
                except:
                    delete = [0]

                if keyword != None and len(keyword) > 0:
                    for curr_keyname in keyword:
                        pickSect.Delete_Keyword(curr_keyname, which=delete)
                elif section != None and len(section) > 0:
                    modFileObj.Delete_Section(pickSect) # which=delete
                else:
                    # Remove from a location a certain # of times
                    for dlist_idx in range(len(delete)-1, -1, -1):
                        del_idx = delete[dlist_idx]
                        x = pickSect.children.pop(del_idx)

            elif keyword != None:

                if which_keyword != None:
                    which_keyword = index_range_list(which_keyword)

                for curr_keyname in keyword:
                    if index_format != None and hasattr(newValue, '__iter__'):
                        for curr_index, curr_value in enumerate(newValue):
                            index_keyname = index_format.format(keyword=curr_keyname, index=curr_index+1)
                            pickSect.Set_Keyword_Value(index_keyname, curr_value, which=which_keyword, indent=indent)
                    else:
                        pickSect.Set_Keyword_Value(curr_keyname, newValue, which=which_keyword, indent=indent)
            else:
                if not type(newValue) is ListType:
                    newValue = [ newValue ]

                for currValue in newValue:
                    if unique and currValue in modFileObj.Get_Matrix_Data():
                        continue
                    
                    newNode = L2_Input.Node('value', currValue + '\n')

                    if which_line != None:
                        pickSect.children.insert(which_line, newNode)
                    else:
                        pickSect.children.append(newNode)

    # Write newly modified file
    logger.debug('Writing destination file: %s' % destination)
    modFileObj.Write(destination)
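# The section and keyword selection above relies on index_range_list() to turn
# a range specification such as "0-2,5" into a list of integer indexes.  That
# helper is defined elsewhere; the function below is only a minimal sketch of
# the assumed behavior, e.g. index_range_list_sketch('0-2,5') -> [0, 1, 2, 5]:
def index_range_list_sketch(range_spec, max_value=None):
    indexes = []
    for part in str(range_spec).replace(' ', '').split(','):
        if '-' in part:
            beg, end = part.split('-', 1)
            indexes.extend(range(int(beg), int(end) + 1))
        else:
            indexes.append(int(part))
    if max_value != None and max(indexes) >= max_value:
        raise ValueError('Index %d exceeds maximum allowed value %d' % (max(indexes), max_value))
    return indexes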
def Process_Operation_Section(fileOpSection, baseSourceDir=None, valuesDict=None, mapDict=None):
    logger = logging.getLogger(sys._getframe(0).f_code.co_name)
   
    operation_options = fileOpSection.Get_Keywords_Dict()
    for curr_key, curr_val in operation_options.items():
        operation_options[curr_key] = Apply_Template(curr_val, valuesDict, mapDict=mapDict)

    fail_on_error   = evaluate_bool_str( operation_options.get('fail_on_error'), True)
    skip_if_exists  = operation_options.get('skip_if_exists')
    remove_existing = evaluate_bool_str( operation_options.get('remove_existing'), False )
   
    if skip_if_exists != None and len(skip_if_exists) > 0:
        skipExistFiles = Expand_Filename(skip_if_exists)
        if os.path.exists(skipExistFiles[0]):
            logger.debug('Skipping %s section because this file exist: %s' % (fileOpSection.leaf[0], skipExistFiles[0]))
            return

    if not Should_Process(fileOpSection, valuesDict, mapDict):
        logger.debug('Skipping %s section because of template evaluation result' % (fileOpSection.leaf[0]))
        return

    for fileAction in fileOpSection.Get_All_Section_Nodes():

        actionName = fileAction.leaf[0].upper()
        action_matrix_data = fileAction.Get_Matrix_Data()

        # Allow for a replacement map inside of a file operation section.
        # Note that the position of the MAP block matters: it only affects
        # actions processed after it.
        if actionName == 'MAP':
            mapDict = Get_Map_Values( fileAction, existing=mapDict )

        elif fileOpSection.leaf[0] != actionName:
            try:
                actionFunc = eval('Perform_%s' % actionName)
            except NameError:
                raise NameError('Could not find function to handle %s operation %s' % (fileOpSection.leaf[0], actionName))

            for templateLine in action_matrix_data:                
                if hasattr(templateLine, '__iter__'):
                    sources      = Expand_Filename( Apply_Template( templateLine[0], valuesDict, mapDict=mapDict ), baseSourceDir )
                    destinations = Expand_Filename( Apply_Template( templateLine[1], valuesDict, mapDict=mapDict ) )
                else:
                    sources      = (None,)
                    destinations = Expand_Filename( Apply_Template( templateLine, valuesDict, mapDict=mapDict ) )
                
                for curr_source, curr_destination in zip(sources, destinations):
                   
                    try:
                        if os.path.exists(curr_destination) and remove_existing:
                            logger.debug('Removing existing destination: %s' % curr_destination)
                            Perform_DELETE(curr_destination, operation_options)

                        if curr_source == None:
                            logger.debug('Performing disk operation %s for section %s with file: %s' % (actionName, fileOpSection.leaf[0], curr_destination))
                            actionFunc(curr_destination, operation_options)
                        else:
                            logger.debug('Performing disk operation %s for section %s with source: %s, destination: %s' % (actionName, fileOpSection.leaf[0], curr_source, curr_destination))
                            actionFunc(curr_source, curr_destination, operation_options)
                            
                    except:
                        err_msg = 'Could not process disk operation: %s for section: %s with source: %s, destination: %s' % (actionName, fileOpSection.leaf[0], curr_source, curr_destination)
                        if fail_on_error:
                            logger.error(err_msg)
                            raise
                        else:
                            logger.debug(err_msg)
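# Each action block above is dispatched by name to a matching Perform_<ACTION>
# function (as with the Perform_DELETE call).  The function below is only a
# hypothetical sketch of a handler following that convention; it is not the
# real Perform_COPY (if one exists) and simplifies the boolean option parsing
# that the real code does with evaluate_bool_str:
import os
import shutil

def Perform_COPY_sketch(source, destination, options):
    # Assumed option handling: treat remove_existing as a truthy string flag
    remove_existing = str(options.get('remove_existing', '')).lower() in ('true', 'yes', '1')
    if remove_existing and os.path.exists(destination):
        os.remove(destination)
    shutil.copyfile(source, destination)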
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict):
    logger = logging.getLogger(os.path.basename(__file__))
    
    if len(moduleSections) > 1:
        raise RuntimeError('Only one instance of this module allowed per FILE')

    verbose  = evaluate_bool_str( moduleSections[0].Get_Keyword_Value('verbose'), False)
    max_dirs = moduleSections[0].Get_Keyword_Value('max_dirs')

    if max_dirs != None:
        max_dirs = int(max_dirs)

    run_spec_obj = L2_Input.Input_File(source)
    run_dirs = run_spec_obj.Get_Matrix_Data()

    # No directories to search for scrubbing
    if run_dirs == None:
        logger.info('No run directories available to search for scrubbing')
        return

    sys.stdout.write('%s: Searching run directories for unreferenced files: ' % os.path.basename(__file__))

    ref_unique_filenames = {}
    dir_count = 0
    for curr_dir in run_dirs:
        if max_dirs != None and dir_count >= max_dirs:
            break
                
        run_filename = '%s/%s' % (curr_dir, run_basename)
        run_file_obj = L2_Input.Input_File(run_filename)
        control_sect = run_file_obj.Get_Section('CONTROL')
        if len(control_sect) == 0:
            raise IOError('No CONTROL section in %s' % run_filename)

        try:
            input_file   = '%s/%s' % (curr_dir, control_sect[0].Get_Keyword_Value('input_file'))
        except:
            raise LookupError('Could not find input_file keyword in CONTROL section of file: %s' % run_filename)
        inp_file_obj = L2_Input.Input_File(input_file)

        run_file_list = get_obj_referenced_filenames(run_file_obj, curr_dir)
        inp_file_list = get_obj_referenced_filenames(inp_file_obj, curr_dir)

        for ref_filename in (run_file_list + inp_file_list):
            if ref_unique_filenames.has_key(ref_filename):
                ref_unique_filenames[ref_filename] += 1
            else:
                ref_unique_filenames[ref_filename] = 0

        # Progress marks since this loop takes a while
        sys.stdout.write('.')
        sys.stdout.flush()
        
        dir_count += 1

    # Newline to terminate the progress marks
    sys.stdout.write('\n')

    if type(destination) is str:
        dest_filenames = get_all_filenames(destination)
    else:
        dest_filenames = get_all_filenames(destination.filename)

    for static_filename in dest_filenames:
        if not ref_unique_filenames.has_key(static_filename):
            try:
                os.remove(static_filename)
                if verbose:
                    logger.debug('Deleted: %s' % static_filename)
            except:
                logger.error('Could not delete: %s' % static_filename)
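# The scrub above assumes get_all_filenames() returns every file path found
# under the destination directory so that unreferenced files can be removed.
# A minimal sketch of that assumed helper (the real one may differ):
def get_all_filenames_sketch(search_dir):
    import os
    found_files = []
    for dir_path, dir_names, file_names in os.walk(search_dir):
        for curr_file in file_names:
            found_files.append(os.path.join(dir_path, curr_file))
    return found_files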
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict, buffer_objs):
    logger = logging.getLogger(os.path.basename(__file__))

    logger.debug('Creating aerosol blocks for file: %s' % source)

    fileObj = L2_Input.Input_File(source)

    param_def_sec = fileObj.Get_Section('PARAMETER_DEFINITION')
    if len(param_def_sec) == 0:
        logger.error('%s has sections: %s' % (source, fileObj.Get_All_Section_Names()))
        raise IOError('Could not find PARAMETER_DEFINITION section in source: %s' % source)

    orig_aero_secs     = param_def_sec[0].Get_Section('->AEROSOL')
    orig_aero_sec_names = [ curr_aer_sec.Get_Keyword_Value('name').upper() for curr_aer_sec in orig_aero_secs ]

    try:
        aero_block_prototype = orig_aero_secs[0]
    except IndexError:
        # For now just raise the error; the fallback below that builds a fresh
        # prototype section is intentionally left unreachable until needed
        raise IndexError('Could not find aerosol block to use as prototype in file: %s' % source)

        logger.warning('No aerosol block found as prototype, trying to create a new section')
       
        aero_block_prototype = L2_Input.Section('AEROSOL')
        aero_block_prototype.Set_Keyword_Value('name', None)
        aero_block_prototype.Set_Keyword_Value('a_priori', None)
        aero_block_prototype.Set_Keyword_Value('covariance', None)
        aero_block_prototype.Set_Keyword_Value('mie_file', None)
        aero_block_prototype.Set_Keyword_Value('moment_file', None)
        aero_block_prototype.Set_Keyword_Value('retrieval_indicies', None)
    
    for curr_sect_index, curr_section_obj in enumerate(moduleSections):
        profile_names = Apply_Template(curr_section_obj.Get_Keyword_Value('profile_names'), valuesDict, mapDict=mapDict)
        type_names    = Apply_Template(curr_section_obj.Get_Keyword_Value('type_names'), valuesDict, mapDict=mapDict)

        aerosol_file  = Apply_Template(curr_section_obj.Get_Keyword_Value('from_file'), valuesDict, mapDict=mapDict)
        set_retrieval_vector = evaluate_bool_str( curr_section_obj.Get_Keyword_Value('set_retrieval_vector') )

        if curr_sect_index == 0:
            remove_existing_blocks = True
        else:
            remove_existing_blocks = False

        if aerosol_file != None and (profile_names == None or len(profile_names) == 0):
            if buffer_objs.has_key(aerosol_file):
                aerosol_obj = buffer_objs[aerosol_file]
                aerosol_obj.seek(0)
            else:
                aerosol_obj = aerosol_file

            profile_names = []
            type_names = []

            logger.debug('Using aerosol file for profile names: %s' % aerosol_obj)

            mat_obj = OCO_Matrix(aerosol_obj)
            for lbl_name in mat_obj.labels_lower:
                if lbl_name != 'pressure':
                    profile_names.append(lbl_name.upper())
                    type_names.append(lbl_name.lower())
        else:
            if profile_names == None or len(profile_names) == 0:
                raise AttributeError('profile_names needs to be defined')
            elif not type(profile_names) is ListType:
                profile_names = profile_names.split()

            if type_names == None or len(type_names) == 0:
                raise AttributeError('type_names needs to be defined')
            elif not type(type_names) is ListType:
                type_names = type_names.split()

        logger.debug('Using profile names: %s' % (', '.join(profile_names)))
        logger.debug('Using type names: %s' % (', '.join(type_names)))

        if remove_existing_blocks:
            param_def_sec[0].Set_Keyword_Value('aerosol_types', profile_names)
        else:
            existing_types = param_def_sec[0].Get_Keyword_Value('aerosol_types')

            if hasattr(existing_types, '__iter__'):
                existing_types += profile_names
            else:
                existing_types += ' ' + ' '.join(profile_names)

            param_def_sec[0].Set_Keyword_Value('aerosol_types', existing_types)

        if set_retrieval_vector:
            retrieval_vector = param_def_sec[0].Get_Keyword_Value('retrieval_vector')

            if retrieval_vector == None:
                raise IOError('Could not find retrieval_vector keyword for PARAMETER_DEFINITION in file: %s' % source)
            
            if type(retrieval_vector) is not ListType:
                retrieval_vector = retrieval_vector.split()

            retrieval_vector = [ rv_str.upper() for rv_str in retrieval_vector ]

            # Remove existing aerosol names from retrieval vector
            for curr_aer_name in orig_aero_sec_names:
                if curr_aer_name in retrieval_vector:
                    retrieval_vector.remove(curr_aer_name)

            for curr_prof_name in profile_names:
                retrieval_vector.append(curr_prof_name)

            param_def_sec[0].Set_Keyword_Value('retrieval_vector', retrieval_vector)

        # Delete the original aerosol type sections from the parameter definition
        if remove_existing_blocks:
            for as_del in orig_aero_secs:
                param_def_sec[0].children.remove(as_del)

        for (curr_prof_name, curr_type_name) in zip(profile_names, type_names):
            new_aero = copy.deepcopy(aero_block_prototype)

            new_aero.Set_Keyword_Value('name', curr_prof_name)

            mie = new_aero.Get_Keyword_Value('mie_file')
            mie_dn = os.path.dirname(mie)
            new_aero.Set_Keyword_Value('mie_file', mie_dn + '/' + curr_type_name + '.mie')

            mom = new_aero.Get_Keyword_Value('moment_file')
            mom_dn = os.path.dirname(mom)
            new_aero.Set_Keyword_Value('moment_file', mom_dn + '/' + curr_type_name + '.mom')

            param_def_sec[0].children.append(new_aero)

    logger.debug('Writing aerosol changes to file: %s' % destination)
    fileObj.Write(destination)
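# Illustrative example of the cloning performed above (not taken from a real
# config): with profile_names = ['WATER'] and type_names = ['water'], the
# copied prototype keeps its other keywords but ends up with
#   name        = WATER
#   mie_file    = <dirname of prototype mie_file>/water.mie
#   moment_file = <dirname of prototype moment_file>/water.mom
# before being appended as a new child of PARAMETER_DEFINITION.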
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict):
   logger = logging.getLogger(os.path.basename(__file__))
   
   if len(moduleSections) > 1:
      raise RuntimeError( 'Only one extraction block allowed')

   apriori_dir      = Apply_Template(moduleSections[0].Get_Keyword_Value('apriori_dir'), valuesDict, mapDict=mapDict)
   num_levels       = int(Apply_Template(moduleSections[0].Get_Keyword_Value('num_levels'), valuesDict, mapDict=mapDict))
   l1b_file         = Apply_Template(moduleSections[0].Get_Keyword_Value('l1b_file'), valuesDict, mapDict=mapDict)
   ecmwf_file       = Apply_Template(moduleSections[0].Get_Keyword_Value('ecmwf_file'), valuesDict, mapDict=mapDict)
   sounding_id_file = Apply_Template(moduleSections[0].Get_Keyword_Value('sounding_id_file'), valuesDict, mapDict=mapDict)
   sounding_id_sect = Apply_Template(moduleSections[0].Get_Keyword_Value('sounding_id_sect'), valuesDict, mapDict=mapDict)

   # Force all soundings to use a specific brdf type
   use_brdf_type    = Apply_Template(moduleSections[0].Get_Keyword_Value('use_brdf_type'), valuesDict, mapDict=mapDict)

   apriori_base_id_map = Apply_Template(moduleSections[0].Get_Keyword_Value('apriori_base_id_map'), valuesDict, mapDict=mapDict)
   brdf_type_id_map    = Apply_Template(moduleSections[0].Get_Keyword_Value('brdf_type_id_map'), valuesDict, mapDict=mapDict)
   gain_type_id_map    = Apply_Template(moduleSections[0].Get_Keyword_Value('gain_type_id_map'), valuesDict, mapDict=mapDict)
   windspeed_f         = evaluate_bool_str(Apply_Template(moduleSections[0].Get_Keyword_Value('windspeed_f'), valuesDict, mapDict=mapDict), default=False)

   global_mean_xco2    = Apply_Template(moduleSections[0].Get_Keyword_Value('global_mean_xco2'), valuesDict, mapDict=mapDict)

   # Check arguments
   if not os.path.isdir(apriori_dir):
      raise IOError('apriori_dir: %s must be a directory' % apriori_dir)

   for required_keyword in ('num_levels', 'l1b_file', 'sounding_id_file', 'sounding_id_sect'):
      if eval(required_keyword) == None:
         raise ValueError('%s keyword must be defined' % required_keyword)

   for file_check in ('l1b_file', 'sounding_id_file'):
      if not os.path.exists(eval(file_check)):
         raise IOError('%s: %s does not exist' % (file_check, eval(file_check)))

   # Load data from external files that only need to be read once
   sounding_id_list = Read_Id_List_File(sounding_id_file, sounding_id_sect, valuesDict=valuesDict, mapDict=mapDict)

   if global_mean_xco2 != None:
      if len(global_mean_xco2) == 0:
         global_mean_xco2 = None
      else:
         global_mean_xco2 = float(global_mean_xco2)

   co2_apriori_db = Apriori_DB.CO2(alternative_global_mean=global_mean_xco2)

   # Operate in context of input files
   with nested(ACOS_File.L1B(l1b_file), h5py.File(ecmwf_file,'r')) as (l1b_obj, ecmwf_obj):
       logger.debug('Loading apriori data based on %d soundings from l1b file: %s' % (len(sounding_id_list), l1b_file))

       # Retrieve these here since they are done with H5Dump and should not be
       # done for each sounding
       l1b_build_id = l1b_obj.get_build_id()

       # Write per sounding files and maps in context of output map files
       with nested(write_if_def(apriori_base_id_map, 'apriori base name map'),
                   write_if_def(brdf_type_id_map, 'brdf type map'),
                   write_if_def(gain_type_id_map, 'gain code map')) as (apriori_base_id_obj, brdf_type_id_obj, gain_type_id_obj):

           sys.stdout.write('%s: Creating per sounding data: ' % os.path.basename(__file__))
           for sounding_id in sounding_id_list:

              load_sounding_apriori(sounding_id, l1b_obj, ecmwf_obj, num_levels, apriori_dir, co2_apriori_db, apriori_base_id_obj, brdf_type_id_obj, gain_type_id_obj, windspeed_f, l1b_build_id=l1b_build_id, use_brdf_type=use_brdf_type)

              # Progress marks
              sys.stdout.write('.')
              sys.stdout.flush()

           # Newline for progress marks
           sys.stdout.write('\n')
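# The nested with-statement above relies on a write_if_def() context manager
# that only opens its map file when the corresponding keyword was defined.
# A minimal sketch of that assumed behavior (the real helper may differ, e.g.
# in how it uses the description argument):
from contextlib import contextmanager

@contextmanager
def write_if_def_sketch(map_filename, description):
    if map_filename != None and len(map_filename) > 0:
        map_obj = open(map_filename, 'w')
        try:
            yield map_obj
        finally:
            map_obj.close()
    else:
        # Nothing to write; callers are assumed to handle a None object
        yield None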
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict, mapDict):
    logger = logging.getLogger(os.path.basename(__file__))

    # Load existing file
    matrix_obj = OCO_Matrix(source)

    for modifySect in moduleSections:

        # Allow columns to be specified individually or with a * to go to the end
        columns = Apply_Template(modifySect.Get_Keyword_Value('columns'), valuesDict, mapDict=mapDict)
        rows    = Apply_Template(modifySect.Get_Keyword_Value('rows'), valuesDict, mapDict=mapDict)
        modify  = modifySect.Get_Keyword_Value('modify')
        delete  = evaluate_bool_str( modifySect.Get_Keyword_Value('delete') )
        add_column = evaluate_bool_str( modifySect.Get_Keyword_Value('add_column') )

        if columns != None:
            try:
                columns = index_range_list(columns, max_value=matrix_obj.dims[1])
            except:
                if not type(columns) is ListType:
                    col_name_list = [columns]
                else:
                    col_name_list = columns

                columns = []
                for curr_name in col_name_list:
                    if curr_name.lower() not in matrix_obj.labels_lower:
                        if add_column:
                            matrix_obj.add_column(curr_name)
                            columns.append( matrix_obj.dims[1] - 1 )
                        else:
                            raise IOError('Column named %s not found in file: %s' % (curr_name, source))
                    else:
                        columns.append( matrix_obj.labels_lower.index(curr_name.lower()) )
        else:
            columns = range(matrix_obj.dims[1])

        if rows != None:
            rows = index_range_list(rows, max_value=matrix_obj.dims[0])
        else:
            rows = range(matrix_obj.dims[0])

        if delete and modify != None:
            raise ValueError('delete and modify keywords can not be specified together')

        if delete:
            if len(columns) > matrix_obj.dims[1]:
                raise IOError('More columns to be deleted %d than exist %d in input file %s' % (len(columns), matrix_obj.dims[1], source))
            
            new_data = numpy.zeros((matrix_obj.dims[0], matrix_obj.dims[1]-len(columns)), dtype=numpy.double)
            new_labels = []
            new_units  = []

            new_col_idx = 0
            for old_col_idx in range(matrix_obj.dims[1]):
                if old_col_idx not in columns:
                    new_labels.append(matrix_obj.labels[old_col_idx])
                    new_units.append(matrix_obj.units[old_col_idx])

                    new_data[:,new_col_idx] = matrix_obj.data[:,old_col_idx]

                    new_col_idx += 1

            matrix_obj.data = new_data
            matrix_obj.labels = new_labels
            matrix_obj.units  = new_units

        if modify != None and len(modify) > 0:
            
            modifyDict = copy_module.copy(valuesDict)

            Get_Constant_Values(modifySect.Get_Section('->CONSTANTS'), modifyDict)

            for row_idx in rows:
                for col_idx in columns:
                    modifyDict['original'] = str(matrix_obj.data[row_idx][col_idx])

                    modify_str = Apply_Template(modify, modifyDict, mapDict=mapDict)

                    try:
                        matrix_obj.data[row_idx][col_idx] = eval(modify_str)
                    except:
                        raise RuntimeError('Error evaluating modify string: "%s"' % modify_str)

    matrix_obj.write(destination, auto_size_cols=False)
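# The hand-rolled column removal above could equivalently be expressed with
# numpy.delete; this sketch assumes `columns` holds the integer indexes to
# drop and that labels/units are plain lists:
import numpy

def delete_columns_sketch(data, labels, units, columns):
    keep = [idx for idx in range(data.shape[1]) if idx not in columns]
    new_data = numpy.delete(data, list(columns), axis=1)
    new_labels = [labels[idx] for idx in keep]
    new_units = [units[idx] for idx in keep]
    return new_data, new_labels, new_units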
def Process_File(source, destination, fileKeywords, moduleSections, valuesDict,
                 mapDict):
    logger = logging.getLogger(os.path.basename(__file__))

    if len(moduleSections) > 1:
        raise RuntimeError('Only one value map creation allowed per FILE')

    if str(source) == str(destination):
        raise IOError(
            'source and destination must be different. will not overwrite source file'
        )

    list_file = Apply_Template(
        moduleSections[0].Get_Keyword_Value('list_file'),
        valuesDict,
        mapDict=mapDict)
    data_file = Apply_Template(
        moduleSections[0].Get_Keyword_Value('data_file'),
        valuesDict,
        mapDict=mapDict)
    data_col = Apply_Template(
        moduleSections[0].Get_Keyword_Value('data_column'),
        valuesDict,
        mapDict=mapDict)
    section = Apply_Template(moduleSections[0].Get_Keyword_Value('section'),
                             valuesDict,
                             mapDict=mapDict)
    static_value = Apply_Template(
        moduleSections[0].Get_Keyword_Value('static_value'),
        valuesDict,
        mapDict=mapDict)
    is_log_file = evaluate_bool_str(
        moduleSections[0].Get_Keyword_Value('is_log_file'))
    l1b_file = Apply_Template(moduleSections[0].Get_Keyword_Value('l1b_file'),
                              valuesDict,
                              mapDict=mapDict)
    modify = moduleSections[0].Get_Keyword_Value('modify')

    max_range_val = None
    range_values = {}
    for range_sect in moduleSections[0].Get_Section('->RANGES'):
        logger.debug('Using range section')

        for range_spec in range_sect.Get_Matrix_Data():
            (range_name, range_str) = range_spec

            if range_str.find(',') > 0:
                curr_range = [float(val) for val in range_str.split(',')]
            else:
                curr_range = [float(val) for val in range_str.split()]

            if max_range_val == None:
                max_range_val = max(curr_range)
            else:
                max_range_val = max(max_range_val, max(curr_range))

            range_values[range_name] = curr_range

    id_list = Read_Id_List_File(list_file,
                                section,
                                valuesDict=valuesDict,
                                mapDict=mapDict)

    data_values = []
    if data_file != None:

        if len(data_file) == 0 or not os.path.exists(data_file):
            raise IOError('Could not read data_file')

        if is_log_file:
            if l1b_file == None or len(l1b_file) == 0:
                raise ValueError(
                    'Need L1B file specified for using log file as source of data'
                )
            if not os.path.exists(l1b_file):
                raise IOError('L1B file specified does not exist: %s' %
                              l1b_file)

            log_file_obj = Orbit_Sim.Log_File(data_file)
            col_index = log_file_obj.get_column_index(data_col)

            if not type(col_index) is ListType:
                col_index = [col_index]

            h5_obj = h5py.File(l1b_file, 'r')
            snd_id_matrix = h5_obj[SOUNDING_ID_GROUP][SOUNDING_ID_DATASET]
            frame_id_arr = h5_obj[FRAME_ID_GROUP][FRAME_ID_DATASET]

            for curr_sounding in id_list:
                curr_frame_id = int(str(curr_sounding)[0:-1])
                frame_index = bisect.bisect_left(frame_id_arr, curr_frame_id)

                for snd_index in range(snd_id_matrix.shape[1]):
                    if snd_id_matrix[frame_index,
                                     snd_index] == int(curr_sounding):
                        break

                if snd_id_matrix[frame_index, snd_index] != int(curr_sounding):
                    raise ValueError(
                        'did not find correct sounding id: %d at index: %s in hdf file: %s, instead found: %d'
                        % (curr_sounding, (frame_index, snd_index), l1b_file,
                           snd_id_matrix[frame_index, snd_index]))

                curr_log_val = 0.0
                for curr_val_idx in col_index:
                    curr_log_val += log_file_obj.data[frame_index, snd_index,
                                                      curr_val_idx]

                data_values.append(curr_log_val)

        else:
            if data_col == None:
                data_col = 0
            else:
                data_col = int(data_col)

            logger.debug('Reading mapped values from column %d of file %s' %
                         (data_col, data_file))
            data_fobj = open(data_file)
            for data_line in data_fobj.readlines():
                data_line = data_line.strip()
                if len(data_line) > 0 and data_line.find('#') != 0:
                    line_parts = data_line.split()

                    if len(line_parts) - 1 < data_col:
                        raise IOError('data file %s does not have column %d' %
                                      (data_file, data_col))
                    data_values.append(line_parts[data_col])

    if static_value != None:
        logger.debug('Setting mapped value to static value: %s' % static_value)
        for idx in range(len(id_list) - len(data_values)):
            data_values.append(static_value)

    if len(id_list) != len(data_values):
        raise IOError(
            'Length of id list %d from file %s does not match length of data values %d from %s'
            % (len(id_list), list_file, len(data_values), data_file))

    mapValues = None
    mapSects = moduleSections[0].Get_Section('->MAP')
    if mapSects != None and len(mapSects) > 0:
        mapValues = Get_Map_Values(mapSects, valuesDict)

    logger.debug('Writing map file: %s' % destination)

    if type(destination) is str:
        dstFileObj = open(destination, 'w')
    elif hasattr(destination, 'write'):
        dstFileObj = destination
    else:
        raise Exception('Unrecognized destination object: %s' % destination)

    if modify != None and len(modify) > 0:
        modifyDict = copy.copy(valuesDict)

    for (id_val, data_val) in zip(id_list, data_values):
        if modify != None and len(modify) > 0:
            modifyDict['original'] = str(data_val)
            modify_expr = Apply_Template(modify, modifyDict, mapDict=mapDict)
            data_val = eval(modify_expr)

        if len(range_values) > 0:
            found_range_value = False
            for (curr_name, curr_values) in range_values.items():
                beg_val = curr_values[0]
                end_val = curr_values[1]

                if float(data_val) >= beg_val and float(data_val) < end_val:
                    data_val = curr_name
                    found_range_value = True
                    break

            if not found_range_value:
                raise LookupError(
                    'RANGE values specified but none matched for value: %s' %
                    data_val)

        if mapValues != None and (str(data_val)
                                  in mapValues[DEFAULT_MAP_NAME]):
            print >> dstFileObj, id_val, mapValues[DEFAULT_MAP_NAME][str(
                data_val)]
        else:
            print >> dstFileObj, id_val, str(data_val)

    if type(destination) is str:
        dstFileObj.close()
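# A minimal sketch of the RANGES lookup performed above: each entry maps a
# name to a [begin, end) interval and a data value is replaced by the name of
# the first matching interval, e.g. {'low': [0.0, 10.0], 'high': [10.0, 50.0]}
# maps 3.2 to 'low':
def lookup_range_sketch(range_values, data_val):
    for (curr_name, curr_values) in range_values.items():
        if curr_values[0] <= float(data_val) < curr_values[1]:
            return curr_name
    raise LookupError('RANGE values specified but none matched for value: %s' % data_val)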