Example #1
    def __init__(self,
                 filenames,
                 routine_objs=None,
                 obj_names=None,
                 addl_files=None):
        # Load files
        self.data_objs = [acos_file.L2(fn) for fn in filenames]

        # Load additional files
        self.addl_objs = []
        if addl_files is not None:
            for curr_addl in addl_files:
                # First try to open the file as one where the sounding id
                # dataset must be located; otherwise assume the sounding id
                # dimension is the first one in each dataset (e.g. an ECMWF file)
                try:
                    curr_obj = acos_file.IdDatasetFinder(curr_addl)
                except (LookupError, AttributeError):
                    curr_obj = acos_file.SoundingFirstFile(curr_addl)
                self.addl_objs.append(curr_obj)

        if self.addl_objs and len(self.data_objs) != len(self.addl_objs):
            raise ValueError(
                "If additional files are supplied, there must be one for each data object"
            )

        # Names to use when plotting
        if obj_names is not None:
            if len(obj_names) != len(filenames):
                raise ValueError(
                    "Number of object names passed must be same as number of filenames"
                )
            self.obj_names = obj_names
        else:
            self.obj_names = self._obj_names_from_filenames(filenames)

        # Make sure we can iterate one or more objects, but leave None alone
        # so that a missing value is not wrapped into [None]
        if routine_objs is not None and not hasattr(routine_objs, "__iter__"):
            routine_objs = [routine_objs]

        # Get routines present in the objects
        self.routines = {}
        if routine_objs is not None:
            self.add_routines(routine_objs)

        # Correlate the soundings in the supplied files
        self.__dict__.update(self.correlate_soundings())

        # Initialize the global name space
        self._init_global_namespace()

        # Init local namespace
        self._init_local_namespace()
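
A minimal usage sketch for the constructor above. The class name L2Comparison and the file paths are placeholders for illustration only; the snippet assumes the listed L2 and ECMWF HDF files exist and that acos_file can open them.

# Hypothetical usage; L2Comparison and the paths are not names from the original source.
comparison = L2Comparison(
    ["l2_run_a.h5", "l2_run_b.h5"],           # L2 output files to compare
    obj_names=["run_a", "run_b"],             # labels used when plotting
    addl_files=["ecmwf_a.h5", "ecmwf_b.h5"],  # one additional file per data object
)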
Example #2
def gather_status(base_dir,
                  output_dir,
                  run_id_file=None,
                  aggregate_file=None,
                  verbose=False):

    # Strip trailing slash
    output_dir = output_dir.rstrip('/')

    # Make output location if not already existing
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Load aggregate file
    if aggregate_file is not None:
        aggregate_file = acos_file.L2(aggregate_file)

    r_count = run_results.results_count()

    out_filenames = {}
    out_objects = {}
    for type_name in r_count.type_names(unique=False):
        filename = '%s/%s.list' % (output_dir, type_name)
        out_obj = StringIO()
        out_filenames[type_name] = filename
        out_objects[type_name] = out_obj
        r_count.register_output(type_name, out_obj)

    run_id_list = []
    if run_id_file is not None:
        for run_id in run_results.read_run_id_file(run_id_file):
            run_id_list.append((base_dir, run_id))
    else:
        run_results.find_run_ids(base_dir, run_id_list)

    for id_dir, run_id in run_id_list:
        d_result = run_results.run_id_result(id_dir,
                                             run_id,
                                             aggregate_file=aggregate_file)
        r_count.add_count(d_result)

    # Output stats to file
    with open('%s/%s' % (output_dir, status_count_filename), 'w') as stats_out_obj:
        r_count.print_overall_stats(stats_out_obj)

    # Output stats to screen
    if verbose:
        r_count.print_overall_stats()

    # Output non-empty files
    run_lists = {}
    aggregated_dirs = {}
    for key in list(aggregate_types.keys()):
        aggregated_dirs[key] = ''

    for type_name, filename in list(out_filenames.items()):
        out_str_obj = out_objects[type_name]
        out_strings = out_str_obj.getvalue()

        if len(out_strings) > 0:
            # Drop the last, empty entry caused by the trailing \n
            run_lists[type_name] = out_strings.split('\n')[:-1]

            with open(filename, 'w') as out_file_obj:
                out_file_obj.write(out_strings)

            for agg_name, agg_types in list(aggregate_types.items()):
                if type_name in agg_types:
                    aggregated_dirs[agg_name] += out_strings
        elif os.path.exists(filename):
            os.remove(filename)

    # Output non-empty aggregated files
    for agg_name, agg_strings in list(aggregated_dirs.items()):
        agg_filename = '%s/%s.list' % (output_dir, agg_name)

        if len(agg_strings) > 0:
            # Drop the last, empty entry caused by the trailing \n
            run_lists[agg_name] = agg_strings.split('\n')[:-1]

            with open(agg_filename, 'w') as out_file_obj:
                out_file_obj.write(agg_strings)
        elif os.path.exists(agg_filename):
            os.remove(agg_filename)

    return run_lists
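
A minimal invocation sketch for gather_status. It assumes the surrounding module defines status_count_filename and aggregate_types and that base_dir contains run directories; the directory paths below are placeholders, not values from the original source.

# Hypothetical call; the paths are placeholders.
run_lists = gather_status(
    base_dir="/data/l2_runs",      # directory tree searched for run ids
    output_dir="/data/l2_status",  # .list files and the stats file are written here
    verbose=True,                  # also print the overall stats to the screen
)
for type_name, run_ids in run_lists.items():
    print(type_name, len(run_ids))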