Example #1
 def convert(self):
     """Write values to output table.
     """
     print('convert from steps to compartments...')
     prog = ProgressDisplay(len(self.compartments))
     step = 0
     filters = tables.Filters(complevel=5, complib='zlib')
     for comp in self.compartments:
         prog.show_progress(step + 1, force=True)
         step = step + 1
         group = self.hdf_output.create_group('/', comp.decode('ascii'))
         values = self._get_values(comp, 'lateral_input')
         self.hdf_output.create_table(group,
                                      'lateral_input',
                                      values,
                                      filters=filters)
         values = self._get_values(comp, 'upstream_input')
         self.hdf_output.create_table(group,
                                      'upstream_input',
                                      values,
                                      filters=filters)
         values = self._get_parameterization(comp)
         self.hdf_output.create_table(group,
                                      'parameterization',
                                      values,
                                      filters=filters)
         values = self._get_initialcondititions(comp)
         self.hdf_output.create_table(group,
                                      'initial_conditions',
                                      values,
                                      filters=filters)
     print()
     print(prog.last_display)
     print('Done')
Example #2
 def convert(self):
     """Write values to hdf output table and csv files.
     """
     print('convert from steps to outlets...')
     prog = ProgressDisplay(len(self.outlets))
     step = 0
     filters = tables.Filters(complevel=5, complib='zlib')
     for outlet in self.outlets:
         prog.show_progress(step + 1, force=True)
         step = step + 1
         group = self.hdf_output.create_group('/', outlet.decode('ascii'))
         values = self._get_values(outlet, 'upstream_input')
         self.hdf_output.create_table(group, 'upstream_input', values, filters=filters)
         self.write_outletaggregation_csv(outlet, values)            
     print()
     print(prog.last_display)
     print('Done')
Example #3
 def sum_load(self, steps=365*24):
     """Yearly sum of load upstream of lakes.
     """
     ids = self.lake_dict()
     d = {'load_sum': dict.fromkeys(ids, 0), 'local_discharge_sum': dict.fromkeys(ids, 0)}
     print()
     print('Sum loads and discharges of all upstream catchments...')
     prog = ProgressDisplay(steps)
     for step in range(steps):
         prog.show_progress(step + 1, force=True)
         in_table = pandas.read_hdf(self.input_file_name, '/step_{}/values'.format(step), mode='r')
         loads = in_table[['catchment', 'load', 'local_discharge']][in_table['catchment'].isin(ids)]
         load_add = loads.set_index('catchment')['load'].to_dict()
         discharge_add = loads.set_index('catchment')['local_discharge'].to_dict()
         # accumulate the per-catchment sums across all timesteps
         d['load_sum'] = dict(Counter(d['load_sum']) + Counter(load_add))
         d['local_discharge_sum'] = dict(Counter(d['local_discharge_sum']) + Counter(discharge_add))
     print()
     print(prog.last_display)
     return d
Example #4
 def aggregate(self, steps=365*24):
     """Aggregate loads for every timestep
     """
     print('')
     print('Aggregate loads for every compartment and write to HDF output file per timestep...')
     prog = ProgressDisplay(steps)
     for step in range(0,steps):
         prog.show_progress(step + 1, force=True)
         in_table = pandas.read_hdf(self.input_file_name, '/step_{}/values'.format(step), mode='r')
         filters = tables.Filters(complevel=5, complib='zlib')
         out_group = self.hdf_output.create_group('/', 'step_{}'.format(step))
         out_table = self.hdf_output.create_table(out_group, 'lateral_input', OutputValues, filters=filters)
         outputvalues = out_table.row
         self._write_lateral_input(step, in_table, outputvalues)
         out_table.flush()
         out_table = self.hdf_output.create_table(out_group, 'upstream_input', OutputValues, filters=filters)
         outputvalues = out_table.row
         self._write_upstream_input(step, in_table, outputvalues)
         out_table.flush()
     print()
     print(prog.last_display)
     print('Done')
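Example #4 passes an `OutputValues` description class to `create_table` and then fills the table row by row via `out_table.row`; the description itself is not included in this listing. The sketch below is only a rough, hypothetical PyTables description built from column names that appear in the other examples, not the project's real definition.

import tables


class OutputValues(tables.IsDescription):
    # Hypothetical column layout inferred from the surrounding examples;
    # the real OutputValues description is not part of this listing.
    catchment = tables.StringCol(30)        # catchment/compartment identifier
    discharge = tables.Float64Col()         # discharge for this timestep
    local_discharge = tables.Float64Col()   # local (lateral) discharge
    load = tables.Float64Col()              # aggregated load

Inside the `_write_lateral_input` and `_write_upstream_input` helpers (not shown), rows are presumably filled with the standard PyTables Row API, e.g. `outputvalues['load'] = value` followed by `outputvalues.append()`; `out_table.flush()` then writes them to disk as in the loop above.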
Example #5
 def write_hdf(self, steps=24*365):
     """Write new hdf with yearly concentration for all catchments above lakes.
     """
     d = self.sum_load(steps)
     df = pandas.DataFrame(d)
     df.index.rename('catchment', inplace=True)
     df['catchment'] = df.index
     df['concentration'] = df['load_sum'] / (df['local_discharge_sum'] * 3600) * 1e06
     with pandas.HDFStore(self.output_file_name, mode='w') as store:
         print()
         print('Calculate constant concentration and discharge varying loads and write to HDF5...')
         prog = ProgressDisplay(steps)
         for step in range(steps):
             prog.show_progress(step + 1, force=True)
             out_table = pandas.read_hdf(self.input_file_name, '/step_{}/values'.format(step), mode='r')
             out_table.set_index('catchment', inplace=True)
             out_table['concentration'].update(df.concentration)
             out_table['load'].update(out_table.loc[out_table.index.isin(df.index)].concentration *
                                      out_table.loc[out_table.index.isin(df.index)].local_discharge * 3600 * 1e-06)
             out_table.reset_index(level=0, inplace=True)
             store.append('/step_{}/values'.format(step), out_table, data_columns=True, index=False)
         print()
         print(prog.last_display)
Example #6
    def _run_all(self):
        """Run all models with out catching all exceptions.
        The similary named `run_all()` method will close all HDF5 files
        after an exception in this method.
        """
        all_ids = find_ids(self.hdf_input)
        nids = len(all_ids)
        prog = ProgressDisplay(nids)
        all_ids = iter(all_ids)  # iterator of all_ids
        free_paths = self.worker_paths[:]
        active_workers = {}
        done = False
        counter = 0
        with ChDir(str(self.tmp_path)):
            while True:
                for path in free_paths:
                    try:
                        id_ = next(all_ids)
                    except StopIteration:
                        done = True
                        break
                    counter += 1
                    prog.show_progress(counter, additional=id_)
                    parameters, inputs = self._read_parameters_inputs(id_)
                    worker = Worker(id_, path, parameters, inputs, 
                                    self.steps_per_day,
                                    self.layout_xml_path,
                                    self.layout_name_template,
                                    self.param_txt_path,
                                    self.param_name_template,
                                    self.ratio_area_cal,
                                    self.queue,
                                    debug=self.debug,
                                    use_wine=self.use_wine)
                    worker.start()
                    active_workers[path] = worker

                free_paths = []
                for path, worker in active_workers.items():
                    if not worker.is_alive():
                        free_paths.append(path)
                while not self.queue.empty():
                    self._write_output(*self.queue.get())
                if done:
                    break
            for worker in active_workers.values():
                worker.join()
            while not self.queue.empty():
                self._write_output(*self.queue.get())
        prog.show_progress(counter, additional=id_, force=True)
        print()
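The docstring above refers to a similarly named `run_all()` method that closes all HDF5 files after an exception in `_run_all()`; that method is not included in this listing. A minimal sketch of what such a wrapper could look like, assuming the class keeps its open input file on `self.hdf_input` (as used above) and possibly further files, is:

    def run_all(self):
        """Run all models, closing HDF5 files if `_run_all()` fails.

        Hypothetical wrapper; the real method is not shown above and may
        close additional output files as well.
        """
        try:
            self._run_all()
        except Exception:
            self.hdf_input.close()
            raise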
Example #7
def convert(in_file_name, out_file_name, batch_size=2, total=365 * 24):
    """Convert on gigantic table into one per timesstep.
    """
    prog = ProgressDisplay(total)
    filters = tables.Filters(complevel=5, complib='zlib')
    in_file = tables.open_file(in_file_name, mode='a')
    table = in_file.get_node('/output')
    make_index(table)
    nrows = table.nrows  # pylint: disable=no-member
    nids = count_ids(table.cols.catchment)  # pylint: disable=no-member
    assert nrows == total * nids
    out_file = tables.open_file(out_file_name, mode='w')
    start = 0
    stop = nids
    read_start = 0
    read_stop = nids * batch_size
    for step in range(total):
        prog.show_progress(step + 1)
        if step % batch_size == 0:
            # pylint: disable=no-member
            batch_data = table.read_sorted('timestep',
                                           start=read_start,
                                           stop=read_stop)
            read_start = read_stop
            read_stop += nids * batch_size
            read_stop = min(read_stop, nrows)
            start = 0
            stop = start + nids
        id_data = batch_data[start:stop]
        start = stop
        stop += nids
        try:
            assert len(set(id_data['timestep'])) == 1
        except AssertionError:
            print(set(id_data['timestep']))
            print(id_data)
        values = id_data[[
            'catchment', 'concentration', 'discharge', 'local_discharge',
            'load'
        ]]
        group = out_file.create_group('/', 'step_{}'.format(step))
        out_file.create_table(group, 'values', values, filters=filters)
    prog.show_progress(step + 1, force=True)
    in_file.close()
    out_file.close()
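For reference, a hypothetical driver for this converter could look like the following; the file names are placeholders, and `batch_size` only controls how many timesteps' worth of rows are pulled from the sorted table per `read_sorted` call.

if __name__ == '__main__':
    # Placeholder file names; convert() itself opens and closes both files.
    convert(in_file_name='model_output.h5',
            out_file_name='per_step_output.h5',
            batch_size=24,       # read one day of hourly steps per batch
            total=365 * 24)      # one year of hourly timesteps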
Example #8
def add_input_tables(h5_file_name,
                     t_file_name,
                     p_file_name,
                     q_file_name,
                     qloc_file_name,
                     timesteps_per_day,
                     batch_size=None,
                     total=365 * 24):
    """Add input with pandas.
    """
    # pylint: disable=too-many-locals
    filters = tables.Filters(complevel=5, complib='zlib')
    h5_file = tables.open_file(h5_file_name, mode='a')
    get_child = h5_file.root._f_get_child  # pylint: disable=protected-access
    all_ids = ids = find_ids(h5_file)
    usecols = None
    if batch_size is None:
        batch_size = sys.maxsize
    if batch_size < len(all_ids):
        usecols = True
    counter = 0
    total_ids = len(all_ids)
    prog = ProgressDisplay(total_ids)
    # pylint: disable=undefined-loop-variable
    while all_ids:
        ids = all_ids[-batch_size:]
        all_ids = all_ids[:-batch_size]
        if usecols:
            usecols = ids
        temp = pandas.read_csv(t_file_name,
                               sep=';',
                               parse_dates=True,
                               usecols=usecols)
        precip = pandas.read_csv(p_file_name,
                                 sep=';',
                                 parse_dates=True,
                                 usecols=usecols)
        dis = pandas.read_csv(q_file_name,
                              sep=';',
                              parse_dates=True,
                              usecols=usecols)
        locdis = pandas.read_csv(qloc_file_name,
                                 sep=';',
                                 parse_dates=True,
                                 usecols=usecols)
        temp_hourly = temp.reindex(dis.index, method='ffill')
        for id_ in ids:
            counter += 1
            inputs = pandas.concat(
                [temp_hourly[id_], precip[id_], dis[id_], locdis[id_]], axis=1)
            inputs.columns = [
                'temperature', 'precipitation', 'discharge', 'local_discharge'
            ]
            inputs['precipitation'] *= int(timesteps_per_day)
            input_table = inputs.to_records(index=False)
            name = 'catch_{}'.format(id_)
            group = get_child(name)
            h5_file.create_table(group,
                                 'inputs',
                                 input_table,
                                 'time varying inputs',
                                 expectedrows=total,
                                 filters=filters)
            prog.show_progress(counter, additional=id_)
    prog.show_progress(counter, additional=id_, force=True)
    int_steps = pandas.DataFrame(dis.index.to_series()).astype(numpy.int64)
    int_steps.columns = ['timesteps']
    time_steps = int_steps.to_records(index=False)
    h5_file.create_table('/', 'time_steps', time_steps,
                         'time steps for all catchments')
    h5_file.create_table(
        '/', 'steps_per_day',
        numpy.array([(timesteps_per_day, )], dtype=[('steps_per_day', '<i8')]),
        'number of time steps per day')
    h5_file.close()
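All of these examples share a ProgressDisplay helper that is constructed with the total number of steps, updated with show_progress(value, additional=None, force=False), and keeps its last rendered line in last_display. Its real implementation is not part of this listing; the following is only a minimal stand-in inferred from those call sites, useful if you want to run the snippets in isolation.

import sys


class ProgressDisplay:
    """Minimal stand-in for the progress helper used above (inferred from usage)."""

    def __init__(self, total):
        self.total = total
        self.last_display = ''

    def show_progress(self, value, additional=None, force=False):
        # Redraw only about every 1% of steps unless `force` is given.
        if not force and value % max(self.total // 100, 1):
            return
        extra = ' {}'.format(additional) if additional is not None else ''
        self.last_display = 'processed {} of {}{}'.format(value, self.total, extra)
        sys.stdout.write('\r' + self.last_display)
        sys.stdout.flush()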