Code example #1
import os

from pandas import DataFrame

# paths, Config, Processes, BASE_AMF_DICT, amf_obs_time_series and save_run
# are assumed to be provided by the surrounding ETRM package.


def run(input_root, simulation_period):
    paths.build(input_root)

    amf_dict = amf_obs_time_series(BASE_AMF_DICT,
                                   complete_days_only=True,
                                   return_low_err=True)
    # get_etrm_time_series(paths.amf_extract, dict_=amf_dict)
    # write each observed AmeriFlux DataFrame in the nested dict to its own CSV
    for k, v in amf_dict.iteritems():
        for kk, vv in v.iteritems():
            if isinstance(vv, DataFrame):
                p = os.path.join(paths.amf_output_root,
                                 '{}_{}.csv'.format(k, kk))
                print 'writing to {}'.format(p)
                vv.to_csv(p)
    # keep the first site's entry for the post-run save below
    val = amf_dict.values()[0]

    cfg = Config()
    # run ETRM once per run specification in the configuration
    for runspec in cfg.runspecs:
        paths.build(runspec.input_root, runspec.output_root)

        etrm = Processes(runspec)
        etrm.configure_run(runspec)
        etrm.run()

    # note: only the Processes instance from the last runspec is passed to save_run
    save_run(etrm, val)
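
# A minimal, hypothetical driver for the run() entry point above. The input
# root and simulation period below are illustrative placeholders, not values
# from the original project.
if __name__ == '__main__':
    from datetime import datetime

    example_input_root = '/path/to/etrm_inputs'  # placeholder path
    example_period = (datetime(2000, 1, 1), datetime(2013, 12, 31))  # placeholder dates
    run(example_input_root, example_period)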
Code example #2
import os

from pandas import DataFrame, concat

# BASE_AMF_DICT, amf_obs_time_series, get_etrm_time_series and Processes are
# assumed to be provided by the surrounding ETRM/AmeriFlux package.


def get_ameriflux_data(amf_file_path,
                       simulation_period,
                       etrm_extract=None,
                       static_inputs=None,
                       initial_path=None,
                       save_csv=None,
                       save_cleaned_data=None,
                       save_combo=False):
    amf_dict = amf_obs_time_series(BASE_AMF_DICT,
                                   amf_file_path,
                                   complete_days_only=True,
                                   save_cleaned_data_path=save_cleaned_data,
                                   return_low_err=True)
    # when only cleaned data is being saved (via save_cleaned_data_path above),
    # skip the ETRM runs entirely
    if save_cleaned_data:
        return None
    # print 'amf dict w/ AMF time series: \n{}'.format(amf_dict)

    get_etrm_time_series(etrm_extract, dict_=amf_dict)
    # print 'amf dict w/ etrm input time series: \n{}'.format(amf_dict)  # fix this so it appends to all sites
    # print 'ameriflux dict: {}'.format(amf_dict)

    for key, val in amf_dict.iteritems():
        # instantiate for each item to get a clean master dict
        etrm = Processes(simulation_period,
                         save_csv,
                         static_inputs=static_inputs,
                         point_dict=amf_dict,
                         initial_inputs=initial_path)
        # print 'amf dict, pre-etrm run {}'.format(amf_dict)
        print '\n key : {}'.format(key)
        # print 'find etrm dataframe as amf_dict[key][''etrm'']\n{}'.format(amf_dict[key]['etrm'])
        # run ETRM for this site with hard-coded runoff fraction and Allen Ceff
        tracker = etrm.run(simulation_period,
                           point_dict=amf_dict,
                           point_dict_key=key,
                           modify_soils=True,
                           apply_rofrac=0.7,
                           allen_ceff=0.8)
        # print 'tracker after etrm run: \n {}'.format(tracker)
        csv_path_filename = os.path.join(save_csv,
                                         '{}.csv'.format(val['Name']))
        print 'this should be your csv: {}'.format(csv_path_filename)

        tracker.to_csv(csv_path_filename, na_rep='nan', index_label='Date')

        # join the observed AmeriFlux data and the ETRM tracker on their date index
        amf_obs_etrm_combo = DataFrame(
            concat((val['AMF_Data'], tracker), axis=1, join='outer'))

        obs_etrm_comb_out = os.path.join(save_combo,
                                         '{}_Ceff.csv'.format(val['Name']))

        print 'this should be your combo csv: {}'.format(obs_etrm_comb_out)
        amf_obs_etrm_combo.to_csv(obs_etrm_comb_out, index_label='Date')
        # print 'tracker for {}: {}'.format(key, tracker)

    return None
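
# A minimal, hypothetical call for get_ameriflux_data() above. All paths and
# dates are illustrative placeholders; only the keyword names come from the
# signature above.
if __name__ == '__main__':
    from datetime import datetime

    get_ameriflux_data('/path/to/amf_site_file',
                       (datetime(2000, 1, 1), datetime(2013, 12, 31)),
                       etrm_extract='/path/to/etrm_extracts',
                       static_inputs='/path/to/static_inputs',
                       initial_path='/path/to/initial_conditions',
                       save_csv='/path/to/output/etrm_csv',
                       save_combo='/path/to/output/combo_csv')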
Code example #3
# BASE_AMF_DICT and amf_obs_time_series are assumed to be provided by the
# surrounding ETRM/AmeriFlux package.
def amf_observation_analysis(amf_file_path, save_cleaned_data=None):
    amf_dict = amf_obs_time_series(BASE_AMF_DICT,
                                   amf_file_path,
                                   complete_days_only=True,
                                   save_cleaned_data_path=save_cleaned_data,
                                   return_low_err=True)
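
# Hypothetical usage: clean and analyze a raw AmeriFlux record, writing the
# cleaned series to a placeholder output directory.
if __name__ == '__main__':
    amf_observation_analysis('/path/to/amf_site_file',
                             save_cleaned_data='/path/to/cleaned_amf')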