Example #1
def run_scaling(args):
    '''wrapper function for scaling tests'''
    config = hosts[args.host]
    vic_exe = VIC(args.vic_exe)

    # write timing file header
    header = string.Template(table_header)
    header_kwargs = get_header_info(args.vic_exe, args.global_param)
    header = header.safe_substitute(**header_kwargs)
    with open(args.timing, 'w') as f:
        f.write(header)

    for i, kwargs in enumerate(config.profile):
        if config.template:
            # run on a cluster of some kind
            # start by printing the template
            print('-'.ljust(OUT_WIDTH, '-'))
            print('{host} template'.format(host=args.host).center(OUT_WIDTH))
            print('-'.ljust(OUT_WIDTH, '-'))
            print(config.template)
            print('-'.ljust(OUT_WIDTH, '-'))
            template = string.Template(config.template)

            run_string = template.safe_substitute(
                vic_exe=args.vic_exe,
                vic_global=args.global_param,
                timing_table_file=args.timing,
                i=i,
                mpiexec=config.mpiexec,
                **kwargs)
            run_file = 'vic_{host}_{i}.sh'.format(host=args.host, i=i)
            with open(run_file, 'w') as f:
                f.write(run_string)

            cmd = '{submit} {run_file}'.format(submit=config.submit,
                                               run_file=run_file)
            print(cmd)
            if not args.test:
                check_call(cmd, shell=True)

            if args.clean:
                os.remove(run_file)
        else:
            # run locally
            n = kwargs['np']
            print('Running {} with {} processors'.format(args.vic_exe, n))
            if not args.test:
                start = time.time()
                vic_exe.run(args.global_param, mpi_proc=int(n))
                end = time.time()
                diff = end - start
                with open(args.timing, 'a') as f:
                    f.write('%5s | %.2f\n' % (n, diff))

    print('See %s for scaling table' % args.timing)
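
# --- Illustrative host configuration (not part of the original snippet) --- #
# run_scaling() expects hosts[args.host] to provide .profile, .template,
# .mpiexec and .submit; everything below (names, the submission template, the
# processor counts) is an assumption for illustration only.
from collections import namedtuple

HostConfig = namedtuple('HostConfig', ['template', 'submit', 'mpiexec', 'profile'])

example_hosts = {
    # Run locally: no submission template, VIC is launched directly.
    'local': HostConfig(template=None, submit=None, mpiexec='mpiexec',
                        profile=[{'np': n} for n in (1, 2, 4, 8)]),
    # Run through a scheduler: the template is filled in and submitted.
    'cluster': HostConfig(
        template=('#!/bin/bash\n'
                  '${mpiexec} -n ${np} ${vic_exe} -g ${vic_global}\n'),
        submit='qsub', mpiexec='mpiexec',
        profile=[{'np': n} for n in (16, 32, 64)]),
}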
Example #2
# ============================================================ #
# Process command line arguments
# ============================================================ #
# Read config file
cfg = read_configobj(sys.argv[1])

# Read number of processors for python multiprocessing
nproc = int(sys.argv[2])

# Read number of processors for VIC MPI runs
mpi_proc = int(sys.argv[3])

# ============================================================ #
# Prepare VIC exe
# ============================================================ #
vic_exe = VIC(os.path.join(cfg['CONTROL']['root_dir'], cfg['VIC']['vic_exe']))

# ============================================================ #
# Process cfg data
# ============================================================ #
N = cfg['EnKF']['N']  # number of ensemble members
start_time = pd.to_datetime(cfg['EnKF']['start_time'])
end_time = pd.to_datetime(cfg['EnKF']['end_time'])

start_year = start_time.year
end_year = end_time.year

# ============================================================ #
# Setup postprocess output directories
# ============================================================ #
dirs = setup_output_dirs(
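# NOTE: the original snippet is truncated in the middle of the call above.
# For reference, a complete call to setup_output_dirs() (as used in the next
# example) looks roughly like the commented sketch below; the base-directory
# key and subdirectory names are hypothetical placeholders.
#
# dirs = setup_output_dirs(cfg['OUTPUT']['output_basedir'],
#                          mkdirs=['EnKF_hist', 'plots'])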
Example #3
def main(cfg_file, nproc=1):
    ''' Main function

    Parameters
    ----------
    cfg_file: <str>
        Input config file
    nproc: <int>
        Number of processors to use
    '''
    
    # ====================================================== #
    # Load in config file
    # ====================================================== #
    cfg = read_configobj(cfg_file)
   
 
    # ====================================================== #
    # Process some cfg variables
    # ====================================================== #
    start_date = pd.to_datetime(cfg['FORCING']['start_date'])
    end_date = pd.to_datetime(cfg['FORCING']['end_date'])
    
    start_year = start_date.year
    end_year = end_date.year

    ens_list = range(cfg['FORCING']['ens_start'],
                     cfg['FORCING']['ens_end'] + 1)
    
    
    # ====================================================== #
    # Set up output directories
    # ====================================================== #
    dirs = setup_output_dirs(cfg['OUTPUT']['out_basedir'],
                             mkdirs=['forc_orig_nc', 'forc_orig_asc',
                                     'forc_disagg_asc', 'forc_disagg_nc',
                                     'config_files', 'logs_vic'])
    # Subdirs for config files for ensemble
    subdirs_config = setup_output_dirs(
                            dirs['config_files'],
                            mkdirs=['netcdf2vic', 'vic4', 'vic2nc'])
    
    # ====================================================== #
    # Load in domain file
    # ====================================================== #
    ds_domain = xr.open_dataset(cfg['DOMAIN']['domain_nc'])
    da_domain = ds_domain[cfg['DOMAIN']['mask_name']]
    
    lat_min = da_domain['lat'].min().values
    lat_max = da_domain['lat'].max().values
    lon_min = da_domain['lon'].min().values
    lon_max = da_domain['lon'].max().values
    
    
    # ====================================================== #
    # Load in and process Newman ensemble forcings (for prec, Tmax and Tmin)
    # and orig. Maurer forcing (for wind speed)
    # ====================================================== #
    
    # --- Load Maurer forcings --- #
    print('Processing Maurer forcings...')
    
    # Loop over each year
    list_da_wind = []
    for year in range(start_year, end_year+1):
        print('Year {}'.format(year))
        # --- Load in netCDF file for this year --- #
        da_wind = xr.open_dataset(os.path.join(
                        cfg['FORCING']['maurer_dir'],
                        'nldas_met_update.obs.daily.wind.{}.nc'.format(year)))['wind']
        # --- Mask out the target area --- #
        da_wind = da_wind.sel(latitude=slice(lat_min, lat_max),
                              longitude=slice(lon_min, lon_max))
        da_wind = da_wind.where(da_domain.values)
        # --- Rename lat and lon --- #
        da_wind = da_wind.rename({'latitude': 'lat', 'longitude': 'lon'})
        # --- Put in list --- #
        list_da_wind.append(da_wind)
    
    # Concat all years together
    da_wind_allyears = xr.concat(list_da_wind, dim='time')
   
    # --- Load Newman forcings --- #
    print('Processing Newman forcings...')

    # If 1 processor, do a regular process
    if nproc == 1:
        # Loop over each ensemble member
        for ens in ens_list:
            load_and_process_Newman(ens, cfg, da_domain, lat_min, lat_max,
                                    lon_min, lon_max, start_date, end_date,
                                    dirs, da_wind_allyears)
    # If multiple processors, use mp
    elif nproc > 1:
        # Set up multiprocessing
        pool = mp.Pool(processes=nproc)
        # Loop over each ensemble member
        for ens in ens_list:
            pool.apply_async(load_and_process_Newman,
                             (ens, cfg, da_domain, lat_min, lat_max, lon_min,
                              lon_max, start_date, end_date, dirs,
                              da_wind_allyears,))
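            # Note: the AsyncResult returned by apply_async() is discarded
            # here, so exceptions raised inside load_and_process_Newman()
            # pass silently; keeping the results and calling .get() on each
            # would surface them.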
        # Finish multiprocessing
        pool.close()
        pool.join()
    
    # ====================================================== #
    # Convert orig. forcings to ascii format
    # ====================================================== #
    
    print('Converting orig. netCDF forcings to VIC ascii...')

    # --- Setup subdirs for asc VIC orig. forcings for each ensemble member
    # --- #
    list_ens = []
    for ens in ens_list:
        list_ens.append('ens_{}'.format(ens))
    subdirs_output = setup_output_dirs(
                        dirs['forc_orig_asc'],
                        mkdirs=list_ens)
    
    # --- Prepare netcdf2vic config file --- #
    dict_cfg_file = {}
    for ens in ens_list:
        cfg_file = os.path.join(subdirs_config['netcdf2vic'],
                                'ens_{}.cfg'.format(ens))
        dict_cfg_file[ens] = cfg_file

        with open(cfg_file, 'w') as f:
            f.write('[options]\n')
            f.write('files: forc_orig.{}.nc\n')
            f.write('verbose: True\n')
            f.write('output_format: ASCII\n')
            f.write('out_prefix: forc_orig_\n')
            f.write('coord_keys: lon,lat\n')
            f.write('var_keys: pr,tasmax,tasmin,wind\n')
            f.write('start_year: {}\n'.format(start_year))
            f.write('end_year: {}\n'.format(end_year))
            f.write('latlon_precision: {}\n'.format(
                            cfg['OUTPUT']['latlon_precision']))

            f.write('\n[paths]\n')
            f.write('in_path: {}\n'.format(os.path.join(
                                        dirs['forc_orig_nc'],
                                        'ens_{}'.format(ens))))
            f.write('mask_path: {}\n'.format(cfg['DOMAIN']['domain_nc']))
            f.write('mask_varname: {}\n'.format(cfg['DOMAIN']['mask_name']))
            f.write('ASCIIoutPath: {}\n'.format(
                        subdirs_output['ens_{}'.format(ens)]))
        
    # --- Run nc_to_vic --- #
    # If 1 processor, do a regular process
    if nproc == 1:
        for ens in ens_list:
            nc_to_vic(dict_cfg_file[ens])
    # If multiple processors, use mp
    elif nproc > 1:
        # Set up multiprocessing
        pool = mp.Pool(processes=nproc)
        # Loop over each ensemble member
        for ens in ens_list:
            pool.apply_async(nc_to_vic, (dict_cfg_file[ens],))
        # Finish multiprocessing
        pool.close()
        pool.join()
    
    # ====================================================== #
    # Run VIC forcing disaggregator
    # ====================================================== #
    
    print('Running VIC as a disaggregator...')
    
    # --- Setup subdirs for asc VIC disagg. forcings and VIC log files for
    # each ensemble member --- #
    list_ens = []
    for ens in ens_list:
        list_ens.append('ens_{}'.format(ens))
    subdirs_output = setup_output_dirs(
                        dirs['forc_disagg_asc'],
                        mkdirs=list_ens)
    subdirs_logs = setup_output_dirs(
                        dirs['logs_vic'],
                        mkdirs=list_ens)
 
    # --- Prepare VIC global file for the disaggregation run --- #
    # Load in global file template
    with open(cfg['VIC_DISAGG']['global_template'], 'r') as f:
        global_param = f.read()
    # Create string template
    s = string.Template(global_param)
    # Loop over each ensemble member
    dict_global_file = {}
    for ens in ens_list:
        # Fill in variables in the template
        global_param = s.safe_substitute(
                            time_step=cfg['VIC_DISAGG']['time_step'],
                            startyear=start_year,
                            startmonth=start_date.month,
                            startday=start_date.day,
                            endyear=end_year,
                            endmonth=end_date.month,
                            endday=end_date.day,
                            forcing1=os.path.join(dirs['forc_orig_asc'],
                                                  'ens_{}'.format(ens),
                                                  'forc_orig_'),
                            grid_decimal=cfg['OUTPUT']['latlon_precision'],
                            prec='PREC',
                            tmax='TMAX',
                            tmin='TMIN',
                            wind='WIND',
                            forceyear=start_year,
                            forcemonth=start_date.month,
                            forceday=start_date.day,
                            result_dir=subdirs_output['ens_{}'.format(ens)])
        # Write global param file
        global_file = os.path.join(subdirs_config['vic4'],
                                   'vic.global.ens_{}.txt'.format(ens))
        dict_global_file[ens] = global_file
        with open(global_file, mode='w') as f:
            f.write(global_param)
            
    # --- Run VIC --- #
    # Prepare VIC exe
    vic_exe = VIC(cfg['VIC_DISAGG']['vic4_exe'])

    # If 1 processor, do a regular process
    if nproc == 1:
        for ens in ens_list:
            vic_exe.run(dict_global_file[ens],
                        logdir=subdirs_logs['ens_{}'.format(ens)])
    # If multiple processors, use mp
    elif nproc > 1:
        # Set up multiprocessing
        pool = mp.Pool(processes=nproc)
        # Loop over each ensemble member
        for ens in ens_list:
            pool.apply_async(run_vic_for_multiprocess,
                             (vic_exe, dict_global_file[ens],
                              subdirs_logs['ens_{}'.format(ens)],))
        # Finish multiprocessing
        pool.close()
        pool.join()
    
    # ====================================================== #
    # Convert disaggregated forcings to netCDF format
    # ====================================================== #
    
    # --- Prepare config file for vic2nc --- #
    print('Converting disaggregated forcings to netCDF...')
    
    # --- Setup subdirs for VIC disagg. netCDF forcings for each ensemble
    # member --- #
    list_ens = []
    for ens in ens_list:
        list_ens.append('ens_{}'.format(ens))
    subdirs_output = setup_output_dirs(
                        dirs['forc_disagg_nc'],
                        mkdirs=list_ens)

    # --- Prepare netcdf2vic config file --- #
    # Extract disaggregated forcing variable names and order
    with open(cfg['VIC_DISAGG']['global_template'], 'r') as f:
        global_param = f.read()
    outvar_list = find_outvar_global_param(global_param)
    for i, var in enumerate(outvar_list):
        # Remove the 'OUT_' prefix. (str.strip('OUT_') strips any of the
        # characters O, U, T, _ from both ends and can mangle variable names,
        # so slice the prefix off instead.)
        if var.startswith('OUT_'):
            outvar_list[i] = var[len('OUT_'):]
   
    # Compute the last sub-daily timestamp of the disaggregated output:
    # end_date + 1 day - one disaggregation time step
    end_date_with_hour = end_date + pd.DateOffset(days=1) -\
                         pd.DateOffset(hours=cfg['VIC_DISAGG']['time_step'])
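    # (For example, with end_date 1999-12-31 and a 3-hourly time step this
    # gives 1999-12-31 21:00; the dates are purely illustrative.)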

    # Loop over each ensemble member 
    dict_cfg_file = {}
    for ens in ens_list:
        cfg_file = os.path.join(subdirs_config['vic2nc'],
                                'ens_{}.cfg'.format(ens))
        dict_cfg_file[ens] = cfg_file
        
        with open(cfg_file, 'w') as f:
            f.write('[OPTIONS]\n')
            f.write('input_files: {}\n'.format(
                        os.path.join(dirs['forc_disagg_asc'],
                                     'ens_{}'.format(ens),
                                     'force_*')))
            f.write('input_file_format: ascii\n')
            f.write('bin_dt_sec: {}\n'.format(cfg['VIC_DISAGG']['time_step']*3600))
            f.write('bin_start_date: {}\n'.format(start_date.strftime("%Y-%m-%d-%H")))
            f.write('bin_end_date: {}\n'.format(end_date_with_hour.strftime("%Y-%m-%d-%H")))
            f.write('regular_grid: False\n')
            f.write('out_directory: {}\n'.format(subdirs_output['ens_{}'.format(ens)]))
            f.write('memory_mode: big_memory\n')
            f.write('chunksize: 100\n')
            f.write('out_file_prefix: force\n')
            f.write('out_file_format: NETCDF4\n')
            f.write('precision: single\n')
            f.write('start_date: {}\n'.format(start_date.strftime("%Y-%m-%d-%H")))
            f.write('end_date: {}\n'.format(end_date_with_hour.strftime("%Y-%m-%d-%H")))
            f.write('calendar: proleptic_gregorian\n')
            f.write('time_segment: year\n')
            f.write('snow_bands: False\n')
            f.write('veg_tiles: False\n')
            f.write('soil_layers: False\n')
            
            f.write('\n[DOMAIN]\n')
            f.write('filename: {}\n'.format(cfg['DOMAIN']['domain_nc']))
            f.write('longitude_var: {}\n'.format(cfg['DOMAIN']['lon_name']))
            f.write('latitude_var: {}\n'.format(cfg['DOMAIN']['lat_name']))
            f.write('y_x_dims: {}, {}\n'.format(cfg['DOMAIN']['lat_name'],
                                                cfg['DOMAIN']['lon_name']))
            f.write('copy_vars: {}, {}, {}\n'.format(cfg['DOMAIN']['mask_name'],
                                                     cfg['DOMAIN']['lat_name'],
                                                     cfg['DOMAIN']['lon_name']))
            
            f.write('\n[GLOBAL_ATTRIBUTES]\n')
            f.write('title: VIC forcings\n')
            f.write('version: VIC4.2\n')
            f.write('grid: 1/8\n')
            
            for i, var in enumerate(outvar_list):
                if var == 'AIR_TEMP':
                    f.write('\n[AIR_TEMP]\n')
                    f.write('column: {}\n'.format(i))
                    f.write('units: C\n')
                    f.write('standard_name: air_temperature\n')
                    f.write('description: air temperature\n')
                elif var == 'PREC':
                    f.write('\n[PREC]\n')
                    f.write('column: {}\n'.format(i))
                    f.write('units: mm/step\n')
                    f.write('standard_name: precipitation\n')
                    f.write('description: precipitation\n')
                elif var == 'PRESSURE':
                    f.write('\n[PRESSURE]\n')
                    f.write('column: {}\n'.format(i))
                    f.write('units: kPa\n')
                    f.write('standard_name: surface_air_pressure\n')
                    f.write('description: near-surface atmospheric pressure\n')
                elif var == 'SHORTWAVE':
                    f.write('\n[SHORTWAVE]\n')
                    f.write('column: {}\n'.format(i))
                    f.write('units: W m-2\n')
                    f.write('standard_name: incoming_shortwave_radiation\n')
                    f.write('description: incoming shortwave radiation\n')
                elif var == 'LONGWAVE':
                    f.write('\n[LONGWAVE]\n')
                    f.write('column: {}\n'.format(i))
                    f.write('units: W m-2\n')
                    f.write('standard_name: incoming_longwave_radiation\n')
                    f.write('description: incoming longwave radiation\n')
                elif var == 'VP':
                    f.write('\n[VP]\n')
                    f.write('column: {}\n'.format(i))
                    f.write('units: kPa\n')
                    f.write('standard_name: water_vapor_pressure\n')
                    f.write('description: near surface vapor pressure\n')
                elif var == 'WIND':
                    f.write('\n[WIND]\n')
                    f.write('column: {}\n'.format(i))
                    f.write('units: m/s\n')
                    f.write('standard_name: wind_speed\n')
                    f.write('description: near-surface wind speed\n')
   
    # --- Run vic2nc --- #
    # If 1 processor, do a regular process
    if nproc == 1:
        for ens in ens_list:
            cfg_vic2nc = read_config(dict_cfg_file[ens])
            options = cfg_vic2nc.pop('OPTIONS')
            global_atts = cfg_vic2nc.pop('GLOBAL_ATTRIBUTES')
            if not options['regular_grid']:
                domain_dict = cfg_vic2nc.pop('DOMAIN')
            else:
                domain_dict = None
            # Set aside fields dict
            fields = cfg_vic2nc
            # Run vic2nc 
            vic2nc(options, global_atts, domain_dict, fields)

    # If multiple processors, use mp
    elif nproc > 1:
        # Set up multiprocessing
        pool = mp.Pool(processes=nproc)
        # Loop over each ensemble member
        for ens in ens_list:
            cfg_vic2nc = read_config(dict_cfg_file[ens])
            options = cfg_vic2nc.pop('OPTIONS')
            global_atts = cfg_vic2nc.pop('GLOBAL_ATTRIBUTES')
            if not options['regular_grid']:
                domain_dict = cfg_vic2nc.pop('DOMAIN')
            else:
                domain_dict = None
            # set aside fields dict
            fields = cfg_vic2nc
            pool.apply_async(vic2nc,
                             (options, global_atts, domain_dict, fields,))
        # Finish multiprocessing
        pool.close()
        pool.join()
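

# --- Illustrative entry point (not part of the original snippet); the
# command-line convention (config file, then number of processors) is an
# assumption modeled on the other scripts in this collection --- #
if __name__ == '__main__':
    import sys
    main(cfg_file=sys.argv[1], nproc=int(sys.argv[2]))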
Example #4
def main():
    '''
    Run VIC tests
    '''

    # dates and times
    starttime = datetime.datetime.now()
    ymd = starttime.strftime('%Y%m%d')

    # Parse arguments
    test_results = OrderedDict()

    parser = argparse.ArgumentParser(description=description,
                                     epilog=epilog,
                                     formatter_class=CustomFormatter)

    parser.add_argument(
        'tests',
        type=str,
        help='Test sets to run',
        choices=['all', 'unit', 'system', 'science', 'examples', 'release'],
        default=['unit', 'system'],
        nargs='+')
    parser.add_argument('--system',
                        type=str,
                        help='system tests configuration file',
                        default=os.path.join(test_dir,
                                             'system/system_tests.cfg'))
    parser.add_argument('--science',
                        type=str,
                        help='science tests configuration file',
                        default=os.path.join(test_dir, 'science/science.cfg'))
    parser.add_argument('--examples',
                        type=str,
                        help='examples tests configuration file',
                        default=os.path.join(test_dir,
                                             'examples/examples.cfg'))
    parser.add_argument('--release',
                        type=str,
                        help='release tests configuration file',
                        default=os.path.join(test_dir, 'release/release.cfg'))
    parser.add_argument('--classic',
                        type=str,
                        help='classic driver executable to test')
    parser.add_argument('--image',
                        type=str,
                        help='image driver executable to test')
    parser.add_argument('--output_dir',
                        type=str,
                        help='directory to write test output to',
                        default='$WORKDIR/VIC_tests_{0}'.format(ymd))
    parser.add_argument('--data_dir',
                        type=str,
                        help='directory to find test data',
                        default='./samples/VIC_sample_data')
    parser.add_argument('--science_test_data_dir',
                        type=str,
                        help='directory to find science test data',
                        default='./samples/VIC_sample_data')
    parser.add_argument('--nproc',
                        type=int,
                        help='number of processors to use for science tests',
                        default=1)

    args = parser.parse_args()

    # Define test directories
    data_dir = args.data_dir
    out_dir = os.path.expandvars(args.output_dir)
    os.makedirs(out_dir, exist_ok=True)

    # check to make sure science test data directory exists
    science_test_data_dir = args.science_test_data_dir
    if 'science' in args.tests and not os.path.exists(science_test_data_dir):
        raise VICTestError(
            "directory for science test data does not exist or has not been defined"
        )

    # Validate input directories
    if not (len(args.tests) == 1 and args.tests[0] == 'unit'):
        for d in [data_dir, test_dir]:
            if not os.path.exists(d):
                raise VICTestError('Directory: {0} does not exist'.format(d))

    # Print welcome information
    print(description)
    print('\nStarting tests now...Start Time: {0}\n'.format(starttime))
    print('Running Test Set: {0}'.format(', '.join(args.tests)))

    # Setup VIC executable
    # --- if not only unit test --- #
    if not (len(args.tests) == 1 and args.tests[0] == 'unit'):
        dict_drivers = {}
        if args.classic:
            dict_drivers['classic'] = VIC(args.classic)
            print('VIC classic version information:\n\n{0}'.format(
                dict_drivers['classic'].version.decode()))
        if args.image:
            dict_drivers['image'] = VIC(args.image)
            print('VIC image version information:\n\n{0}'.format(
                dict_drivers['image'].version.decode()))

    # run test sets
    # unit
    if any(i in ['all', 'unit'] for i in args.tests):
        test_results['unit'] = run_unit_tests(test_dir)

    # system
    if any(i in ['all', 'system'] for i in args.tests):
        test_results['system'] = run_system(args.system, dict_drivers,
                                            data_dir,
                                            os.path.join(out_dir, 'system'))

    # science
    if any(i in ['all', 'science'] for i in args.tests):
        test_results['science'] = run_science(args.science,
                                              dict_drivers['classic'],
                                              science_test_data_dir, data_dir,
                                              os.path.join(out_dir, 'science'),
                                              'classic', args.nproc)
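        # Note: the science tests run with the classic driver, so this branch
        # assumes --classic was supplied; otherwise dict_drivers['classic']
        # raises a KeyError.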
    # examples
    if any(i in ['all', 'examples'] for i in args.tests):
        if len(dict_drivers) == 1:  # if only one driver
            driver = list(dict_drivers.keys())[0]
            vic_exe = dict_drivers[driver]
            test_results['examples'] = run_examples(
                args.examples, vic_exe, data_dir,
                os.path.join(out_dir, 'examples'), driver)
        else:
            raise ValueError('example test only supports single driver')
    # release
    if any(i in ['all', 'release'] for i in args.tests):
        test_results['release'] = run_release(args.release)

    # Print test results
    summary = OrderedDict()
    failed = 0
    print('\nTest Results:')
    for test_set, results in test_results.items():
        print('-'.ljust(OUTPUT_WIDTH, '-'))
        print(test_set.center(OUTPUT_WIDTH))
        print('-'.ljust(OUTPUT_WIDTH, '-'))
        print_test_dict(results)

        summary[test_set] = 0
        for r in results.values():
            if not r.passed:
                summary[test_set] += 1

        failed += summary[test_set]

    print('\nTest Summary:')
    print('-'.ljust(OUTPUT_WIDTH, '-'))
    for test_set, r in summary.items():
        print('Failed tests in {0}: {1}'.format(test_set, r))
    print('-'.ljust(OUTPUT_WIDTH, '-'))

    # end date and times
    endtime = datetime.datetime.now()
    elapsed = endtime - starttime
    print('\nFinished testing VIC. Endtime: {0}'.format(endtime))
    print('Time elapsed during testing:  {0}\n'.format(elapsed))

    # return exit code
    sys.exit(failed)
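

# --- Illustrative command lines (assumptions based on the arguments defined
# above, not documented usage; the script name is hypothetical) --- #
#   python run_tests.py unit system --classic ./vic_classic.exe
#   python run_tests.py all --image ./vic_image.exe --nproc 4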
Example #5
# =========================================================== #
# Load config file
# =========================================================== #
cfg = read_configobj(sys.argv[1])

# =========================================================== #
# Set random generation seed
# =========================================================== #
np.random.seed(cfg['CONTROL']['seed'])

# =========================================================== #
# Run VIC
# =========================================================== #
# Create class VIC
vic_exe = VIC(cfg['VIC']['exe'])
# Run VIC
vic_exe.run(cfg['VIC']['global'],
            logdir=cfg['OUTPUT']['vic_log_dir'])

# =========================================================== #
# Extract VIC output soil moisture (layer 1) at the end of
# each day, and perturb
# =========================================================== #
# Load VIC output
ds = xr.open_dataset(cfg['OUTPUT']['vic_output_hist_path'])

# Resample surface sm to daily mean
da_sm1_true = ds['OUT_SOIL_MOIST'].sel(nlayer=0)
da_sm1_true_daily = da_sm1_true.resample(dim='time', freq='D', how='mean')
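# (With newer xarray releases the resample call above would be written as
# da_sm1_true.resample(time='1D').mean(); the dim/freq/how form is the older
# API.)

# --- Perturb the daily soil moisture (a minimal sketch; the original snippet
# ends above, and both the additive-Gaussian noise model and the
# cfg['PERTURB']['sm_pert_std'] entry are assumptions, not the script's
# actual method) --- #
sm_pert_std = cfg['PERTURB']['sm_pert_std']  # hypothetical config entry
da_sm1_perturbed = da_sm1_true_daily + np.random.normal(
    loc=0.0, scale=sm_pert_std, size=da_sm1_true_daily.shape)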