コード例 #1
0
def main(ctx, resource_h5, out_dir, compute_tree, verbose):
    """
    ResourceX Command Line Interface

    Stores the extraction handler class, its kwargs, and the output
    directory on the click context for sub-commands, initializes
    node-level logging, and logs the run configuration.

    Parameters
    ----------
    ctx : click.Context
        Click context used to pass objects to sub-commands.
    resource_h5 : str
        Path to the resource .h5 file (may be a multi-file spec or an
        HSDS path, as determined by check_res_file).
    out_dir : str
        Directory to store outputs in.
    compute_tree : bool
        Flag passed through to the handler as 'compute_tree'.
    verbose : bool
        Flag to turn on debug logging.
    """
    ctx.ensure_object(dict)
    ctx.obj['H5'] = resource_h5
    ctx.obj['OUT_DIR'] = out_dir
    ctx.obj['CLS_KWARGS'] = {'compute_tree': compute_tree}

    multi_h5_res, hsds = check_res_file(resource_h5)
    if multi_h5_res:
        # Multi-file resource: only the containing directory must exist.
        # Raise explicitly instead of assert so the validation survives
        # `python -O` (asserts are stripped under optimization).
        res_dir = os.path.dirname(resource_h5)
        if not os.path.exists(res_dir):
            raise FileNotFoundError(
                'Directory for multi-file resource does not exist: {}'
                .format(res_dir))

        ctx.obj['CLS'] = MultiFileResourceX
    else:
        if hsds:
            # Remote (HSDS) file: no local existence check is possible.
            ctx.obj['CLS_KWARGS']['hsds'] = hsds
        else:
            if not os.path.exists(resource_h5):
                raise FileNotFoundError(
                    'Resource file does not exist: {}'.format(resource_h5))

        ctx.obj['CLS'] = ResourceX

    name = os.path.splitext(os.path.basename(resource_h5))[0]
    init_mult(name,
              out_dir,
              verbose=verbose,
              node=True,
              modules=[
                  __name__, 'rex.resource_extraction.resource_extraction',
                  'rex.resource'
              ])

    logger.info('Extracting Resource data from {}'.format(resource_h5))
    logger.info('Outputs to be stored in: {}'.format(out_dir))
コード例 #2
0
ファイル: cli_gen.py プロジェクト: mmdione/reV
def from_config(ctx, config_file, verbose):
    """Run reV gen from a config file.

    Loads the generation config, resolves the job name and verbosity,
    prepares the output directory and loggers, stashes the run
    parameters on the click context, then submits one job per analysis
    year.
    """
    name = ctx.obj['NAME']
    # CLI flag OR's with any verbosity already set on the context
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # Instantiate the config object
    config = GenConfig(config_file)

    # take name from config if not default
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # Enforce verbosity if logging level is specified in the config
    if config.log_level == logging.DEBUG:
        verbose = True

    # make output directory if does not exist
    if not os.path.exists(config.dirout):
        os.makedirs(config.dirout)

    # initialize loggers.
    init_mult(name, config.logdir, modules=['reV', 'rex'], verbose=verbose)

    # Initial log statements
    logger.info(
        'Running reV Generation from config file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.info('The following project points were specified: "{}"'.format(
        config.get('project_points', None)))
    logger.info(
        'The following SAM configs are available to this run:\n{}'.format(
            pprint.pformat(config.get('sam_files', None), indent=4)))
    logger.info('The following is being used for site specific input data: '
                '"{}"'.format(config.site_data))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    # set config objects to be passed through invoke to direct methods
    ctx.obj['TECH'] = config.technology
    ctx.obj['POINTS'] = config.project_points
    ctx.obj['SAM_FILES'] = config.sam_files
    ctx.obj['DIROUT'] = config.dirout
    ctx.obj['LOGDIR'] = config.logdir
    ctx.obj['OUTPUT_REQUEST'] = config.output_request
    ctx.obj['SITE_DATA'] = config.site_data
    ctx.obj['TIMEOUT'] = config.timeout
    ctx.obj['SITES_PER_WORKER'] = config.execution_control.sites_per_worker
    ctx.obj['MAX_WORKERS'] = config.execution_control.max_workers
    ctx.obj['MEM_UTIL_LIM'] = \
        config.execution_control.memory_utilization_limit

    ctx.obj['CURTAILMENT'] = None
    if config.curtailment is not None:
        # pass through the curtailment file, not the curtailment object
        ctx.obj['CURTAILMENT'] = config['curtailment']

    # submit one job per requested analysis year
    for i, year in enumerate(config.years):
        submit_from_config(ctx, name, year, config, i, verbose=verbose)
コード例 #3
0
ファイル: wind_rose_cli.py プロジェクト: NREL/rex
def main(wind_path, hub_height, out_dir, wspd_bins, wdir_bins, max_workers,
         res_cls, hsds, chunks_per_worker, log_file, verbose):
    """
    WindRose Command Line Interface
    """
    # ensure the output directory exists before anything else
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # output CSV is named after the wind file (glob chars stripped) and
    # the hub height
    base = os.path.splitext(os.path.basename(wind_path))[0].replace('*', '')
    out_fpath = os.path.join(
        out_dir, '{}_wind_rose-{}m.csv'.format(base, hub_height))

    # logger name/dir come from the log file when one is given
    if log_file is None:
        log_name, log_dir = None, None
    else:
        log_name = os.path.basename(log_file).split('.')[0]
        log_dir = os.path.dirname(log_file)

    init_mult(log_name, log_dir, [__name__, 'rex'], verbose=verbose)
    logger.info('Computing wind rose from {}'.format(wind_path))
    logger.info('Outputs to be stored in: {}'.format(out_dir))

    JointPD.wind_rose(wind_path,
                      hub_height,
                      wspd_bins=wspd_bins,
                      wdir_bins=wdir_bins,
                      sites=None,
                      res_cls=RES_CLS[res_cls],
                      hsds=hsds,
                      max_workers=max_workers,
                      chunks_per_worker=chunks_per_worker,
                      out_fpath=out_fpath)
コード例 #4
0
def from_config(ctx, config_file, verbose):
    """Run reV multi-year collection from a config file."""
    name = ctx.obj['NAME']

    # NOTE(review): unlike the sibling from_config commands, this does
    # not OR ``verbose`` with ctx.obj['VERBOSE'] — confirm intended.

    # Instantiate the config object
    config = MultiYearConfig(config_file)

    # take name from config if not default
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # Enforce verbosity if logging level is specified in the config
    if config.log_level == logging.DEBUG:
        verbose = True

    # make output directory if does not exist
    if not os.path.exists(config.dirout):
        os.makedirs(config.dirout)

    # initialize loggers.
    init_mult(name, config.logdir, modules=[__name__, 'reV'], verbose=verbose)

    # Initial log statements
    logger.info(
        'Running reV multi-year from config file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))

    ctx.obj['MY_FILE'] = config.my_file
    if config.execution_control.option == 'local':
        # run locally, but only if the job is not already successful
        ctx.obj['NAME'] = name
        status = Status.retrieve_job_status(config.dirout, 'multi-year', name)
        if status != 'successful':
            Status.add_job(config.dirout,
                           'multi-year',
                           name,
                           replace=True,
                           job_attrs={
                               'hardware': 'local',
                               'fout': ctx.obj['MY_FILE'],
                               'dirout': config.dirout
                           })
            # group params are serialized to JSON for the invoked command
            group_params = json.dumps(config.group_params)
            ctx.invoke(multi_year_groups, group_params=group_params)

    elif config.execution_control.option in ('eagle', 'slurm'):
        # submit the job through SLURM instead of running locally
        ctx.obj['NAME'] = name
        ctx.invoke(multi_year_slurm,
                   alloc=config.execution_control.allocation,
                   walltime=config.execution_control.walltime,
                   feature=config.execution_control.feature,
                   memory=config.execution_control.memory,
                   conda_env=config.execution_control.conda_env,
                   module=config.execution_control.module,
                   stdout_path=os.path.join(config.logdir, 'stdout'),
                   group_params=json.dumps(config.group_params),
                   verbose=verbose)
コード例 #5
0
def from_config(ctx, config_file, verbose):
    """Run reV econ from a config file.

    Loads the econ config, resolves the job name and verbosity, prepares
    outputs and loggers, stashes run parameters on the click context,
    then submits one econ job per capacity-factor file / year.
    """
    name = ctx.obj['NAME']
    # CLI flag OR's with any verbosity already set on the context
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # Instantiate the config object
    config = EconConfig(config_file)

    # take name from config if not default
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # Enforce verbosity if logging level is specified in the config
    if config.log_level == logging.DEBUG:
        verbose = True

    # make output directory if does not exist
    if not os.path.exists(config.dirout):
        os.makedirs(config.dirout)

    # initialize loggers.
    init_mult(name, config.logdir, modules=['reV', 'rex'], verbose=verbose)
    cf_files = config.parse_cf_files()
    # Initial log statements
    logger.info('Running reV Econ from config file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.info('The following project points were specified: "{}"'.format(
        config.get('project_points', None)))
    logger.info(
        'The following SAM configs are available to this run:\n{}'.format(
            pprint.pformat(config.get('sam_files', None), indent=4)))
    logger.debug(
        'Submitting jobs for the following cf_files: {}'.format(cf_files))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    # set config objects to be passed through invoke to direct methods
    ctx.obj['POINTS'] = config.project_points
    ctx.obj['SAM_FILES'] = config.parse_sam_config()
    ctx.obj['SITE_DATA'] = config.site_data
    ctx.obj['DIROUT'] = config.dirout
    ctx.obj['LOGDIR'] = config.logdir
    ctx.obj['APPEND'] = config.append
    ctx.obj['OUTPUT_REQUEST'] = config.output_request
    ctx.obj['SITES_PER_WORKER'] = config.execution_control.sites_per_worker
    ctx.obj['MAX_WORKERS'] = config.execution_control.max_workers
    ctx.obj['TIMEOUT'] = config.timeout

    # Pair cf_files with years: positionally when the counts match,
    # otherwise parse the year out of each cf_file name and only submit
    # files whose year appears in the configured year list.
    if len(config.years) == len(cf_files):
        for i, year in enumerate(config.years):
            cf_file = cf_files[i]
            submit_from_config(ctx, name, cf_file, year, config, verbose)
    else:
        for i, cf_file in enumerate(cf_files):
            year = parse_year(cf_file)
            if str(year) in [str(y) for y in config.years]:
                submit_from_config(ctx, name, cf_file, year, config, verbose)
コード例 #6
0
def direct(ctx, gen_fpath, rev_summary, reg_cols, cf_dset, rep_method,
           err_method, weight, n_profiles, out_dir, log_dir, max_workers,
           aggregate_profiles, verbose):
    """reV representative profiles CLI.

    Stores all inputs on the click context for sub-commands. When no
    sub-command is invoked, runs the profiles computation directly
    (aggregated profiles when ``aggregate_profiles`` is set, otherwise
    region-based representative profiles) and records a "successful" job
    status file.
    """
    name = ctx.obj['NAME']
    ctx.obj['GEN_FPATH'] = gen_fpath
    ctx.obj['REV_SUMMARY'] = rev_summary
    ctx.obj['REG_COLS'] = reg_cols
    ctx.obj['CF_DSET'] = cf_dset
    ctx.obj['REP_METHOD'] = rep_method
    ctx.obj['ERR_METHOD'] = err_method
    ctx.obj['WEIGHT'] = weight
    ctx.obj['N_PROFILES'] = n_profiles
    ctx.obj['OUT_DIR'] = out_dir
    ctx.obj['LOG_DIR'] = log_dir
    ctx.obj['MAX_WORKERS'] = max_workers
    ctx.obj['AGGREGATE_PROFILES'] = aggregate_profiles
    ctx.obj['VERBOSE'] = verbose

    if ctx.invoked_subcommand is None:
        t0 = time.time()
        init_mult(name, log_dir, modules=['reV', 'rex'], verbose=verbose)

        # output file is named after the job
        fn_out = '{}.h5'.format(name)
        fout = os.path.join(out_dir, fn_out)

        if aggregate_profiles:
            # the aggregated path does not use reg_cols / rep_method /
            # err_method / n_profiles
            AggregatedRepProfiles.run(gen_fpath,
                                      rev_summary,
                                      cf_dset=cf_dset,
                                      weight=weight,
                                      fout=fout,
                                      max_workers=max_workers)
        else:
            RepProfiles.run(gen_fpath,
                            rev_summary,
                            reg_cols,
                            cf_dset=cf_dset,
                            rep_method=rep_method,
                            err_method=err_method,
                            weight=weight,
                            fout=fout,
                            n_profiles=n_profiles,
                            max_workers=max_workers)

        runtime = (time.time() - t0) / 60
        logger.info('reV representative profiles complete. '
                    'Time elapsed: {:.2f} min. Target output dir: {}'.format(
                        runtime, out_dir))

        # record a successful run in the reV status file
        status = {
            'dirout': out_dir,
            'fout': fn_out,
            'job_status': 'successful',
            'runtime': runtime,
            'finput': [gen_fpath, rev_summary]
        }
        Status.make_job_file(out_dir, 'rep-profiles', name, status)
コード例 #7
0
def multi_year_groups(ctx, group_params, verbose):
    """Run multi year collection and means for multiple groups.

    ``group_params`` is a JSON string mapping group names to dicts with
    'dsets', 'source_files' and 'group' keys. Each dataset in each group
    is collected as profiles or means depending on its source data.
    """
    name = ctx.obj['NAME']
    my_file = ctx.obj['MY_FILE']
    # CLI flag OR's with any verbosity already set on the context
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # initialize loggers for multiple modules
    log_dir = os.path.dirname(my_file)
    init_mult(name,
              log_dir,
              modules=[__name__, 'reV.handlers.multi_year'],
              verbose=verbose,
              node=True)

    for key, val in ctx.obj.items():
        logger.debug('ctx var passed to collection method: "{}" : "{}" '
                     'with type "{}"'.format(key, val, type(val)))

    logger.info('Multi-year collection is being run with job name "{}". '
                'Target output path is: {}'.format(name, my_file))
    ts = time.time()
    for group_name, group in json.loads(group_params).items():
        logger.info('- Collecting datasets "{}" from "{}" into "{}/"'.format(
            group['dsets'], group['source_files'], group_name))
        t0 = time.time()
        for dset in group['dsets']:
            # profile (time-series) datasets are collected differently
            # than scalar means
            if MultiYear.is_profile(group['source_files'], dset):
                MultiYear.collect_profiles(my_file,
                                           group['source_files'],
                                           dset,
                                           group=group['group'])
            else:
                MultiYear.collect_means(my_file,
                                        group['source_files'],
                                        dset,
                                        group=group['group'])

        runtime = (time.time() - t0) / 60
        logger.info('- {} collection completed in: {:.2f} min.'.format(
            group_name, runtime))

    # total wall time across all groups
    runtime = (time.time() - ts) / 60
    logger.info(
        'Multi-year collection completed in : {:.2f} min.'.format(runtime))

    # add job to reV status file.
    status = {
        'dirout': os.path.dirname(my_file),
        'fout': os.path.basename(my_file),
        'job_status': 'successful',
        'runtime': runtime
    }
    Status.make_job_file(os.path.dirname(my_file), 'multi-year', name, status)
コード例 #8
0
def multi_year(ctx, source_files, group, dsets, pass_through_dsets, verbose):
    """Run multi year collection and means on local worker."""

    job_name = ctx.obj['NAME']
    out_h5 = ctx.obj['MY_FILE']
    # CLI flag OR's with any verbosity already set on the context
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # node-level loggers for this module and the multi-year handler
    init_mult(job_name,
              os.path.dirname(out_h5),
              modules=[__name__, 'reV.handlers.multi_year'],
              verbose=verbose,
              node=True)

    for key, val in ctx.obj.items():
        logger.debug('ctx var passed to collection method: "{}" : "{}" '
                     'with type "{}"'.format(key, val, type(val)))

    logger.info('Multi-year collection is being run for "{}" '
                'with job name "{}" on {}. Target output path is: {}'.format(
                    dsets, job_name, source_files, out_h5))
    start = time.time()

    # collect each dataset as profiles or means based on its source data
    for dset in dsets:
        if MultiYear.is_profile(source_files, dset):
            collector = MultiYear.collect_profiles
        else:
            collector = MultiYear.collect_means

        collector(out_h5, source_files, dset, group=group)

    # optional datasets that are passed through without collection math
    if pass_through_dsets is not None:
        for pt_dset in pass_through_dsets:
            MultiYear.pass_through(out_h5, source_files, pt_dset, group=group)

    runtime = (time.time() - start) / 60
    logger.info(
        'Multi-year collection completed in: {:.2f} min.'.format(runtime))

    # add job to reV status file.
    job_status = {
        'dirout': os.path.dirname(out_h5),
        'fout': os.path.basename(out_h5),
        'job_status': 'successful',
        'runtime': runtime,
        'finput': source_files
    }
    Status.make_job_file(os.path.dirname(out_h5), 'multi-year', job_name,
                         job_status)
コード例 #9
0
ファイル: cli_offshore.py プロジェクト: barbarmarc/reV
def direct(ctx, gen_fpath, offshore_fpath, points, sam_files, log_dir,
           verbose):
    """Main entry point to run offshore wind aggregation.

    Stores inputs on the click context for sub-commands; when no
    sub-command is invoked, runs the offshore aggregation directly and
    records a "successful" job status file.
    """
    name = ctx.obj['NAME']
    ctx.obj['GEN_FPATH'] = gen_fpath
    ctx.obj['OFFSHORE_FPATH'] = offshore_fpath
    ctx.obj['POINTS'] = points
    ctx.obj['SAM_FILES'] = sam_files
    # outputs land next to the input generation file
    ctx.obj['OUT_DIR'] = os.path.dirname(gen_fpath)
    ctx.obj['LOG_DIR'] = log_dir
    ctx.obj['VERBOSE'] = verbose

    if ctx.invoked_subcommand is None:
        t0 = time.time()
        init_mult(name,
                  log_dir,
                  modules=[__name__, 'reV.offshore', 'reV.handlers', 'rex'],
                  verbose=verbose,
                  node=True)

        # output file name derived from the input: *_offshore.h5
        fpath_out = gen_fpath.replace('.h5', '_offshore.h5')

        try:
            Offshore.run(gen_fpath,
                         offshore_fpath,
                         points,
                         sam_files,
                         fpath_out=fpath_out)
        except Exception as e:
            # log with traceback before re-raising so the failure is
            # captured in the node log file
            logger.exception('Offshore module failed, received the '
                             'following exception:\n{}'.format(e))
            raise e

        runtime = (time.time() - t0) / 60

        # add job to reV status file.
        status = {
            'dirout': os.path.dirname(fpath_out),
            'fout': os.path.basename(fpath_out),
            'job_status': 'successful',
            'runtime': runtime,
            'finput': gen_fpath
        }
        Status.make_job_file(os.path.dirname(fpath_out), 'offshore', name,
                             status)
コード例 #10
0
ファイル: temporal_stats_cli.py プロジェクト: NREL/rex
def main(ctx, resource_path, dataset, out_dir, statistics, max_workers,
         res_cls, hsds, chunks_per_worker, lat_lon_only, log_file, verbose):
    """
    TemporalStats Command Line Interface

    When no sub-command is invoked, computes all requested statistics for
    ``dataset`` and saves them to a CSV in ``out_dir``; otherwise stashes
    the TemporalStats instance and options on the click context for the
    sub-command.
    """
    ctx.ensure_object(dict)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # output CSV is named from the resource file and dataset
    name = os.path.splitext(os.path.basename(resource_path))[0]
    out_fpath = '{}_{}.csv'.format(name, dataset)
    out_fpath = os.path.join(out_dir, out_fpath)

    # NOTE: ``name`` is intentionally reused below as the *logger* name,
    # which comes from the log file (or is None) rather than from the
    # resource path used for the output CSV above.
    if log_file is not None:
        name = os.path.basename(log_file).split('.')[0]
        log_dir = os.path.dirname(log_file)
    else:
        name = None
        log_dir = None

    init_mult(name, log_dir, [__name__, 'rex'], verbose=verbose)
    logger.info('Computing stats data from {}'.format(resource_path))
    logger.info('Outputs to be stored in: {}'.format(out_dir))

    res_stats = TemporalStats(resource_path, statistics=statistics,
                              res_cls=RES_CLS[res_cls], hsds=hsds)

    if ctx.invoked_subcommand is None:
        all_stats = res_stats.all_stats(
            dataset, max_workers=max_workers,
            chunks_per_worker=chunks_per_worker,
            lat_lon_only=lat_lon_only)

        res_stats.save_stats(all_stats, out_fpath)
    else:
        # pass state through to the invoked sub-command
        ctx.obj['STATS'] = res_stats
        ctx.obj['DSET'] = dataset
        ctx.obj['MAX_WORKERS'] = max_workers
        ctx.obj['CPW'] = chunks_per_worker
        ctx.obj['LL'] = lat_lon_only
        ctx.obj['OUT_PATH'] = out_fpath
コード例 #11
0
ファイル: cli_qa_qc.py プロジェクト: mmdione/reV
def from_config(ctx, config_file, verbose):
    """Run reV QA/QC from a config file.

    Loads the QA/QC config, resolves the job name and verbosity,
    initializes loggers, then either runs each configured QA/QC module
    locally (in config order, with the last module flagged as terminal)
    or launches the run via SLURM.
    """
    name = ctx.obj['NAME']

    # Instantiate the config object
    config = QaQcConfig(config_file)

    # take name from config if not default
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # Enforce verbosity if logging level is specified in the config.
    # Fix: this previously *assigned* the comparison result, silently
    # discarding a --verbose CLI flag whenever the config level was not
    # DEBUG. DEBUG now only raises verbosity, matching the sibling
    # from_config commands.
    if config.log_level == logging.DEBUG:
        verbose = True

    # initialize loggers
    init_mult(name, config.logdir, modules=['reV', 'rex'], verbose=verbose)

    # Initial log statements (message fixed: this command runs QA/QC,
    # not supply curve)
    logger.info('Running reV QA/QC from config '
                'file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    if config.execution_control.option == 'local':
        # only (re)run if the job is not already marked successful
        status = Status.retrieve_job_status(config.dirout, 'qa-qc', name)
        if status != 'successful':
            Status.add_job(config.dirout,
                           'qa-qc',
                           name,
                           replace=True,
                           job_attrs={
                               'hardware': 'local',
                               'dirout': config.dirout
                           })

            # the final module is flagged terminal (passed through to
            # each invoked command)
            terminal = False
            for i, module in enumerate(config.module_names):
                if i == len(config.module_names) - 1:
                    terminal = True

                module_config = config.get_module_inputs(module)
                fpath = module_config.fpath
                if module.lower() == 'exclusions':
                    log_file = os.path.join(
                        config.logdir,
                        os.path.basename(fpath).replace('.h5', '.log'))
                    afk = module_config.area_filter_kernel
                    ctx.invoke(exclusions,
                               excl_fpath=fpath,
                               out_dir=config.dirout,
                               sub_dir=module_config.sub_dir,
                               excl_dict=module_config.excl_dict,
                               area_filter_kernel=afk,
                               min_area=module_config.min_area,
                               plot_type=module_config.plot_type,
                               cmap=module_config.cmap,
                               plot_step=module_config.plot_step,
                               log_file=log_file,
                               verbose=verbose,
                               terminal=terminal)

                elif fpath.endswith('.h5'):
                    log_file = os.path.join(
                        config.logdir,
                        os.path.basename(fpath).replace('.h5', '.log'))
                    ctx.invoke(reV_h5,
                               h5_file=fpath,
                               out_dir=config.dirout,
                               sub_dir=module_config.sub_dir,
                               dsets=module_config.dsets,
                               group=module_config.group,
                               process_size=module_config.process_size,
                               max_workers=module_config.max_workers,
                               plot_type=module_config.plot_type,
                               cmap=module_config.cmap,
                               log_file=log_file,
                               verbose=verbose,
                               terminal=terminal)

                elif fpath.endswith('.csv'):
                    log_file = os.path.join(
                        config.logdir,
                        os.path.basename(fpath).replace('.csv', '.log'))
                    ctx.invoke(supply_curve,
                               sc_table=fpath,
                               out_dir=config.dirout,
                               sub_dir=module_config.sub_dir,
                               columns=module_config.columns,
                               plot_type=module_config.plot_type,
                               cmap=module_config.cmap,
                               lcoe=module_config.lcoe,
                               log_file=log_file,
                               verbose=verbose,
                               terminal=terminal)
                else:
                    msg = (
                        "Cannot run QA/QC for {}: 'fpath' must be a '*.h5' "
                        "or '*.csv' reV output file, but {} was given!".format(
                            module, fpath))
                    logger.error(msg)
                    raise ValueError(msg)

    elif config.execution_control.option in ('eagle', 'slurm'):
        launch_slurm(config, verbose)
コード例 #12
0
ファイル: cli_collect.py プロジェクト: barbarmarc/reV
def from_config(ctx, config_file, verbose):
    """Run reV collection from a config file.

    Loads the collection config, resolves the job name, initializes
    loggers, stashes collection parameters on the click context, then
    runs or submits one collection job per file prefix.
    """
    name = ctx.obj['NAME']

    # Instantiate the config object
    config = CollectionConfig(config_file)

    # take name from config if not default
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # Enforce verbosity if logging level is specified in the config
    if config.log_level == logging.DEBUG:
        verbose = True

    # make output directory if does not exist
    if not os.path.exists(config.dirout):
        os.makedirs(config.dirout)

    # initialize loggers.
    init_mult(name,
              config.logdir,
              modules=[__name__, 'reV.handlers.collection'],
              verbose=verbose)

    # Initial log statements
    logger.info(
        'Running reV collection from config file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.info('Target collection directory: "{}"'.format(config.coldir))
    logger.info('The following project points were specified: "{}"'.format(
        config.get('project_points', None)))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    # set config objects to be passed through invoke to direct methods
    ctx.obj['H5_DIR'] = config.coldir
    ctx.obj['LOG_DIR'] = config.logdir
    ctx.obj['DSETS'] = config.dsets
    ctx.obj['PROJECT_POINTS'] = config.project_points
    ctx.obj['PURGE_CHUNKS'] = config.purge_chunks
    ctx.obj['VERBOSE'] = verbose

    # one collection job per file prefix; job name and output file are
    # derived from the prefix
    for file_prefix in config.file_prefixes:
        ctx.obj['NAME'] = name + '_{}'.format(file_prefix)
        ctx.obj['H5_FILE'] = os.path.join(config.dirout, file_prefix + '.h5')
        ctx.obj['FILE_PREFIX'] = file_prefix

        if config.execution_control.option == 'local':
            # run locally, but only if not already successful
            status = Status.retrieve_job_status(config.dirout, 'collect',
                                                ctx.obj['NAME'])
            if status != 'successful':
                Status.add_job(config.dirout,
                               'collect',
                               ctx.obj['NAME'],
                               replace=True,
                               job_attrs={
                                   'hardware': 'local',
                                   'fout': file_prefix + '.h5',
                                   'dirout': config.dirout
                               })
                ctx.invoke(collect)

        elif config.execution_control.option in ('eagle', 'slurm'):
            # submit through SLURM instead of running locally
            ctx.invoke(collect_slurm,
                       alloc=config.execution_control.allocation,
                       memory=config.execution_control.memory,
                       walltime=config.execution_control.walltime,
                       feature=config.execution_control.feature,
                       conda_env=config.execution_control.conda_env,
                       module=config.execution_control.module,
                       stdout_path=os.path.join(config.logdir, 'stdout'),
                       verbose=verbose)
コード例 #13
0
ファイル: cli_collect.py プロジェクト: barbarmarc/reV
def collect(ctx, verbose):
    """Run collection on local worker."""

    obj = ctx.obj
    name = obj['NAME']
    h5_file = obj['H5_FILE']
    h5_dir = obj['H5_DIR']
    project_points = obj['PROJECT_POINTS']
    dsets = obj['DSETS']
    file_prefix = obj['FILE_PREFIX']
    log_dir = obj['LOG_DIR']
    purge_chunks = obj['PURGE_CHUNKS']
    # CLI flag OR's with any verbosity already set on the context
    verbose = any([verbose, obj['VERBOSE']])

    # node-level loggers for this module and the collection handler
    init_mult(name,
              log_dir,
              modules=[__name__, 'reV.handlers.collection'],
              verbose=verbose,
              node=True)

    for key, val in obj.items():
        logger.debug('ctx var passed to collection method: "{}" : "{}" '
                     'with type "{}"'.format(key, val, type(val)))

    logger.info('Collection is being run for "{}" with job name "{}" '
                'and collection dir: {}. Target output path is: {}'.format(
                    dsets, name, h5_dir, h5_file))
    start = time.time()

    # collect the first dataset, then add the remaining datasets to the
    # same output file
    Collector.collect(h5_file, h5_dir, project_points, dsets[0],
                      file_prefix=file_prefix)
    for extra_dset in dsets[1:]:
        Collector.add_dataset(h5_file, h5_dir, extra_dset,
                              file_prefix=file_prefix)

    # chunk files are either purged or moved once collection is done
    chunk_handler = (Collector.purge_chunks if purge_chunks
                     else Collector.move_chunks)
    chunk_handler(h5_file, h5_dir, project_points, file_prefix=file_prefix)

    runtime = (time.time() - start) / 60
    logger.info('Collection completed in: {:.2f} min.'.format(runtime))

    # add job to reV status file.
    job_status = {
        'dirout': os.path.dirname(h5_file),
        'fout': os.path.basename(h5_file),
        'job_status': 'successful',
        'runtime': runtime,
        'finput': os.path.join(h5_dir, '{}*.h5'.format(file_prefix))
    }
    Status.make_job_file(os.path.dirname(h5_file), 'collect', name,
                         job_status)
コード例 #14
0
ファイル: cli_gen.py プロジェクト: mmdione/reV
def local(ctx, max_workers, timeout, points_range, verbose):
    """Run generation on local worker(s).

    Pulls all run parameters off the click context, initializes
    node-level loggers, executes reV generation with smart data
    flushing, and records a "successful" job status file on completion.
    """

    name = ctx.obj['NAME']
    tech = ctx.obj['TECH']
    points = ctx.obj['POINTS']
    sam_files = ctx.obj['SAM_FILES']
    res_file = ctx.obj['RES_FILE']
    sites_per_worker = ctx.obj['SITES_PER_WORKER']
    fout = ctx.obj['FOUT']
    dirout = ctx.obj['DIROUT']
    logdir = ctx.obj['LOGDIR']
    output_request = ctx.obj['OUTPUT_REQUEST']
    site_data = ctx.obj['SITE_DATA']
    mem_util_lim = ctx.obj['MEM_UTIL_LIM']
    curtailment = ctx.obj['CURTAILMENT']
    # CLI flag OR's with any verbosity already set on the context
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # initialize loggers for multiple modules
    init_mult(name, logdir, modules=['reV', 'rex'], verbose=verbose, node=True)

    for key, val in ctx.obj.items():
        logger.debug('ctx var passed to local method: "{}" : "{}" with type '
                     '"{}"'.format(key, val, type(val)))

    # typo fix: message previously read "with with job name"
    logger.info('Gen local is being run with job name "{}" and resource '
                'file: {}. Target output path is: {}'.format(
                    name, res_file, os.path.join(dirout, fout)))
    t0 = time.time()

    points = _parse_points(ctx)

    # Execute the Generation module with smart data flushing.
    Gen.reV_run(tech=tech,
                points=points,
                sam_files=sam_files,
                res_file=res_file,
                site_data=site_data,
                output_request=output_request,
                curtailment=curtailment,
                max_workers=max_workers,
                sites_per_worker=sites_per_worker,
                points_range=points_range,
                fout=fout,
                dirout=dirout,
                mem_util_lim=mem_util_lim,
                timeout=timeout)

    # only mention the points range in the log if one was given
    tmp_str = ' with points range {}'.format(points_range)
    runtime = (time.time() - t0) / 60
    logger.info('Gen compute complete for project points "{0}"{1}. '
                'Time elapsed: {2:.2f} min. Target output dir: {3}'.format(
                    points, tmp_str if points_range else '', runtime, dirout))

    # add job to reV status file.
    status = {
        'dirout': dirout,
        'fout': fout,
        'job_status': 'successful',
        'runtime': runtime,
        'finput': res_file
    }
    Status.make_job_file(dirout, 'generation', name, status)
コード例 #15
0
ファイル: cli_econ.py プロジェクト: barbarmarc/reV
def slurm(ctx, nodes, alloc, memory, walltime, feature, module, conda_env,
          stdout_path, verbose):
    """Run econ on HPC via SLURM job submission.

    Splits the project points across ``nodes`` SLURM jobs (a single
    pass-through job when appending), builds the node-level CLI command
    for each split, and submits it unless a prior run of that node job
    already finished successfully. All job inputs beyond the click
    options are pulled from the click context object populated by the
    parent command group.
    """

    # Unpack job inputs stashed on the click context by the parent command.
    name = ctx.obj['NAME']
    points = ctx.obj['POINTS']
    sam_files = ctx.obj['SAM_FILES']
    cf_file = ctx.obj['CF_FILE']
    cf_year = ctx.obj['CF_YEAR']
    site_data = ctx.obj['SITE_DATA']
    sites_per_worker = ctx.obj['SITES_PER_WORKER']
    max_workers = ctx.obj['MAX_WORKERS']
    timeout = ctx.obj['TIMEOUT']
    fout = ctx.obj['FOUT']
    dirout = ctx.obj['DIROUT']
    logdir = ctx.obj['LOGDIR']
    output_request = ctx.obj['OUTPUT_REQUEST']
    append = ctx.obj['APPEND']
    # CLI flag OR context flag enables verbose node commands.
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # initialize an info logger on the year level
    init_mult(name,
              logdir,
              modules=[
                  __name__, 'reV.econ.econ', 'reV.config', 'reV.utilities',
                  'reV.SAM', 'rex.utilities'
              ],
              verbose=False)

    if append:
        # Append mode runs a single node with no points split.
        pc = [None]
    else:
        # Split the project points into one chunk per requested node.
        pc = get_node_pc(points, sam_files, nodes)

    for i, split in enumerate(pc):
        # Derive a per-node job name and output filename.
        node_name, fout_node = get_node_name_fout(name,
                                                  fout,
                                                  i,
                                                  pc,
                                                  hpc='slurm')
        # Shared naming helper is gen-oriented; rebrand for econ.
        node_name = node_name.replace('gen', 'econ')

        points_range = split.split_range if split is not None else None
        # Build the shell command the SLURM job will execute on the node.
        cmd = get_node_cmd(node_name,
                           sam_files,
                           cf_file,
                           cf_year=cf_year,
                           site_data=site_data,
                           points=points,
                           points_range=points_range,
                           sites_per_worker=sites_per_worker,
                           max_workers=max_workers,
                           timeout=timeout,
                           fout=fout_node,
                           dirout=dirout,
                           logdir=logdir,
                           output_request=output_request,
                           append=append,
                           verbose=verbose)

        # Check the reV status file so successful jobs are not re-run.
        status = Status.retrieve_job_status(dirout, 'econ', node_name)

        if status == 'successful':
            msg = ('Job "{}" is successful in status json found in "{}", '
                   'not re-running.'.format(node_name, dirout))
        else:
            logger.info('Running reV econ on SLURM with node name "{}" for '
                        '{} (points range: {}).'.format(
                            node_name, pc, points_range))
            # create and submit the SLURM job
            slurm = SLURM(cmd,
                          alloc=alloc,
                          memory=memory,
                          walltime=walltime,
                          feature=feature,
                          name=node_name,
                          stdout_path=stdout_path,
                          conda_env=conda_env,
                          module=module)
            # A populated job id means the submission was accepted.
            if slurm.id:
                msg = (
                    'Kicked off reV econ job "{}" (SLURM jobid #{}).'.format(
                        node_name, slurm.id))
                # add job to reV status file.
                Status.add_job(dirout,
                               'econ',
                               node_name,
                               replace=True,
                               job_attrs={
                                   'job_id': slurm.id,
                                   'hardware': 'eagle',
                                   'fout': fout_node,
                                   'dirout': dirout
                               })
            else:
                msg = (
                    'Was unable to kick off reV econ job "{}". '
                    'Please see the stdout error messages'.format(node_name))

        # Surface the outcome both on stdout and in the log.
        click.echo(msg)
        logger.info(msg)
コード例 #16
0
ファイル: cli_rep_profiles.py プロジェクト: clhughes/reV
def from_config(ctx, config_file, verbose):
    """Run reV representative profiles from a config file."""
    name = ctx.obj['NAME']

    # Parse the user configuration file.
    config = RepProfilesConfig(config_file)

    # Prefer the config-specified job name over the CLI default.
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # A DEBUG log level in the config forces verbose logging.
    verbose = True if config.log_level == logging.DEBUG else verbose

    # Set up loggers for this module and its reV/rex dependencies.
    init_mult(
        name,
        config.logdir,
        modules=[__name__, 'reV.config', 'reV.utilities', 'rex.utilities'],
        verbose=verbose)

    # Log the run inputs up front for traceability.
    logger.info('Running reV representative profiles from config '
                'file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    # Expand to one (name, gen_fpath, cf_dset) job per analysis year when
    # either the cf dataset or the gen filepath has a '{}' year format key.
    multi_year = config.years[0] is not None
    if multi_year and '{}' in config.cf_dset:
        jobs = [(name + '_{}'.format(y), config.gen_fpath,
                 config.cf_dset.format(y)) for y in config.years]
    elif multi_year and '{}' in config.gen_fpath:
        jobs = [(name + '_{}'.format(y), config.gen_fpath.format(y),
                 config.cf_dset) for y in config.years]
    else:
        jobs = [(name, config.gen_fpath, config.cf_dset)]

    for name, gen_fpath, dset in jobs:

        if config.execution_control.option == 'local':
            # Skip jobs that already completed successfully.
            status = Status.retrieve_job_status(config.dirout, 'rep-profiles',
                                                name)
            if status != 'successful':
                Status.add_job(config.dirout,
                               'rep-profiles',
                               name,
                               replace=True,
                               job_attrs={
                                   'hardware': 'local',
                                   'fout': '{}.h5'.format(name),
                                   'dirout': config.dirout
                               })
                ctx.invoke(direct,
                           gen_fpath=gen_fpath,
                           rev_summary=config.rev_summary,
                           reg_cols=config.reg_cols,
                           cf_dset=dset,
                           rep_method=config.rep_method,
                           err_method=config.err_method,
                           weight=config.weight,
                           out_dir=config.dirout,
                           log_dir=config.logdir,
                           n_profiles=config.n_profiles,
                           max_workers=config.execution_control.max_workers,
                           aggregate_profiles=config.aggregate_profiles,
                           verbose=verbose)

        elif config.execution_control.option in ('eagle', 'slurm'):
            # Stash the job inputs on the context for the slurm subcommand.
            ctx.obj.update({
                'NAME': name,
                'GEN_FPATH': gen_fpath,
                'REV_SUMMARY': config.rev_summary,
                'REG_COLS': config.reg_cols,
                'CF_DSET': dset,
                'REP_METHOD': config.rep_method,
                'ERR_METHOD': config.err_method,
                'WEIGHT': config.weight,
                'N_PROFILES': config.n_profiles,
                'OUT_DIR': config.dirout,
                'LOG_DIR': config.logdir,
                'MAX_WORKERS': config.execution_control.max_workers,
                'AGGREGATE_PROFILES': config.aggregate_profiles,
                'VERBOSE': verbose,
            })

            ctx.invoke(slurm,
                       alloc=config.execution_control.allocation,
                       memory=config.execution_control.memory,
                       walltime=config.execution_control.walltime,
                       feature=config.execution_control.feature,
                       conda_env=config.execution_control.conda_env,
                       module=config.execution_control.module)
コード例 #17
0
ファイル: cli_econ.py プロジェクト: barbarmarc/reV
def local(ctx, max_workers, timeout, points_range, verbose):
    """Run econ on local worker(s).

    Pulls all job parameters from the click context populated by the
    parent command group, initializes node-level loggers, executes the
    Econ analysis, and records a 'successful' entry in the reV status
    file on completion.
    """

    # Unpack job inputs stashed on the click context by the parent command.
    name = ctx.obj['NAME']
    points = ctx.obj['POINTS']
    sam_files = ctx.obj['SAM_FILES']
    cf_file = ctx.obj['CF_FILE']
    cf_year = ctx.obj['CF_YEAR']
    site_data = ctx.obj['SITE_DATA']
    sites_per_worker = ctx.obj['SITES_PER_WORKER']
    fout = ctx.obj['FOUT']
    dirout = ctx.obj['DIROUT']
    logdir = ctx.obj['LOGDIR']
    output_request = ctx.obj['OUTPUT_REQUEST']
    append = ctx.obj['APPEND']
    verbose = any([verbose, ctx.obj['VERBOSE']])

    if append:
        # Append mode writes econ results into the existing gen file.
        fout = os.path.basename(cf_file)
        dirout = os.path.dirname(cf_file)

    # initialize loggers for multiple modules
    log_modules = [
        __name__, 'reV.econ.econ', 'reV.generation', 'reV.config',
        'reV.utilities', 'reV.SAM', 'reV.handlers', 'rex.utilities'
    ]
    init_mult(name, logdir, modules=log_modules, verbose=verbose, node=True)

    for key, val in ctx.obj.items():
        logger.debug('ctx var passed to local method: "{}" : "{}" with type '
                     '"{}"'.format(key, val, type(val)))

    # BUGFIX: removed doubled word ("with with") from the log message.
    logger.info(
        'Econ local is being run with job name "{}" and '
        'generation results file: {}. Target output path is: {}'.format(
            name, cf_file, os.path.join(dirout, fout)))
    t0 = time.time()

    # Execute the Econ module with smart data flushing.
    Econ.reV_run(points=points,
                 sam_files=sam_files,
                 cf_file=cf_file,
                 cf_year=cf_year,
                 site_data=site_data,
                 output_request=output_request,
                 max_workers=max_workers,
                 timeout=timeout,
                 sites_per_worker=sites_per_worker,
                 points_range=points_range,
                 fout=fout,
                 dirout=dirout,
                 append=append)

    tmp_str = ' with points range {}'.format(points_range)
    runtime = (time.time() - t0) / 60
    logger.info('Econ compute complete for project points "{0}"{1}. '
                'Time elapsed: {2:.2f} min. Target output dir: {3}'.format(
                    points, tmp_str if points_range else '', runtime, dirout))

    # add job to reV status file.
    status = {
        'dirout': dirout,
        'fout': fout,
        'job_status': 'successful',
        'runtime': runtime,
        'finput': cf_file
    }
    Status.make_job_file(dirout, 'econ', name, status)
コード例 #18
0
ファイル: cli_offshore.py プロジェクト: barbarmarc/reV
def from_config(ctx, config_file, verbose):
    """Run reV offshore aggregation from a config file.

    Parses the offshore config, sets up logging, then dispatches one
    offshore aggregation job per generation filepath either locally
    (via the ``direct`` command) or to SLURM.
    """
    # Instantiate the config object
    config = OffshoreConfig(config_file)
    name = ctx.obj['NAME']

    # take name from config if not default
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # Enforce verbosity if logging level is specified in the config
    if config.log_level == logging.DEBUG:
        verbose = True

    # initialize loggers
    init_mult(
        name,
        config.logdir,
        modules=[__name__, 'reV.config', 'reV.utilities', 'rex.utilities'],
        verbose=verbose)

    # Initial log statements
    logger.info('Running reV offshore aggregation from config '
                'file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    # One job per generation filepath (e.g. per analysis year).
    for i, gen_fpath in enumerate(config.parse_gen_fpaths()):
        # Zero-padded index keeps job names unique and sortable.
        job_name = '{}_{}'.format(name, str(i).zfill(2))

        if config.execution_control.option == 'local':
            # Skip jobs that already completed successfully.
            status = Status.retrieve_job_status(config.dirout, 'offshore',
                                                job_name)
            if status != 'successful':
                Status.add_job(config.dirout,
                               'offshore',
                               job_name,
                               replace=True,
                               job_attrs={
                                   'hardware': 'local',
                                   'fout': '{}_offshore.h5'.format(job_name),
                                   'dirout': config.dirout,
                                   'finput': gen_fpath
                               })
                ctx.invoke(direct,
                           gen_fpath=gen_fpath,
                           offshore_fpath=config.offshore_fpath,
                           points=config.project_points,
                           sam_files=config.sam_files,
                           logdir=config.logdir,
                           verbose=verbose)

        elif config.execution_control.option in ('eagle', 'slurm'):

            # Stash the job inputs on the context for the slurm subcommand.
            ctx.obj['NAME'] = job_name
            ctx.obj['GEN_FPATH'] = gen_fpath
            ctx.obj['OFFSHORE_FPATH'] = config.offshore_fpath
            ctx.obj['PROJECT_POINTS'] = config.project_points
            ctx.obj['SAM_FILES'] = config.sam_files
            ctx.obj['OUT_DIR'] = config.dirout
            ctx.obj['LOG_DIR'] = config.logdir
            ctx.obj['VERBOSE'] = verbose

            ctx.invoke(slurm,
                       alloc=config.execution_control.allocation,
                       memory=config.execution_control.memory,
                       walltime=config.execution_control.walltime,
                       feature=config.execution_control.feature,
                       module=config.execution_control.module,
                       conda_env=config.execution_control.conda_env)
コード例 #19
0
ファイル: cli_gen.py プロジェクト: barbarmarc/reV
def slurm(ctx, nodes, alloc, memory, walltime, feature, conda_env, module,
          stdout_path, verbose):
    """Run generation on HPC via SLURM job submission.

    Splits the project points across ``nodes`` SLURM jobs, builds the
    node-level CLI command for each split, and submits it unless a prior
    run of that node job already finished successfully. All job inputs
    beyond the click options come from the click context object
    populated by the parent command group.
    """

    # Unpack job inputs stashed on the click context by the parent command.
    name = ctx.obj['NAME']
    tech = ctx.obj['TECH']
    points = ctx.obj['POINTS']
    sam_files = ctx.obj['SAM_FILES']
    res_file = ctx.obj['RES_FILE']
    sites_per_worker = ctx.obj['SITES_PER_WORKER']
    fout = ctx.obj['FOUT']
    dirout = ctx.obj['DIROUT']
    logdir = ctx.obj['LOGDIR']
    output_request = ctx.obj['OUTPUT_REQUEST']
    max_workers = ctx.obj['MAX_WORKERS']
    mem_util_lim = ctx.obj['MEM_UTIL_LIM']
    timeout = ctx.obj['TIMEOUT']
    curtailment = ctx.obj['CURTAILMENT']
    downscale = ctx.obj['DOWNSCALE']
    # CLI flag OR context flag enables verbose node commands.
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # initialize an info logger on the year level
    init_mult(name, logdir, modules=[__name__, 'reV.generation.generation',
                                     'reV.config', 'reV.utilities', 'reV.SAM'],
              verbose=False)

    # Split the project points into one chunk per requested node.
    pc = get_node_pc(points, sam_files, tech, res_file, nodes)

    for i, split in enumerate(pc):
        # Derive a per-node job name and output filename.
        node_name, fout_node = get_node_name_fout(name, fout, i, pc,
                                                  hpc='slurm')

        # Build the shell command the SLURM job will execute on the node.
        cmd = get_node_cmd(node_name, tech, sam_files, res_file,
                           points=points, points_range=split.split_range,
                           sites_per_worker=sites_per_worker,
                           max_workers=max_workers, fout=fout_node,
                           dirout=dirout, logdir=logdir,
                           output_request=output_request,
                           mem_util_lim=mem_util_lim, timeout=timeout,
                           curtailment=curtailment,
                           downscale=downscale, verbose=verbose)

        # Check the reV status file so successful jobs are not re-run.
        status = Status.retrieve_job_status(dirout, 'generation', node_name)
        if status == 'successful':
            msg = ('Job "{}" is successful in status json found in "{}", '
                   'not re-running.'
                   .format(node_name, dirout))
        else:
            logger.info('Running reV generation on SLURM with node name "{}" '
                        'for {} (points range: {}).'
                        .format(node_name, pc, split.split_range))
            # create and submit the SLURM job
            slurm = SLURM(cmd, alloc=alloc, memory=memory, walltime=walltime,
                          feature=feature, name=node_name,
                          stdout_path=stdout_path, conda_env=conda_env,
                          module=module)
            # A populated job id means the submission was accepted.
            if slurm.id:
                msg = ('Kicked off reV generation job "{}" (SLURM jobid #{}).'
                       .format(node_name, slurm.id))
                # add job to reV status file.
                Status.add_job(
                    dirout, 'generation', node_name, replace=True,
                    job_attrs={'job_id': slurm.id, 'hardware': 'eagle',
                               'fout': fout_node, 'dirout': dirout})
            else:
                msg = ('Was unable to kick off reV generation job "{}". '
                       'Please see the stdout error messages'
                       .format(node_name))

        # Surface the outcome both on stdout and in the log.
        click.echo(msg)
        logger.info(msg)
コード例 #20
0
ファイル: cli_supply_curve.py プロジェクト: clhughes/reV
def direct(ctx, sc_points, trans_table, fixed_charge_rate, sc_features,
           transmission_costs, sort_on, offshore_trans_table, wind_dirs,
           n_dirs, downwind, offshore_compete, max_workers, out_dir, log_dir,
           simple, line_limited, verbose):
    """reV Supply Curve CLI.

    Stashes all inputs on the click context for subcommands. When no
    subcommand is invoked, runs the supply curve compute directly
    (simple or full transmission treatment), writes the result CSV to
    ``out_dir``, and records a 'successful' entry in the reV status
    file.
    """
    name = ctx.obj['NAME']
    # Stash inputs on the context for subcommands (e.g. slurm).
    ctx.obj['SC_POINTS'] = sc_points
    ctx.obj['TRANS_TABLE'] = trans_table
    ctx.obj['FIXED_CHARGE_RATE'] = fixed_charge_rate
    ctx.obj['SC_FEATURES'] = sc_features
    ctx.obj['TRANSMISSION_COSTS'] = transmission_costs
    ctx.obj['SORT_ON'] = sort_on
    ctx.obj['OFFSHORE_TRANS_TABLE'] = offshore_trans_table
    ctx.obj['WIND_DIRS'] = wind_dirs
    ctx.obj['N_DIRS'] = n_dirs
    ctx.obj['DOWNWIND'] = downwind
    ctx.obj['MAX_WORKERS'] = max_workers
    ctx.obj['OFFSHORE_COMPETE'] = offshore_compete
    ctx.obj['OUT_DIR'] = out_dir
    ctx.obj['LOG_DIR'] = log_dir
    ctx.obj['SIMPLE'] = simple
    ctx.obj['LINE_LIMITED'] = line_limited
    ctx.obj['VERBOSE'] = verbose

    if ctx.invoked_subcommand is None:
        t0 = time.time()
        init_mult(
            name,
            log_dir,
            modules=[__name__, 'reV.supply_curve', 'reV.handlers', 'rex'],
            verbose=verbose)

        # Transmission costs may arrive as a JSON-like string from the CLI.
        if isinstance(transmission_costs, str):
            transmission_costs = dict_str_load(transmission_costs)

        offshore_table = offshore_trans_table
        try:
            if simple:
                out = SupplyCurve.simple(sc_points,
                                         trans_table,
                                         fixed_charge_rate,
                                         sc_features=sc_features,
                                         transmission_costs=transmission_costs,
                                         sort_on=sort_on,
                                         wind_dirs=wind_dirs,
                                         n_dirs=n_dirs,
                                         downwind=downwind,
                                         max_workers=max_workers,
                                         offshore_trans_table=offshore_table,
                                         offshore_compete=offshore_compete)
            else:
                out = SupplyCurve.full(sc_points,
                                       trans_table,
                                       fixed_charge_rate,
                                       sc_features=sc_features,
                                       transmission_costs=transmission_costs,
                                       line_limited=line_limited,
                                       sort_on=sort_on,
                                       wind_dirs=wind_dirs,
                                       n_dirs=n_dirs,
                                       downwind=downwind,
                                       max_workers=max_workers,
                                       offshore_trans_table=offshore_table,
                                       offshore_compete=offshore_compete)
        except Exception as e:
            logger.exception('Supply curve compute failed. Received the '
                             'following error:\n{}'.format(e))
            # Bare raise preserves the original traceback unmodified
            # (``raise e`` would re-raise through this frame).
            raise

        # Write the supply curve table to the target output dir.
        fn_out = '{}.csv'.format(name)
        fpath_out = os.path.join(out_dir, fn_out)
        out.to_csv(fpath_out, index=False)

        runtime = (time.time() - t0) / 60
        logger.info('Supply curve complete. Time elapsed: {:.2f} min. '
                    'Target output dir: {}'.format(runtime, out_dir))

        # Record all file inputs that were actually provided.
        finput = [sc_points, trans_table]
        if sc_features is not None:
            finput.append(sc_features)

        if transmission_costs is not None:
            finput.append(transmission_costs)

        # add job to reV status file.
        status = {
            'dirout': out_dir,
            'fout': fn_out,
            'job_status': 'successful',
            'runtime': runtime,
            'finput': finput
        }
        Status.make_job_file(out_dir, 'supply-curve', name, status)
コード例 #21
0
def direct(ctx, excl_fpath, gen_fpath, econ_fpath, res_fpath, tm_dset,
           excl_dict, check_excl_layers, res_class_dset, res_class_bins,
           cf_dset, lcoe_dset, h5_dsets, data_layers, resolution, excl_area,
           power_density, area_filter_kernel, min_area, friction_fpath,
           friction_dset, out_dir, log_dir, verbose):
    """reV Supply Curve Aggregation Summary CLI.

    Stashes all inputs on the click context for subcommands. When no
    subcommand is invoked, runs (and if necessary first builds the tech
    mapping for) the supply curve aggregation summary, writes the result
    CSV to ``out_dir``, and records a 'successful' entry in the reV
    status file.
    """
    name = ctx.obj['NAME']
    # Stash inputs on the context for subcommands (e.g. slurm).
    ctx.obj['EXCL_FPATH'] = excl_fpath
    ctx.obj['GEN_FPATH'] = gen_fpath
    ctx.obj['ECON_FPATH'] = econ_fpath
    ctx.obj['RES_FPATH'] = res_fpath
    ctx.obj['TM_DSET'] = tm_dset
    ctx.obj['EXCL_DICT'] = excl_dict
    ctx.obj['CHECK_LAYERS'] = check_excl_layers
    ctx.obj['RES_CLASS_DSET'] = res_class_dset
    ctx.obj['RES_CLASS_BINS'] = res_class_bins
    ctx.obj['CF_DSET'] = cf_dset
    ctx.obj['LCOE_DSET'] = lcoe_dset
    ctx.obj['H5_DSETS'] = h5_dsets
    ctx.obj['DATA_LAYERS'] = data_layers
    ctx.obj['RESOLUTION'] = resolution
    ctx.obj['EXCL_AREA'] = excl_area
    ctx.obj['POWER_DENSITY'] = power_density
    ctx.obj['AREA_FILTER_KERNEL'] = area_filter_kernel
    ctx.obj['MIN_AREA'] = min_area
    ctx.obj['FRICTION_FPATH'] = friction_fpath
    ctx.obj['FRICTION_DSET'] = friction_dset
    ctx.obj['OUT_DIR'] = out_dir
    ctx.obj['LOG_DIR'] = log_dir
    ctx.obj['VERBOSE'] = verbose

    if ctx.invoked_subcommand is None:
        t0 = time.time()
        init_mult(name,
                  log_dir,
                  modules=[__name__, 'reV.supply_curve'],
                  verbose=verbose)

        # Build the techmap dataset if it is not already in the excl file.
        with h5py.File(excl_fpath, mode='r') as f:
            dsets = list(f)
        if tm_dset not in dsets:
            try:
                TechMapping.run(excl_fpath, res_fpath, tm_dset)
            except Exception as e:
                logger.exception('TechMapping process failed. Received the '
                                 'following error:\n{}'.format(e))
                # Bare raise preserves the original traceback unmodified
                # (``raise e`` would re-raise through this frame).
                raise

        # Dict inputs may arrive as JSON-like strings from the CLI.
        if isinstance(excl_dict, str):
            excl_dict = dict_str_load(excl_dict)

        if isinstance(data_layers, str):
            data_layers = dict_str_load(data_layers)

        try:
            summary = SupplyCurveAggregation.summary(
                excl_fpath,
                gen_fpath,
                tm_dset,
                econ_fpath=econ_fpath,
                excl_dict=excl_dict,
                res_class_dset=res_class_dset,
                res_class_bins=res_class_bins,
                cf_dset=cf_dset,
                lcoe_dset=lcoe_dset,
                h5_dsets=h5_dsets,
                data_layers=data_layers,
                resolution=resolution,
                excl_area=excl_area,
                power_density=power_density,
                area_filter_kernel=area_filter_kernel,
                min_area=min_area,
                friction_fpath=friction_fpath,
                friction_dset=friction_dset,
                check_excl_layers=check_excl_layers)

        except Exception as e:
            logger.exception('Supply curve Aggregation failed. Received the '
                             'following error:\n{}'.format(e))
            # Bare raise preserves the original traceback unmodified.
            raise

        # Write the aggregation summary table to the target output dir.
        fn_out = '{}.csv'.format(name)
        fpath_out = os.path.join(out_dir, fn_out)
        summary.to_csv(fpath_out)

        runtime = (time.time() - t0) / 60
        logger.info('Supply curve aggregation complete. '
                    'Time elapsed: {:.2f} min. Target output dir: {}'.format(
                        runtime, out_dir))

        # Record all file inputs that were actually provided.
        finput = [excl_fpath, gen_fpath]
        if res_fpath is not None:
            finput.append(res_fpath)

        # add job to reV status file.
        status = {
            'dirout': out_dir,
            'fout': fn_out,
            'job_status': 'successful',
            'runtime': runtime,
            'finput': finput,
            'excl_fpath': excl_fpath,
            'excl_dict': excl_dict,
            'area_filter_kernel': area_filter_kernel,
            'min_area': min_area
        }
        Status.make_job_file(out_dir, 'supply-curve-aggregation', name, status)
コード例 #22
0
def from_config(ctx, config_file, verbose):
    """Run reV SC aggregation from a config file."""
    name = ctx.obj['NAME']

    # Parse the user configuration file.
    config = SupplyCurveAggregationConfig(config_file)

    # Prefer the config-specified job name over the CLI default.
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # A DEBUG log level in the config forces verbose logging.
    verbose = True if config.log_level == logging.DEBUG else verbose

    # Set up loggers for this module and its reV/rex dependencies.
    init_mult(
        name,
        config.logdir,
        modules=[__name__, 'reV.config', 'reV.utilities', 'rex.utilities'],
        verbose=verbose)

    # Log the run inputs up front for traceability.
    logger.info('Running reV supply curve aggregation from config '
                'file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    if config.execution_control.option == 'local':
        # Skip the run if a prior job already completed successfully.
        status = Status.retrieve_job_status(config.dirout,
                                            'supply-curve-aggregation', name)
        if status != 'successful':
            Status.add_job(config.dirout,
                           'supply-curve-aggregation',
                           name,
                           replace=True,
                           job_attrs={
                               'hardware': 'local',
                               'fout': '{}.csv'.format(name),
                               'dirout': config.dirout
                           })
            ctx.invoke(direct,
                       excl_fpath=config.excl_fpath,
                       gen_fpath=config.gen_fpath,
                       econ_fpath=config.econ_fpath,
                       res_fpath=config.res_fpath,
                       tm_dset=config.tm_dset,
                       excl_dict=config.excl_dict,
                       check_excl_layers=config.check_excl_layers,
                       res_class_dset=config.res_class_dset,
                       res_class_bins=config.res_class_bins,
                       cf_dset=config.cf_dset,
                       lcoe_dset=config.lcoe_dset,
                       h5_dsets=config.h5_dsets,
                       data_layers=config.data_layers,
                       resolution=config.resolution,
                       excl_area=config.excl_area,
                       power_density=config.power_density,
                       area_filter_kernel=config.area_filter_kernel,
                       min_area=config.min_area,
                       friction_fpath=config.friction_fpath,
                       friction_dset=config.friction_dset,
                       out_dir=config.dirout,
                       log_dir=config.logdir,
                       verbose=verbose)

    elif config.execution_control.option in ('eagle', 'slurm'):

        # Stash the job inputs on the context for the slurm subcommand.
        ctx.obj.update({
            'NAME': name,
            'EXCL_FPATH': config.excl_fpath,
            'GEN_FPATH': config.gen_fpath,
            'ECON_FPATH': config.econ_fpath,
            'RES_FPATH': config.res_fpath,
            'TM_DSET': config.tm_dset,
            'EXCL_DICT': config.excl_dict,
            'CHECK_LAYERS': config.check_excl_layers,
            'RES_CLASS_DSET': config.res_class_dset,
            'RES_CLASS_BINS': config.res_class_bins,
            'CF_DSET': config.cf_dset,
            'LCOE_DSET': config.lcoe_dset,
            'H5_DSETS': config.h5_dsets,
            'DATA_LAYERS': config.data_layers,
            'RESOLUTION': config.resolution,
            'EXCL_AREA': config.excl_area,
            'POWER_DENSITY': config.power_density,
            'AREA_FILTER_KERNEL': config.area_filter_kernel,
            'MIN_AREA': config.min_area,
            'FRICTION_FPATH': config.friction_fpath,
            'FRICTION_DSET': config.friction_dset,
            'OUT_DIR': config.dirout,
            'LOG_DIR': config.logdir,
            'VERBOSE': verbose,
        })

        ctx.invoke(slurm,
                   alloc=config.execution_control.allocation,
                   memory=config.execution_control.memory,
                   feature=config.execution_control.feature,
                   walltime=config.execution_control.walltime,
                   conda_env=config.execution_control.conda_env,
                   module=config.execution_control.module)
コード例 #23
0
ファイル: cli_supply_curve.py プロジェクト: clhughes/reV
def from_config(ctx, config_file, verbose):
    """Run reV supply curve compute from a config file."""
    # Parse and validate the supply curve config file.
    config = SupplyCurveConfig(config_file)

    # A non-default job name in the config overrides the CLI default.
    name = ctx.obj['NAME']
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # A DEBUG log level in the config forces verbose logging.
    if config.log_level == logging.DEBUG:
        verbose = True

    # initialize loggers
    init_mult(name, config.logdir, verbose=verbose,
              modules=[__name__, 'reV.config', 'reV.utilities',
                       'rex.utilities'])

    logger.info('Running reV supply curve from config '
                'file: "{}"'.format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.debug('The full configuration input is as follows:\n{}'.format(
        pprint.pformat(config, indent=4)))

    option = config.execution_control.option
    if option == 'local':
        # Only (re)run locally when no previous successful run is recorded.
        status = Status.retrieve_job_status(config.dirout, 'supply-curve',
                                            name)
        if status != 'successful':
            job_attrs = {'hardware': 'local',
                         'fout': '{}.csv'.format(name),
                         'dirout': config.dirout}
            Status.add_job(config.dirout, 'supply-curve', name,
                           replace=True, job_attrs=job_attrs)
            # NOTE(review): offshore_trans_table / offshore_compete are only
            # forwarded on the SLURM path below — confirm whether `direct`
            # should receive them here as well.
            ctx.invoke(direct,
                       sc_points=config.sc_points,
                       trans_table=config.trans_table,
                       fixed_charge_rate=config.fixed_charge_rate,
                       sc_features=config.sc_features,
                       transmission_costs=config.transmission_costs,
                       sort_on=config.sort_on,
                       wind_dirs=config.wind_dirs,
                       n_dirs=config.n_dirs,
                       downwind=config.downwind,
                       max_workers=config.max_workers,
                       out_dir=config.dirout,
                       log_dir=config.logdir,
                       simple=config.simple,
                       line_limited=config.line_limited,
                       verbose=verbose)

    elif option in ('eagle', 'slurm'):
        # Stash everything the slurm sub-command reads off the click context.
        ctx.obj.update({
            'NAME': name,
            'SC_POINTS': config.sc_points,
            'TRANS_TABLE': config.trans_table,
            'FIXED_CHARGE_RATE': config.fixed_charge_rate,
            'SC_FEATURES': config.sc_features,
            'TRANSMISSION_COSTS': config.transmission_costs,
            'SORT_ON': config.sort_on,
            'OFFSHORE_TRANS_TABLE': config.offshore_trans_table,
            'WIND_DIRS': config.wind_dirs,
            'N_DIRS': config.n_dirs,
            'DOWNWIND': config.downwind,
            'OFFSHORE_COMPETE': config.offshore_compete,
            'MAX_WORKERS': config.max_workers,
            'OUT_DIR': config.dirout,
            'LOG_DIR': config.logdir,
            'SIMPLE': config.simple,
            'LINE_LIMITED': config.line_limited,
            'VERBOSE': verbose,
        })

        exec_ctrl = config.execution_control
        ctx.invoke(slurm,
                   alloc=exec_ctrl.allocation,
                   memory=exec_ctrl.memory,
                   walltime=exec_ctrl.walltime,
                   feature=exec_ctrl.feature,
                   conda_env=exec_ctrl.conda_env,
                   module=exec_ctrl.module)
# Code example #24
# (score: 0)
# File: cli_gen.py  Project: barbarmarc/reV
def from_config(ctx, config_file, verbose):
    """Run reV gen from a config file.

    Parameters
    ----------
    ctx : click.Context
        Click context; shared CLI state lives in ``ctx.obj``.
    config_file : str
        Path to the generation config file.
    verbose : bool
        Debug-logging flag; OR'd with the group-level ``VERBOSE`` flag.
    """
    name = ctx.obj['NAME']
    # Verbose if either the sub-command flag or the parent group flag is set.
    verbose = any([verbose, ctx.obj['VERBOSE']])

    # Instantiate the config object
    config = GenConfig(config_file)

    # take name from config if not default
    if config.name.lower() != 'rev':
        name = config.name
        ctx.obj['NAME'] = name

    # Enforce verbosity if logging level is specified in the config
    if config.log_level == logging.DEBUG:
        verbose = True

    # make output directory if does not exist
    if not os.path.exists(config.dirout):
        os.makedirs(config.dirout)

    # initialize loggers.
    init_mult(name, config.logdir,
              modules=[__name__, 'reV.generation.generation',
                       'reV.config', 'reV.utilities', 'reV.SAM',
                       'rex.utilities'],
              verbose=verbose)

    # Initial log statements
    logger.info('Running reV Generation from config file: "{}"'
                .format(config_file))
    logger.info('Target output directory: "{}"'.format(config.dirout))
    logger.info('Target logging directory: "{}"'.format(config.logdir))
    logger.info('The following project points were specified: "{}"'
                .format(config.get('project_points', None)))
    logger.info('The following SAM configs are available to this run:\n{}'
                .format(pprint.pformat(config.get('sam_files', None),
                                       indent=4)))
    logger.debug('The full configuration input is as follows:\n{}'
                 .format(pprint.pformat(config, indent=4)))

    # set config objects to be passed through invoke to direct methods
    ctx.obj['TECH'] = config.technology
    ctx.obj['POINTS'] = config.project_points
    ctx.obj['SAM_FILES'] = config.sam_files
    ctx.obj['DIROUT'] = config.dirout
    ctx.obj['LOGDIR'] = config.logdir
    ctx.obj['OUTPUT_REQUEST'] = config.output_request
    ctx.obj['TIMEOUT'] = config.timeout
    ctx.obj['SITES_PER_WORKER'] = config.execution_control.sites_per_worker
    ctx.obj['MAX_WORKERS'] = config.execution_control.max_workers
    # NOTE(review): 'mememory_utilization_limit' looks like a typo, but it
    # must match the property name on the execution-control config object —
    # confirm upstream before renaming here.
    ctx.obj['MEM_UTIL_LIM'] = \
        config.execution_control.mememory_utilization_limit

    # get downscale request and raise exception if not NSRDB
    # Downscaling is only supported for solar ('pv') technologies.
    ctx.obj['DOWNSCALE'] = config.downscale
    if config.downscale is not None and 'pv' not in config.technology.lower():
        raise ConfigError('User requested downscaling for a non-solar '
                          'technology. reV does not have this capability at '
                          'the current time. Please contact a developer for '
                          'more information on this feature.')

    ctx.obj['CURTAILMENT'] = None
    if config.curtailment is not None:
        # pass through the curtailment file, not the curtailment object
        ctx.obj['CURTAILMENT'] = config['curtailment']

    # Submit one job per analysis year in the config.
    for i, year in enumerate(config.years):
        submit_from_config(ctx, name, year, config, i, verbose=verbose)