Example #1
0
 def _cancel_all(self):
     """Cancel all reV pipeline modules for all batch jobs."""
     for d in self.sub_dirs:
         pipeline_config = os.path.join(
             d, os.path.basename(self._config.pipeline_config))
         if os.path.isfile(pipeline_config):
             Pipeline.cancel_all(pipeline_config)
Example #2
0
def from_config(ctx, config_file, cancel, monitor, background, verbose):
    """Run reV pipeline from a config file."""
    # Debug logging is enabled by either the local flag or the CLI context.
    verbose = any([verbose, ctx.obj['VERBOSE']])

    if cancel:
        # Cancellation short-circuits any run/monitor request.
        Pipeline.cancel_all(config_file)
        return

    if monitor and background:
        # Monitor continuously in a detached background process.
        pipeline_monitor_background(config_file, verbose=verbose)
    else:
        Pipeline.run(config_file, monitor=monitor, verbose=verbose)
Example #3
0
    def fpath(self):
        """Get the reV module output filepath(s)

        Returns
        -------
        fpaths : str | list
            One or more filepaths output by current module being QA'd
        """
        fpath = self._config['fpath']

        if fpath == 'PIPELINE':
            # Resolve the filepath from this module's upstream pipeline job.
            for target_module in [self._name]:
                try:
                    fpath = Pipeline.parse_previous(
                        self._out_root, 'qa-qc', target='fpath',
                        target_module=target_module)
                except KeyError:
                    continue
                break

            if fpath == 'PIPELINE':
                # Nothing upstream produced an fpath target.
                raise PipelineError('Could not parse fpath from previous '
                                    'pipeline jobs.')

            logger.info('QA/QC using the following '
                        'pipeline input for fpath: {}'.format(fpath))

        return fpath
Example #4
0
    def gen_fpath(self):
        """Get the generation data filepath"""

        fpath = self['gen_fpath']

        if fpath == 'PIPELINE':
            # Prefer the most-aggregated upstream output that is available.
            for module in ('multi-year', 'collect', 'generation'):
                try:
                    fpath = Pipeline.parse_previous(
                        self.dirout, 'supply-curve-aggregation',
                        target='fpath', target_module=module)[0]
                except KeyError:
                    continue
                break

            if fpath == 'PIPELINE':
                raise PipelineError('Could not parse gen_fpath from previous '
                                    'pipeline jobs.')

            logger.info('Supply curve aggregation using the following '
                        'pipeline input for gen_fpath: {}'.format(fpath))

        return fpath
Example #5
0
    def rev_summary(self):
        """Get the rev summary input arg."""

        summary = self['rev_summary']

        if summary == 'PIPELINE':
            # Look for an upstream summary from either candidate module.
            for module in ('aggregation', 'supply-curve'):
                try:
                    summary = Pipeline.parse_previous(
                        self.dirout, 'rep-profiles', target='fpath',
                        target_module=module)[0]
                except KeyError:
                    continue
                break

            if summary == 'PIPELINE':
                raise PipelineError('Could not parse rev_summary from '
                                    'previous pipeline jobs.')

            logger.info('Rep profiles using the following '
                        'pipeline input for rev_summary: {}'.format(summary))

        return summary
Example #6
0
    def econ_fpath(self):
        """Get the econ data filepath. This is an optional argument only used
        if reV gen and econ outputs are being used from different files."""

        fpath = self.get('econ_fpath', None)

        if fpath == 'PIPELINE':
            # Prefer the most-aggregated upstream econ output available.
            for module in ('multi-year', 'collect', 'econ'):
                try:
                    fpath = Pipeline.parse_previous(
                        self.dirout, 'supply-curve-aggregation',
                        target='fpath', target_module=module)[0]
                except KeyError:
                    continue
                break

            if fpath == 'PIPELINE':
                raise PipelineError('Could not parse econ_fpath from previous '
                                    'pipeline jobs.')

            logger.info('Supply curve aggregation using the following '
                        'pipeline input for econ_fpath: {}'.format(fpath))

        return fpath
Example #7
0
    def source_files(self):
        """
        Returns
        -------
        source_files : list
            list of source files to collect from
        """
        explicit = self._source_files
        if explicit is not None:
            if isinstance(explicit, (list, tuple)):
                source_files = explicit
            elif explicit == "PIPELINE":
                source_files = Pipeline.parse_previous(self._dirout,
                                                       'multi-year',
                                                       target='fpath')
            else:
                raise ConfigError("source_files must be a list, tuple, "
                                  "or 'PIPELINE'")
        elif self._source_dir and self._source_prefix:
            # Scan the source directory, skipping per-node chunk files.
            source_files = [
                os.path.join(self._source_dir, fn)
                for fn in os.listdir(self._source_dir)
                if (fn.startswith(self._source_prefix)
                    and fn.endswith('.h5') and '_node' not in fn)]
        else:
            raise ConfigError("source_files or both source_dir and "
                              "source_prefix must be provided")

        if not any(source_files):
            raise FileNotFoundError('Could not find any source files for '
                                    'multi-year collection group: "{}"'
                                    .format(self.name))

        return source_files
Example #8
0
    def excl_fpath(self):
        """Get the exclusions filepath"""
        fpath = self['excl_fpath']

        if fpath == 'PIPELINE':
            # Take the filepath recorded by the upstream 'exclusions' job.
            fpath = Pipeline.parse_previous(
                self.dirout, 'aggregation', target='fpath',
                target_module='exclusions')[0]

        return fpath
Example #9
0
    def sc_points(self):
        """Get the supply curve points summary file path"""
        points = self['sc_points']

        if points == 'PIPELINE':
            points = Pipeline.parse_previous(
                self.dirout, 'supply-curve', target='fpath')[0]
            logger.info('Supply curve using the following '
                        'pipeline input for sc_points: {}'.format(points))

        return points
Example #10
0
    def _run_pipelines(self, monitor_background=False, verbose=False):
        """Run the reV pipeline modules for each batch job.

        Parameters
        ----------
        monitor_background : bool
            Flag to monitor all batch pipelines continuously
            in the background using the nohup command. Note that the
            stdout/stderr will not be captured, but you can set a
            pipeline "log_file" to capture logs.
        verbose : bool
            Flag to turn on debug logging for the pipelines.
        """

        for d in self.sub_dirs:
            pipeline_config = os.path.join(
                d, os.path.basename(self._config.pipeline_config))
            if not os.path.isfile(pipeline_config):
                raise PipelineError('Could not find pipeline config to run: '
                                    '"{}"'.format(pipeline_config))
            elif monitor_background:
                pipeline_monitor_background(pipeline_config, verbose=verbose)
            else:
                Pipeline.run(pipeline_config, monitor=False, verbose=verbose)
Example #11
0
def test_pipeline_local():
    """Test the reV pipeline execution on a local machine."""
    pipeline_dir = os.path.join(TESTDATADIR, 'pipeline/')
    log_dir = os.path.join(pipeline_dir, 'logs/')
    out_dir = os.path.join(pipeline_dir, 'outputs/')
    fpipeline = os.path.join(pipeline_dir, 'config_pipeline.json')
    fbaseline = os.path.join(pipeline_dir, 'baseline_pipeline_multi-year.h5')

    # Run the full pipeline to completion before checking outputs.
    Pipeline.run(fpipeline, monitor=True)

    fpath_out = Pipeline.parse_previous(out_dir, 'multi-year',
                                        target_module='multi-year')[0]

    # Compare the new multi-year means against the baseline file.
    with h5py.File(fpath_out, 'r') as f_new:
        with h5py.File(fbaseline, 'r') as f_base:
            for dset in ('generation/cf_mean-means', 'econ/lcoe_fcr-means'):
                msg = 'Local pipeline failed for "{}"'.format(dset)
                assert np.allclose(f_new[dset][...], f_base[dset][...]), msg

    if PURGE_OUT:
        shutil.rmtree(log_dir)
        shutil.rmtree(out_dir)
Example #12
0
    def parse_gen_fpaths(self):
        """
        Get a list of generation data filepaths

        Returns
        -------
        list
        """
        fpaths = self.gen_fpath
        if fpaths == 'PIPELINE':
            fpaths = Pipeline.parse_previous(self.dirout,
                                             'offshore',
                                             target='fpath',
                                             target_module='generation')

        # Normalize a single filepath to a one-item list.
        return [fpaths] if isinstance(fpaths, str) else fpaths
Example #13
0
    def parse_cf_files(self):
        """Get the capacity factor files (reV generation output data).

        Returns
        -------
        cf_files : list
            Target paths for capacity factor files (reV generation output
            data) for input to reV LCOE calculation.
        """
        if self._cf_files is None:
            fname = self.cf_file
            if '{}' in fname:
                # Format-string filename: one file per analysis year.
                self._cf_files = [fname.format(y) for y in self.years]
            elif 'PIPELINE' in fname:
                self._cf_files = Pipeline.parse_previous(super().dirout,
                                                         'econ',
                                                         target='fpath')
            else:
                # Single explicit file; still normalize to a list.
                self._cf_files = [fname]

            self.check_files(self._cf_files)

            # Non-pipeline inputs must line up with the analysis years.
            if 'PIPELINE' not in fname:
                if len(self._cf_files) != len(self.years):
                    raise ConfigError('The number of cf files does not match '
                                      'the number of analysis years!'
                                      '\n\tCF files: \n\t\t{}'
                                      '\n\tYears: \n\t\t{}'.format(
                                          self._cf_files, self.years))
                for year in self.years:
                    if str(year) not in str(self._cf_files):
                        raise ConfigError('Could not find year {} in cf '
                                          'files: {}'.format(
                                              year, self._cf_files))

        return self._cf_files
Example #14
0
    def min_area(self):
        """Get the minimum area filter minimum area in km2."""
        area = self._config.get('min_area', 'PIPELINE')

        if area != 'PIPELINE':
            return area

        try:
            area = Pipeline.parse_previous(
                self._out_root, 'qa-qc', target='min_area',
                target_module='supply-curve-aggregation')[0]
        except KeyError:
            # No upstream value available - fall back to None and warn.
            area = None
            msg = ('Could not parse min_area from previous '
                   'pipeline jobs, defaulting to: {}.'.format(area))
            logger.warning(msg)
            warn(msg)
        else:
            logger.info('QA/QC using the following '
                        'pipeline input for min_area: {}'.format(area))

        return area
Example #15
0
    def excl_dict(self):
        """Get the exclusions dictionary"""
        exclusions = self._config.get('excl_dict', 'PIPELINE')

        if exclusions != 'PIPELINE':
            return exclusions

        try:
            exclusions = Pipeline.parse_previous(
                self._out_root, 'qa-qc', target='excl_dict',
                target_module='supply-curve-aggregation')[0]
        except KeyError:
            # No upstream value available - fall back to None and warn.
            exclusions = None
            msg = ('Could not parse excl_dict from previous '
                   'pipeline jobs, defaulting to: {}'.format(exclusions))
            logger.warning(msg)
            warn(msg)
        else:
            logger.info(
                'QA/QC using the following '
                'pipeline input for excl_dict: {}'.format(exclusions))

        return exclusions
Example #16
0
    def area_filter_kernel(self):
        """Get the minimum area filter kernel name ('queen' or 'rook')."""
        kernel = self._config.get('area_filter_kernel', 'PIPELINE')

        if kernel != 'PIPELINE':
            return kernel

        try:
            kernel = Pipeline.parse_previous(
                self._out_root, 'qa-qc', target='area_filter_kernel',
                target_module='supply-curve-aggregation')[0]
        except KeyError:
            # No upstream value - fall back to the class default and warn.
            kernel = self._default_area_filter_kernel
            msg = ('Could not parse area_filter_kernel from previous '
                   'pipeline jobs, defaulting to: {}'.format(kernel))
            logger.warning(msg)
            warn(msg)
        else:
            logger.info('QA/QC using the following '
                        'pipeline input for area_filter_kernel: {}'.format(
                            kernel))

        return kernel