def fpath(self):
    """Get the reV module output filepath(s).

    Returns
    -------
    fpaths : str | list
        One or more filepaths output by current module being QA'd
    """
    fpath = self._config['fpath']

    if fpath == 'PIPELINE':
        # Resolve the filepath from the status of this module's own
        # upstream pipeline step; self._name is the module being QA'd.
        for module in (self._name,):
            try:
                fpath = Pipeline.parse_previous(
                    self._out_root, 'qa-qc', target='fpath',
                    target_module=module)
                break
            except KeyError:
                # This upstream module has no recorded fpath; keep looking.
                continue

        if fpath == 'PIPELINE':
            # Nothing upstream produced an fpath target.
            raise PipelineError('Could not parse fpath from previous '
                                'pipeline jobs.')
        else:
            logger.info('QA/QC using the following '
                        'pipeline input for fpath: {}'.format(fpath))

    return fpath
def gen_fpath(self):
    """Get the generation data filepath."""
    fpath = self['gen_fpath']

    if fpath == 'PIPELINE':
        # Walk candidate upstream modules (most-aggregated first) until
        # one of them has a recorded output filepath in the status files.
        for module in ('multi-year', 'collect', 'generation'):
            try:
                fpath = Pipeline.parse_previous(
                    self.dirout, 'supply-curve-aggregation',
                    target='fpath', target_module=module)[0]
                break
            except KeyError:
                # No status entry for this module; try the next one.
                continue

        if fpath == 'PIPELINE':
            # No upstream module could supply the generation filepath.
            raise PipelineError('Could not parse gen_fpath from previous '
                                'pipeline jobs.')
        else:
            logger.info('Supply curve aggregation using the following '
                        'pipeline input for gen_fpath: {}'.format(fpath))

    return fpath
def rev_summary(self):
    """Get the rev summary input arg."""
    fpath = self['rev_summary']

    if fpath == 'PIPELINE':
        # Try to pull the summary filepath out of the status of the
        # upstream aggregation / supply-curve modules, in that order.
        for module in ('aggregation', 'supply-curve'):
            try:
                fpath = Pipeline.parse_previous(
                    self.dirout, 'rep-profiles', target='fpath',
                    target_module=module)[0]
                break
            except KeyError:
                # Module not present in the pipeline status; keep looking.
                continue

        if fpath == 'PIPELINE':
            # Neither upstream module had a usable fpath entry.
            raise PipelineError('Could not parse rev_summary from '
                                'previous pipeline jobs.')
        else:
            logger.info('Rep profiles using the following '
                        'pipeline input for rev_summary: {}'.format(fpath))

    return fpath
def econ_fpath(self):
    """Get the econ data filepath. This is an optional argument only used
    if reV gen and econ outputs are being used from different files."""
    # Optional input: None when the user did not specify econ_fpath.
    fpath = self.get('econ_fpath', None)

    if fpath == 'PIPELINE':
        # Walk candidate upstream modules until one has a recorded
        # output filepath in the pipeline status files.
        for module in ('multi-year', 'collect', 'econ'):
            try:
                fpath = Pipeline.parse_previous(
                    self.dirout, 'supply-curve-aggregation',
                    target='fpath', target_module=module)[0]
                break
            except KeyError:
                # No status entry for this module; try the next one.
                continue

        if fpath == 'PIPELINE':
            # No upstream module could supply the econ filepath.
            raise PipelineError('Could not parse econ_fpath from previous '
                                'pipeline jobs.')
        else:
            logger.info('Supply curve aggregation using the following '
                        'pipeline input for econ_fpath: {}'.format(fpath))

    return fpath
def _check_dirout_status(self): """Check unique status file in dirout.""" if os.path.exists(self.dirout): for fname in os.listdir(self.dirout): if (fname.endswith('_status.json') and fname != '{}_status.json'.format(self.name)): msg = ('Cannot run pipeline "{}" in directory ' '{}. Another pipeline appears to have ' 'been run here with status json: {}' .format(self.name, self.dirout, fname)) raise PipelineError(msg)
def _run_pipelines(self, monitor_background=False, verbose=False): """Run the reV pipeline modules for each batch job. Parameters ---------- monitor_background : bool Flag to monitor all batch pipelines continuously in the background using the nohup command. Note that the stdout/stderr will not be captured, but you can set a pipeline "log_file" to capture logs. verbose : bool Flag to turn on debug logging for the pipelines. """ for d in self.sub_dirs: pipeline_config = os.path.join( d, os.path.basename(self._config.pipeline_config)) if not os.path.isfile(pipeline_config): raise PipelineError('Could not find pipeline config to run: ' '"{}"'.format(pipeline_config)) elif monitor_background: pipeline_monitor_background(pipeline_config, verbose=verbose) else: Pipeline.run(pipeline_config, monitor=False, verbose=verbose)