def setup_framework(args, pipeline=QuickLookPipeline):
    """Instantiate and configure a DRP ``Framework`` for the vysosdrp package.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed command-line arguments. Reads ``config_file``,
        ``pipeline_config_file``, ``overwrite`` and ``norecord``.
    pipeline : class
        Pipeline class to run inside the framework (default
        ``QuickLookPipeline``).

    Returns
    -------
    Framework
        A fully initialized framework with loggers attached.

    Exits the process (``sys.exit(1)``) if framework initialization fails.
    """
    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'vysosdrp'
    framework_config_file = "framework.cfg"
    framework_config_fullpath = pkg_resources.resource_filename(
        pkg, framework_config_file)

    framework_logcfg_file = 'logger.cfg'
    framework_logcfg_fullpath = pkg_resources.resource_filename(
        pkg, framework_logcfg_file)

    # add PIPELINE specific config files
    # NOTE(review): the guard tests ``args.config_file`` but the else-branch
    # reads ``args.pipeline_config_file`` — presumably two distinct CLI
    # options; verify against the argument parser.
    if args.config_file is None:
        pipeline_config_file = 'pipeline.cfg'
        pipeline_config_fullpath = pkg_resources.resource_filename(
            pkg, pipeline_config_file)
        pipeline_config = ConfigClass(pipeline_config_fullpath,
                                      default_section='DEFAULT')
    else:
        pipeline_config = ConfigClass(args.pipeline_config_file,
                                      default_section='DEFAULT')

    # Command-line flags override the config file's Telescope section.
    if args.overwrite is True:
        pipeline_config.set('Telescope', 'overwrite', value='True')
    if args.norecord is True:
        pipeline_config.set('Telescope', 'norecord', value='True')
    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        # BUG FIX: use the ``pipeline`` argument instead of the hard-coded
        # QuickLookPipeline, so callers can actually select a pipeline.
        framework = Framework(pipeline, framework_config_fullpath)
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = pipeline_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # this part defines a specific logger for the pipeline, so that we can
    # separate the output of the pipeline from the output of the framework
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="pipeline")
    framework.logger = getLogger(framework_logcfg_fullpath, name="DRPF")
    framework.logger.info("Framework initialized")

    return framework
def init_context():
    """Build a ``ProcessingContext`` backed by a default config and logger.

    Returns
    -------
    ProcessingContext
        Context wired to two fresh local event queues.
    """
    configuration = ConfigClass()
    drp_logger = getLogger(configuration.logger_config_file, name="DRPF")
    # The regular event queue can be local or shared via proxy manager;
    # here both queues are simple local ones.
    regular_queue = SimpleEventQueue()
    high_priority_queue = SimpleEventQueue()
    return ProcessingContext(regular_queue, high_priority_queue,
                             drp_logger, configuration)
def setup_framework(args, pipeline=IngestPipeline):
    """Instantiate and configure a DRP ``Framework`` for the iqmon package.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed command-line arguments. Reads ``config_file`` and
        ``pipeline_config_file``.
    pipeline : class
        Pipeline class to run inside the framework (default
        ``IngestPipeline``).

    Returns
    -------
    Framework
        A fully initialized framework with loggers attached.

    Exits the process (``sys.exit(1)``) if framework initialization fails.
    """
    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'iqmon'
    framework_config_file = "configs/framework.cfg"
    framework_config_fullpath = pkg_resources.resource_filename(
        pkg, framework_config_file)

    framework_logcfg_file = 'configs/logger_ingest.cfg'
    framework_logcfg_fullpath = pkg_resources.resource_filename(
        pkg, framework_logcfg_file)

    # add PIPELINE specific config files
    # NOTE(review): guard tests ``args.config_file`` but the else-branch reads
    # ``args.pipeline_config_file`` — verify against the argument parser.
    if args.config_file is None:
        pipeline_config_file = 'configs/pipeline.cfg'
        pipeline_config_fullpath = pkg_resources.resource_filename(
            pkg, pipeline_config_file)
        pipeline_config = ConfigClass(pipeline_config_fullpath,
                                      default_section='DEFAULT')
    else:
        pipeline_config = ConfigClass(args.pipeline_config_file,
                                      default_section='DEFAULT')
    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        # BUG FIX: use the ``pipeline`` argument instead of the hard-coded
        # IngestPipeline, so callers can actually select a pipeline.
        framework = Framework(pipeline, framework_config_fullpath)
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = pipeline_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # this part defines a specific logger for the pipeline, so that we can
    # separate the output of the pipeline from the output of the framework
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="pipeline")
    framework.logger = getLogger(framework_logcfg_fullpath, name="DRPF")
    framework.logger.info("Framework initialized")

    return framework
def test_as_object():
    """Verify a Framework can be built from a pipeline *instance*.

    Here the config is an object, the context is an object, and the
    pipeline passed to ``Framework`` is an instantiated object rather
    than a class name.
    """
    cfg = ConfigClass()
    drpf_logger = getLogger(cfg.logger_config_file, name="DRPF")
    ctx = ProcessingContext(event_queue=None,
                            event_queue_hi=None,
                            logger=drpf_logger,
                            config=cfg)
    pipeline_instance = fits2png_pipeline.Fits2pngPipeline(ctx)
    framework = Framework(pipeline_instance, "example_config.cfg")
    assert framework is not None, \
        "Could not create framework using instance of class"
def __init__(self, pipeline_name, configFile, testing=False):
    """Initialize the framework.

    Parameters
    ----------
    pipeline_name : str or object
        Name (or instance — see ``find_pipeline``) of the pipeline class
        containing the recipes to run.
    configFile : str, ConfigClass-like, or None
        Path to a config file, an already-built config object, or None
        for defaults.
    testing : bool
        Flag stored on the instance; consumed elsewhere in the class.

    Creates the event queue and the action queue, builds the processing
    context, and locates the pipeline.

    Raises
    ------
    Exception
        If the pipeline cannot be found/initialized.
    """
    # Accept a path, an existing config object, or nothing at all.
    if configFile is None:
        self.config = ConfigClass()
    elif isinstance(configFile, str):
        self.config = ConfigClass(configFile)
    else:
        self.config = configFile

    self.testing = testing

    self.logger = getLogger(self.config.logger_config_file, name="DRPF")
    self.logger.info("")
    self.logger.info("Initialization Framework cwd={}".format(os.getcwd()))

    self.wait_for_event = False
    # The high priority event queue is local to the process
    self.event_queue_hi = queues.SimpleEventQueue()

    # The regular event queue can be local or shared via queue manager
    self.queue_manager = None
    self.event_queue = self._get_event_queue()

    # The done_queue (filled in later; None until then)
    self.done_queue = None

    # The handlers — overridable callbacks for exit/state/error events.
    self.on_exit = self.default_on_exit
    self.on_state = self.default_on_state
    self.on_error = self.default_on_error

    self.context = ProcessingContext(self.event_queue, self.event_queue_hi,
                                     self.logger, self.config)

    # Locate the pipeline on the configured search path; fail loudly if
    # it cannot be found.
    pipeline = find_pipeline(pipeline_name, self.config.pipeline_path,
                             self.context, self.logger)
    if pipeline is None:
        raise Exception(f"Failed to initialize pipeline {pipeline_name}")
    self.pipeline = pipeline

    self.keep_going = True
    self.init_signal()
    self.store_arguments = Arguments()
def create_context(event_queue=None, event_queue_hi=None, logger=None,
                   config=None):
    """Create a ``ProcessingContext`` for use *without* the framework.

    Convenience helper — useful in Jupyter notebooks, for example.
    Any argument left as ``None`` is replaced with a sensible default.

    Returns
    -------
    ProcessingContext
    """
    # Fill in defaults only where the caller supplied nothing.
    config = ConfigClass() if config is None else config
    if logger is None:
        logger = getLogger(config.logger_config_file, name="DRPF")
    event_queue_hi = (queues.SimpleEventQueue()
                      if event_queue_hi is None else event_queue_hi)
    event_queue = (queues.SimpleEventQueue()
                   if event_queue is None else event_queue)
    return ProcessingContext(event_queue, event_queue_hi, logger, config)
def main():
    """Entry point for the KCWI reduction script.

    Parses the command line, loads framework/logger/instrument configs,
    builds the Framework, ingests data (single frames, a file list, or a
    directory), optionally groups frames by image type, and starts the
    framework event loop.
    """

    def process_subset(in_subset):
        # Queue a recurrent 'next_file' event for each frame in a
        # pandas-style subset (iterates its .index).
        for in_frame in in_subset.index:
            arguments = Arguments(name=in_frame)
            framework.append_event('next_file', arguments, recurrent=True)

    def process_list(in_list):
        # Queue a recurrent 'next_file' event for each frame in a plain list.
        for in_frame in in_list:
            arguments = Arguments(name=in_frame)
            framework.append_event('next_file', arguments, recurrent=True)

    args = _parse_arguments(sys.argv)

    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'kcwidrp'

    # check for the logs diretory
    check_directory("logs")
    # check for the plots directory
    check_directory("plots")

    framework_config_file = "configs/framework.cfg"
    framework_config_fullpath = \
        pkg_resources.resource_filename(pkg, framework_config_file)

    framework_logcfg_file = 'configs/logger.cfg'
    framework_logcfg_fullpath = \
        pkg_resources.resource_filename(pkg, framework_logcfg_file)

    # add kcwi specific config files
    # make changes here to allow this file
    # to be loaded from the command line
    if args.kcwi_config_file is None:
        kcwi_config_file = 'configs/kcwi.cfg'
        kcwi_config_fullpath = pkg_resources.resource_filename(
            pkg, kcwi_config_file)
        kcwi_config = ConfigClass(kcwi_config_fullpath,
                                  default_section='KCWI')
    else:
        # kcwi_config_fullpath = os.path.abspath(args.kcwi_config_file)
        kcwi_config = ConfigClass(args.kcwi_config_file,
                                  default_section='KCWI')
    # END HANDLING OF CONFIGURATION FILES ##########

    # Add current working directory to config info
    kcwi_config.cwd = os.getcwd()

    # check for the output directory
    check_directory(kcwi_config.output_directory)

    try:
        framework = Framework(Kcwi_pipeline, framework_config_fullpath)
        # add this line ONLY if you are using a local logging config file
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = kcwi_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # Separate pipeline logging from framework logging.
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="KCWI")
    framework.logger = getLogger(framework_logcfg_fullpath, name="DRPF")

    if args.infiles is not None:
        framework.config.file_type = args.infiles

    # check for taperfrac argument — only applied if the instrument config
    # already defines a TAPERFRAC default.
    if args.taperfrac:
        def_tf = getattr(framework.config.instrument, 'TAPERFRAC', None)
        if def_tf is not None:
            framework.context.pipeline_logger.info(
                "Setting new taperfrac = %.3f" % args.taperfrac)
            framework.config.instrument.TAPERFRAC = args.taperfrac

    # check for atlas line list argument — same guard as taperfrac.
    if args.atlas_line_list:
        def_ll = getattr(framework.config.instrument, 'LINELIST', None)
        if def_ll is not None:
            framework.context.pipeline_logger.info(
                "Using line list %s instead of generated list"
                % args.atlas_line_list)
            framework.config.instrument.LINELIST = args.atlas_line_list

    # start the bokeh server is requested by the configuration parameters
    if framework.config.instrument.enable_bokeh is True:
        if check_running_process(process='bokeh') is False:
            subprocess.Popen('bokeh serve', shell=True)
            # --session-ids=unsigned --session-token-expiration=86400',
            # shell=True)
            time.sleep(5)
        # subprocess.Popen('open http://localhost:5006?bokeh-session-id=kcwi',
        # shell=True)

    # initialize the proctab and read it
    framework.context.proctab = Proctab(framework.logger)
    framework.context.proctab.read_proctab(tfil=args.proctab)

    framework.logger.info("Framework initialized")

    # add a start_bokeh event to the processing queue,
    # if requested by the configuration parameters
    if framework.config.instrument.enable_bokeh:
        framework.append_event('start_bokeh', None)

    # important: to be able to use the grouping mode, we need to reset
    # the default ingestion action to no-event,
    # otherwise the system will automatically trigger the next_file event
    # which initiates the processing
    if args.group_mode is True:
        # set the default ingestion event to None
        framework.config.default_ingestion_event = "add_only"

    # start queue manager only (useful for RPC)
    if args.queue_manager_only:
        # The queue manager runs for ever.
        framework.logger.info("Starting queue manager only, no processing")
        framework.start_queue_manager()
    # in the next two ingest_data command, if we are using standard mode,
    # the first event generated is next_file.
    # if we are in groups mode (aka smart mode), then the first event
    # generated is no_event then, manually, a next_file event is generated
    # for each group specified in the variable imtypes
    # single frame processing
    elif args.frames:
        framework.ingest_data(None, args.frames, False)
    # processing of a list of files contained in a file
    elif args.file_list:
        frames = []
        with open(args.file_list) as file_list:
            for frame in file_list:
                # skip any line containing a '#' (comment marker)
                if "#" not in frame:
                    frames.append(frame.strip('\n'))
        framework.ingest_data(None, frames, False)
    # ingest an entire directory, trigger "next_file" (which is an option
    # specified in the config file) on each file,
    # optionally continue to monitor if -m is specified
    elif args.dirname is not None:
        framework.ingest_data(args.dirname, None, args.monitor)

    # implement the group mode
    if args.group_mode is True:
        data_set = framework.context.data_set

        # remove focus images and ccd clearing images from the dataset
        focus_frames = data_set.data_table[
            data_set.data_table.OBJECT == "focus"].index
        ccdclear_frames = data_set.data_table[
            data_set.data_table.OBJECT == "Clearing ccd"].index
        data_set.data_table.drop(focus_frames, inplace=True)
        data_set.data_table.drop(ccdclear_frames, inplace=True)

        # processing: calibrations first, science objects last
        imtypes = ['BIAS', 'CONTBARS', 'ARCLAMP', 'FLATLAMP', 'DOMEFLAT',
                   'TWIFLAT', 'OBJECT']
        for imtype in imtypes:
            subset = data_set.data_table[
                framework.context.data_set.data_table.IMTYPE == imtype]
            if 'OBJECT' in imtype:
                # Ensure that standards are processed first
                object_order = []
                standard_order = []
                for frame in subset.index:
                    if is_file_kcwi_std(frame,
                                        logger=framework.context.logger):
                        standard_order.append(frame)
                    else:
                        object_order.append(frame)
                order = standard_order + object_order  # Standards first
                process_list(order)
            else:
                process_subset(subset)

    framework.start(args.queue_manager_only, args.ingest_data_only,
                    args.wait_for_event, args.continuous)
def main():
    """Entry point for the fits2png pipeline script.

    Loads framework/logger/pipeline configs, builds the Framework, ingests
    the requested frames or directory, queues a contact-sheet event, and
    starts the framework event loop.

    Exits the process (``sys.exit(1)``) if framework initialization fails.
    """
    args = _parseArguments(sys.argv)

    # load the framework config file from the config directory of this package
    # NOTE(review): unlike the sibling entry points, these are relative paths
    # not resolved via pkg_resources — presumably run from the package root;
    # confirm before relying on it elsewhere.
    framework_config_file = "configs/config.cfg"

    # load the logger config file from the config directory of this package
    framework_logcfg_file = 'configs/logger.cfg'

    # add PIPELINE specific config files: use the packaged fits2png.cfg or
    # the one given on the command line with the option -c
    # NOTE(review): guard tests ``args.config_file`` but the else-branch reads
    # ``args.pipeline_config_file`` — verify against the argument parser.
    if args.config_file is None:
        pipeline_config_file = 'configs/fits2png.cfg'
        pipeline_config = ConfigClass(pipeline_config_file)
    else:
        pipeline_config = ConfigClass(args.pipeline_config_file)
    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        framework = Framework(Fits2pngPipeline, framework_config_file)
        logging.config.fileConfig(framework_logcfg_file)
        framework.config.instrument = pipeline_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # this part defines a specific logger for the pipeline, so that
    # we can separate the output of the pipeline
    # from the output of the framework
    framework.logger = getLogger(framework_logcfg_file, name="DRPF")
    framework.logger.info("Framework initialized")

    # start queue manager only (useful for RPC)
    if args.queue_manager_only:
        # The queue manager runs for ever.
        framework.logger.info("Starting queue manager only, no processing")
        framework.start_queue_manager()
    # frames processing
    elif args.frames:
        # BUG FIX: the original looped ``for frame in args.frames`` and
        # ingested the *whole* list on every iteration, ingesting each frame
        # len(args.frames) times. Ingest the list exactly once, triggering
        # the default ingestion event specified in the configuration file.
        framework.ingest_data(None, args.frames, False)
        # manually triggering an event upon ingestion, if desired:
        # arguments = Arguments(name=frame)
        # framework.append_event('template', arguments)
    # ingest an entire directory, trigger "next_file" on each file,
    # optionally continue to monitor if -m is specified
    elif (len(args.infiles) > 0) or args.dirname is not None:
        framework.ingest_data(args.dirname, args.infiles, args.monitor)

    # Queue a contact-sheet event covering everything just ingested.
    nfiles = framework.context.data_set.get_size()
    cfg = framework.config
    cargs = Arguments(cnt=nfiles, out_name="test.html", pattern="*.png",
                      dir_name=cfg.output_directory)
    framework.append_event("contact_sheet", cargs)

    framework.start(args.queue_manager_only, args.ingest_data_only,
                    args.wait_for_event, args.continuous)
def init_data_set():
    """Build a ``DataSet`` over the ``test_files`` directory.

    Returns
    -------
    DataSet
        Data set wired to a default config, a DRPF logger, and a fresh
        local event queue.
    """
    cfg = ConfigClass()
    drpf_logger = getLogger(cfg.logger_config_file, name="DRPF")
    return DataSet("test_files", drpf_logger, cfg, SimpleEventQueue())
def main():
    """Entry point for the (group-mode) KCWI reduction script.

    Loads framework/logger/instrument configs, builds the Framework,
    optionally starts a bokeh server, ingests frames (from the command
    line or a file list), filters out focus/CCD-clearing images, queues
    a 'next_file' event per frame grouped by image type, and starts the
    framework event loop.

    Exits the process (``sys.exit(1)``) if framework initialization fails.
    """

    def process(subset):
        # Queue a 'next_file' event for each frame in a pandas-style
        # subset (iterates its .index).
        for frame in subset.index:
            arguments = Arguments(name=frame)
            framework.append_event('next_file', arguments)

    args = _parse_arguments(sys.argv)

    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'kcwidrp'

    # check for the logs diretory
    check_directory("logs")
    # check for the plots directory
    check_directory("plots")

    framework_config_file = "configs/framework.cfg"
    framework_config_fullpath = pkg_resources.resource_filename(
        pkg, framework_config_file)

    framework_logcfg_file = 'configs/logger.cfg'
    framework_logcfg_fullpath = pkg_resources.resource_filename(
        pkg, framework_logcfg_file)

    # add kcwi specific config files
    # make changes here to allow this file
    # to be loaded from the command line
    if args.kcwi_config_file is None:
        kcwi_config_file = 'configs/kcwi.cfg'
        kcwi_config_fullpath = pkg_resources.resource_filename(
            pkg, kcwi_config_file)
        kcwi_config = ConfigClass(kcwi_config_fullpath,
                                  default_section='KCWI')
    else:
        # FIX: removed dead local ``kcwi_config_fullpath = os.path.abspath(
        # args.kcwi_config_file)`` — it was assigned and never used.
        kcwi_config = ConfigClass(args.kcwi_config_file,
                                  default_section='KCWI')
    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        framework = Framework(Kcwi_pipeline, framework_config_fullpath)
        # add this line ONLY if you are using a local logging config file
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = kcwi_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # Separate pipeline logging from framework logging.
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="KCWI")

    # check for the REDUX directory
    check_directory(framework.config.instrument.output_directory)

    # start the bokeh server is requested by the configuration parameters
    if framework.config.instrument.enable_bokeh is True:
        if check_bokeh_server() is False:
            subprocess.Popen(
                'bokeh serve --session-ids=unsigned '
                '--session-token-expiration=86400',
                shell=True)
            time.sleep(5)
        subprocess.Popen('open http://localhost:5006?bokeh-session-id=kcwi',
                         shell=True)

    # initialize the proctab and read it
    framework.context.proctab = Proctab(framework.logger)
    framework.context.proctab.read_proctab(tfil=args.proctab)

    framework.logger.info("Framework initialized")

    # add a start_bokeh event to the processing queue, if requested by the
    # configuration parameters
    if framework.config.instrument.enable_bokeh:
        framework.append_event('start_bokeh', None)

    # set the default ingestion event to None so ingestion does not
    # auto-trigger processing; events are queued manually per group below.
    framework.config.default_ingestion_event = "no_event"

    # single frame processing
    if args.frames:
        framework.ingest_data(None, args.frames, False)
    # processing of a list of files contained in a file
    elif args.file_list:
        frames = []
        with open(args.file_list) as file_list:
            for frame in file_list:
                # skip any line containing a '#' (comment marker)
                if "#" not in frame:
                    frames.append(frame.strip('\n'))
        framework.ingest_data(None, frames, False)

    data_set = framework.context.data_set

    # remove focus images and ccd clearing images from the dataset
    focus_frames = data_set.data_table[
        data_set.data_table.OBJECT == "focus"].index
    ccdclear_frames = data_set.data_table[
        data_set.data_table.OBJECT == "Clearing ccd"].index
    data_set.data_table.drop(focus_frames, inplace=True)
    data_set.data_table.drop(ccdclear_frames, inplace=True)

    # processing: calibrations first, science objects last
    imtypes = ['BIAS', 'CONTBARS', 'ARCLAMP', 'FLATLAMP', 'OBJECT']
    for imtype in imtypes:
        subset = data_set.data_table[
            framework.context.data_set.data_table.IMTYPE == imtype]
        process(subset)

    # qm_only=False, ingest_data_only=False, wait_for_event=True,
    # continuous=True
    framework.start(False, False, True, True)