Esempio n. 1
0
def test_as_string():
    """Create a Framework from a pipeline name given as a string.

    The name is resolved against the configured pipeline_path.
    """
    config = ConfigClass()
    config.pipeline_path = ("", "keckdrpframework.examples.pipelines")
    framework = Framework("fits2png_pipeline", config)
    assert framework is not None, "Could not create framework as a string"
Esempio n. 2
0
    def __init__(self, pipeline_name, configFile, testing=False):
        """
        Initialize the framework: configuration, logging, event queues,
        processing context and the pipeline itself.

        pipeline_name: name of the pipeline class containing recipes;
            resolved via find_pipeline along config.pipeline_path.
        configFile: None (use built-in defaults), a path to a config
            file, or an already-built config object.
        testing: when True, flags the framework as running under tests.

        Creates the event_queue and the action queue.
        """
        # Accept None, a file path, or a ready-made config object
        if configFile is None:
            self.config = ConfigClass()
        elif isinstance(configFile, str):
            self.config = ConfigClass(configFile)
        else:
            self.config = configFile

        self.testing = testing

        self.logger = getLogger(self.config.logger_config_file, name="DRPF")
        self.logger.info("")
        self.logger.info("Initialization Framework cwd={}".format(os.getcwd()))

        self.wait_for_event = False
        # The high priority event queue is local to the process
        self.event_queue_hi = queues.SimpleEventQueue()

        # The regular event queue can be local or shared via queue manager
        self.queue_manager = None
        self.event_queue = self._get_event_queue()

        # The done_queue (populated later; None until needed)
        self.done_queue = None

        # The handlers (callers may override these defaults)
        self.on_exit = self.default_on_exit
        self.on_state = self.default_on_state
        self.on_error = self.default_on_error

        self.context = ProcessingContext(self.event_queue, self.event_queue_hi,
                                         self.logger, self.config)

        pipeline = find_pipeline(pipeline_name, self.config.pipeline_path,
                                 self.context, self.logger)

        if pipeline is None:
            raise Exception(f"Failed to initialize pipeline {pipeline_name}")

        self.pipeline = pipeline

        self.keep_going = True
        self.init_signal()
        self.store_arguments = Arguments()
Esempio n. 3
0
def change_directory():
    """Point the running framework at a new input directory.

    Uses --input when given; otherwise derives today's UT data directory
    (optionally the Calibration or AutoFlat subdirectory), creates it if
    needed, and posts set_file_type / update_directory events to the
    shared queue manager.
    """
    args = _parseArguments(sys.argv)
    # Bug fix: "is not ''" compares identity, not equality, and is
    # unreliable (SyntaxWarning on modern Python); use != instead.
    if args.input != '':
        newdir = Path(args.input).expanduser().absolute()
    else:
        date_string = datetime.utcnow().strftime('%Y%m%dUT')
        newdir = Path(f'~/V20Data/Images/{date_string}').expanduser()
        if args.cals:
            newdir = Path(
                f'~/V20Data/Images/{date_string}/Calibration').expanduser()
        if args.flats:
            newdir = Path(
                f'~/V20Data/Images/{date_string}/AutoFlat').expanduser()

    args.input = str(newdir)
    if not newdir.exists():
        newdir.mkdir(parents=True)

    framework_config_fullpath = pkg_resources.resource_filename(
        "vysosdrp", "framework.cfg")
    cfg = ConfigClass(framework_config_fullpath)
    queue = queues.get_event_queue(cfg.queue_manager_hostname,
                                   cfg.queue_manager_portnr,
                                   cfg.queue_manager_auth_code)

    if queue is None:
        print("Failed to connect to Queue Manager")
    else:
        # NOTE(review): get_pending() return value is unused -- confirm
        # the call is needed for its side effect.
        queue.get_pending()
        queue.put(Event("set_file_type", args))
        queue.put(Event("update_directory", args))
Esempio n. 4
0
def analyze_one():
    """Queue a single file for analysis via the shared queue manager.

    Validates that --input exists, optionally posts a set_overwrite
    event, then posts a next_file event for the file.
    """
    args = _parseArguments(sys.argv)
    p = Path(args.input).expanduser().absolute()
    if not p.exists():
        print(f'Unable to find file: {p}')
        return
    args.name = f"{p}"

    framework_config_fullpath = pkg_resources.resource_filename(
        "vysosdrp", "framework.cfg")
    cfg = ConfigClass(framework_config_fullpath)
    queue = queues.get_event_queue(cfg.queue_manager_hostname,
                                   cfg.queue_manager_portnr,
                                   cfg.queue_manager_auth_code)
    if queue is None:
        print("Failed to connect to Queue Manager")
        return

    if args.overwrite:
        # NOTE(review): get_pending() return value is unused -- confirm
        # the call is needed for its side effect.
        queue.get_pending()
        queue.put(Event("set_overwrite", args))

    queue.get_pending()
    queue.put(Event("next_file", args))
Esempio n. 5
0
def change_directory():
    """Point the running framework at a new input directory.

    Uses --input when given; otherwise derives today's ingest directory
    from the instrument FileHandling configuration, creates it if
    needed, and posts set_file_type / update_directory events to the
    shared queue manager.
    """
    args = _parseArguments(sys.argv)
    # Bug fix: "is not ''" compares identity, not equality, and is
    # unreliable (SyntaxWarning on modern Python); use != instead.
    if args.input != '':
        newdir = Path(args.input).expanduser().absolute()
    else:
        now = datetime.utcnow()
        # NOTE(review): 'framework' is not defined in this function --
        # presumably a module-level global; confirm it exists here.
        data_path = framework.config.instrument.get('FileHandling',
                                                    'ingest_dir')
        data_path = data_path.replace('YYYY', f'{now.year:4d}')
        data_path = data_path.replace('MM', f'{now.month:02d}')
        data_path = data_path.replace('DD', f'{now.day:02d}')
        newdir = Path(data_path).expanduser()

    args.input = str(newdir)
    if not newdir.exists():
        newdir.mkdir(parents=True)

    pkg = 'iqmon'
    framework_config_file = "configs/framework.cfg"
    framework_config_fullpath = pkg_resources.resource_filename(
        pkg, framework_config_file)
    cfg = ConfigClass(framework_config_fullpath)
    queue = queues.get_event_queue(cfg.queue_manager_hostname,
                                   cfg.queue_manager_portnr,
                                   cfg.queue_manager_auth_code)

    if queue is None:
        print("Failed to connect to Queue Manager")
    else:
        # NOTE(review): get_pending() return value is unused -- confirm
        # the call is needed for its side effect.
        queue.get_pending()
        queue.put(Event("set_file_type", args))
        queue.put(Event("update_directory", args))
Esempio n. 6
0
    def __init__(self, pipeline_name, configFile):
        """
        Initialize the framework: configuration, logging, event queues,
        processing context and the pipeline itself.

        pipeline_name: name of the pipeline class containing recipes;
            resolved via find_pipeline along config.pipeline_path.
        configFile: path to the framework configuration file.

        Creates the event_queue and the action queue.
        """

        self.config = ConfigClass(configFile)
        self.logger = getLogger(self.config.logger_config_file, name="DRPF")

        self.wait_for_event = False
        # The high priority event queue is local to the process
        self.event_queue_hi = queues.Simple_event_queue()

        # The regular event queue can be local or shared via proxy manager
        self.queue_manager = None
        self.event_queue = self._get_event_queue()

        # Resolve the pipeline by name along the configured pipeline path
        pipeline = find_pipeline(pipeline_name, self.config.pipeline_path,
                                 self.logger)
        if pipeline is None:
            raise Exception("Failed to initialize pipeline")

        pipeline.set_logger(self.logger)

        self.pipeline = pipeline
        self.context = Processing_context(self.event_queue,
                                          self.event_queue_hi, self.logger,
                                          self.config)
        self.keep_going = True
        self.init_signal()
        self.store_arguments = Arguments()
def init_context():
    """Build a ProcessingContext with default config, logger and queues."""
    cfg = ConfigClass()
    log = getLogger(cfg.logger_config_file, name="DRPF")
    low_priority_queue = SimpleEventQueue()
    high_priority_queue = SimpleEventQueue()

    # The regular event queue can be local or shared via proxy manager
    return ProcessingContext(low_priority_queue, high_priority_queue, log, cfg)
Esempio n. 8
0
def test_as_class2():
    """Create a Framework from an imported pipeline class.

    The config is passed as an object.
    """
    config = ConfigClass()
    pipeline_class = fits2png_pipeline.Fits2pngPipeline
    framework = Framework(pipeline_class, config)
    assert framework is not None, "Could not create framework using class"
Esempio n. 9
0
def setup_framework(args, pipeline=IngestPipeline):
    """Build and configure a Framework around the given pipeline.

    args: parsed command-line arguments (config_file / pipeline_config_file
        are read here).
    pipeline: pipeline class to run (defaults to IngestPipeline).

    Returns the initialized framework with its instrument config and
    pipeline/framework loggers attached. Exits the process on failure.
    """
    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'iqmon'
    framework_config_file = "configs/framework.cfg"
    framework_config_fullpath = pkg_resources.resource_filename(
        pkg, framework_config_file)

    framework_logcfg_file = 'configs/logger_ingest.cfg'
    framework_logcfg_fullpath = pkg_resources.resource_filename(
        pkg, framework_logcfg_file)

    # add PIPELINE specific config files
    if args.config_file is None:
        pipeline_config_file = 'configs/pipeline.cfg'
        pipeline_config_fullpath = pkg_resources.resource_filename(
            pkg, pipeline_config_file)
        pipeline_config = ConfigClass(pipeline_config_fullpath,
                                      default_section='DEFAULT')
    else:
        # NOTE(review): the guard tests args.config_file but reads
        # args.pipeline_config_file -- confirm both attributes exist.
        pipeline_config = ConfigClass(args.pipeline_config_file,
                                      default_section='DEFAULT')

    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        # Bug fix: honor the 'pipeline' parameter instead of the
        # hard-coded IngestPipeline (the default preserves old behavior).
        framework = Framework(pipeline, framework_config_fullpath)
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = pipeline_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # this part defines a specific logger for the pipeline, so that we can
    # separate the output of the pipeline from the output of the framework
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="pipeline")
    framework.logger = getLogger(framework_logcfg_fullpath, name="DRPF")
    framework.logger.info("Framework initialized")

    return framework
Esempio n. 10
0
def test_as_object():
    """Create a Framework from a pipeline instance.

    The config and context are objects; the pipeline is an instance
    built from them.
    """
    cfg = ConfigClass()
    log = getLogger(cfg.logger_config_file, name="DRPF")
    ctx = ProcessingContext(event_queue=None, event_queue_hi=None,
                            logger=log, config=cfg)
    pipeline = fits2png_pipeline.Fits2pngPipeline(ctx)
    framework = Framework(pipeline, "example_config.cfg")
    assert framework is not None, "Could not create framework using instance of class"
def init_framework():
    """Initialize a framework for testing.

    This is a non-multiprocessing framework; see example_config.cfg.
    The pipeline 'Fits2pngPipeline' is imported directly; see
    test_framework.py for other initialization options.
    """
    framework = Framework(Fits2pngPipeline, "example_config.cfg")
    assert framework is not None, "Could not create framework"

    # Make sure the event loop terminates when the queue drains
    framework.config.no_event_event = None
    framework.config.fits2png = ConfigClass("../examples/fits2png.cfg")
    return framework
Esempio n. 12
0
def list_queue():
    """List the events pending in the shared framework event queue.

    Connects via the packaged framework.cfg and, when --verbose is
    given, prints each pending event.
    """
    args = _parseArguments(sys.argv)
    framework_config_fullpath = pkg_resources.resource_filename(
        "vysosdrp", "framework.cfg")
    cfg = ConfigClass(framework_config_fullpath)
    drpif = FrameworkInterface(cfg)
    # Print pending Events
    if drpif.is_queue_ok():
        events = drpif.pending_events()
        print(f'Found {len(events)} in queue')
        # Idiom fix: truthiness instead of comparing to True with "is"
        if args.verbose:
            for event in events:
                print(event)
    else:
        print("Pending events: Queue not available", drpif.queue)
Esempio n. 13
0
def setup_framework(args, pipeline=QuickLookPipeline):
    """Build and configure a Framework around the given pipeline.

    args: parsed command-line arguments (config_file,
        pipeline_config_file, overwrite, norecord are read here).
    pipeline: pipeline class to run (defaults to QuickLookPipeline).

    Returns the initialized framework with its instrument config and
    pipeline/framework loggers attached. Exits the process on failure.
    """
    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'vysosdrp'
    framework_config_file = "framework.cfg"
    framework_config_fullpath = pkg_resources.resource_filename(
        pkg, framework_config_file)

    framework_logcfg_file = 'logger.cfg'
    framework_logcfg_fullpath = pkg_resources.resource_filename(
        pkg, framework_logcfg_file)

    # add PIPELINE specific config files
    if args.config_file is None:
        pipeline_config_file = 'pipeline.cfg'
        pipeline_config_fullpath = pkg_resources.resource_filename(
            pkg, pipeline_config_file)
        pipeline_config = ConfigClass(pipeline_config_fullpath,
                                      default_section='DEFAULT')
    else:
        # NOTE(review): the guard tests args.config_file but reads
        # args.pipeline_config_file -- confirm both attributes exist.
        pipeline_config = ConfigClass(args.pipeline_config_file,
                                      default_section='DEFAULT')

    # Idiom fix: truthiness instead of comparing to True with "is"
    if args.overwrite:
        pipeline_config.set('Telescope', 'overwrite', value='True')
    if args.norecord:
        pipeline_config.set('Telescope', 'norecord', value='True')

    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        # Bug fix: honor the 'pipeline' parameter instead of the
        # hard-coded QuickLookPipeline (the default preserves old behavior).
        framework = Framework(pipeline, framework_config_fullpath)
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = pipeline_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # this part defines a specific logger for the pipeline, so that we can
    # separate the output of the pipeline from the output of the framework
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="pipeline")
    framework.logger = getLogger(framework_logcfg_fullpath, name="DRPF")
    framework.logger.info("Framework initialized")

    return framework
Esempio n. 14
0
def clear_queue():
    """Report pending events, then shut down the shared queue manager."""
    args = _parseArguments(sys.argv)  # parsed for validation; not used below
    config_path = pkg_resources.resource_filename("vysosdrp", "framework.cfg")
    interface = FrameworkInterface(ConfigClass(config_path))

    # Report how many events are still pending
    if interface.is_queue_ok():
        events = interface.pending_events()
        print(f'Found {len(events)} in queue')
    else:
        print("Pending events: Queue not available", interface.queue)

    # Stop the queue manager if it is still reachable
    if interface.is_queue_ok():
        interface.stop_event_queue()
        print("Queue manager stopped")
    else:
        print("Queue manager already stopped")
Esempio n. 15
0
    def __init__(self, pipeline, configFile):
        '''
        Initialize the framework with an already-constructed pipeline.

        pipeline: a class containing recipes; its logger is set here.
        configFile: path to the framework configuration file.

        Creates the event_queue and the action queue.
        '''
        self.config = ConfigClass(configFile)
        self.logger = getLogger(self.config.logger_config_file, name="DRPF")
        pipeline.set_logger(self.logger)

        # Two queues: regular and high priority (both process-local here)
        self.event_queue = Event_queue()
        self.event_queue_hi = Event_queue()

        self.pipeline = pipeline
        # NOTE(review): Processing_context is called without the regular
        # event queue, unlike other versions of this constructor that pass
        # both queues -- confirm the intended signature.
        self.context = Processing_context(self.event_queue_hi, self.logger,
                                          self.config)
        self.keep_going = True
        self.init_signal()
        self.store_arguments = Arguments()
Esempio n. 16
0
def create_context(event_queue=None,
                   event_queue_hi=None,
                   logger=None,
                   config=None):
    """Convenience helper to build a ProcessingContext without the framework.

    Useful in Jupyter notebooks, for example. Any argument left as None
    is replaced with a sensible default.
    """
    config = ConfigClass() if config is None else config
    logger = (getLogger(config.logger_config_file, name="DRPF")
              if logger is None else logger)
    event_queue_hi = (queues.SimpleEventQueue()
                      if event_queue_hi is None else event_queue_hi)
    event_queue = (queues.SimpleEventQueue()
                   if event_queue is None else event_queue)

    return ProcessingContext(event_queue, event_queue_hi, logger, config)
Esempio n. 17
0
    parser.add_argument("-p",
                        "--port",
                        dest="portnr",
                        type=str,
                        help="Port number")

    try:
        return parser.parse_args(in_args[1:])
    except:
        # parser.print_help()
        sys.exit(0)


if __name__ == "__main__":
    # Connect to the shared queue manager (host/port from the config file,
    # overridable on the command line) and ask it to terminate.
    args = _parseArguments(sys.argv)
    cfg = ConfigClass(args.config_file)

    cfg.properties["want_multiprocessing"] = True
    hostname = cfg.queue_manager_hostname if args.hostname is None else args.hostname
    portnr = cfg.queue_manager_portnr if args.portnr is None else args.portnr
    auth_code = cfg.queue_manager_auth_code

    queue = queues.get_event_queue(hostname, portnr, auth_code)
    print(f"Hostname = {hostname}\nPort nr = {portnr}\n")
    if queue is None:
        print("Failed to connect to Queue Manager")
    else:
        try:
            queue.terminate()
        except Exception:
            # Bug fix: the bare "except:" also swallowed SystemExit and
            # KeyboardInterrupt. Terminating the remote manager drops the
            # proxy connection, so an exception here is expected.
            pass
Esempio n. 18
0
def init_data_set():
    """Create a Data_set over the 'test_files' directory with defaults."""
    config = ConfigClass()
    logger = getLogger(config.logger_config_file, name="DRPF")
    return Data_set("test_files", logger, config)
Esempio n. 19
0
def main():
    """Entry point for the KCWI data reduction pipeline.

    Loads framework, logger and instrument configuration, initializes
    the Framework with Kcwi_pipeline, ingests data (single frames, a
    file list, or a directory), optionally runs in group mode, and
    finally starts the framework loop.
    """
    # Queue each frame of a data_set subset as a recurrent next_file event
    def process_subset(in_subset):
        for in_frame in in_subset.index:
            arguments = Arguments(name=in_frame)
            framework.append_event('next_file', arguments, recurrent=True)

    # Queue each frame of a plain list as a recurrent next_file event
    def process_list(in_list):
        for in_frame in in_list:
            arguments = Arguments(name=in_frame)
            framework.append_event('next_file', arguments, recurrent=True)

    args = _parse_arguments(sys.argv)

    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'kcwidrp'

    # check for the logs diretory
    check_directory("logs")
    # check for the plots directory
    check_directory("plots")

    framework_config_file = "configs/framework.cfg"
    framework_config_fullpath = \
        pkg_resources.resource_filename(pkg, framework_config_file)

    framework_logcfg_file = 'configs/logger.cfg'
    framework_logcfg_fullpath = \
        pkg_resources.resource_filename(pkg, framework_logcfg_file)

    # add kcwi specific config files # make changes here to allow this file
    # to be loaded from the command line
    if args.kcwi_config_file is None:
        kcwi_config_file = 'configs/kcwi.cfg'
        kcwi_config_fullpath = pkg_resources.resource_filename(
            pkg, kcwi_config_file)
        kcwi_config = ConfigClass(kcwi_config_fullpath, default_section='KCWI')
    else:
        # kcwi_config_fullpath = os.path.abspath(args.kcwi_config_file)
        kcwi_config = ConfigClass(args.kcwi_config_file,
                                  default_section='KCWI')

    # END HANDLING OF CONFIGURATION FILES ##########

    # Add current working directory to config info
    kcwi_config.cwd = os.getcwd()

    # check for the output directory
    check_directory(kcwi_config.output_directory)

    try:
        framework = Framework(Kcwi_pipeline, framework_config_fullpath)
        # add this line ONLY if you are using a local logging config file
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = kcwi_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="KCWI")
    framework.logger = getLogger(framework_logcfg_fullpath, name="DRPF")

    if args.infiles is not None:
        framework.config.file_type = args.infiles

    # check for taperfrac argument
    if args.taperfrac:
        def_tf = getattr(framework.config.instrument, 'TAPERFRAC', None)
        if def_tf is not None:
            framework.context.pipeline_logger.info(
                "Setting new taperfrac = %.3f" % args.taperfrac)
            framework.config.instrument.TAPERFRAC = args.taperfrac

    # check for atlas line list argument
    if args.atlas_line_list:
        def_ll = getattr(framework.config.instrument, 'LINELIST', None)
        if def_ll is not None:
            framework.context.pipeline_logger.info(
                "Using line list %s instead of generated list" %
                args.atlas_line_list)
            framework.config.instrument.LINELIST = args.atlas_line_list

    # start the bokeh server is requested by the configuration parameters
    if framework.config.instrument.enable_bokeh is True:
        if check_running_process(process='bokeh') is False:
            subprocess.Popen('bokeh serve', shell=True)
            # --session-ids=unsigned --session-token-expiration=86400',
            # shell=True)
            time.sleep(5)
        # subprocess.Popen('open http://localhost:5006?bokeh-session-id=kcwi',
        # shell=True)

    # initialize the proctab and read it
    framework.context.proctab = Proctab(framework.logger)
    framework.context.proctab.read_proctab(tfil=args.proctab)

    framework.logger.info("Framework initialized")

    # add a start_bokeh event to the processing queue,
    # if requested by the configuration parameters
    if framework.config.instrument.enable_bokeh:
        framework.append_event('start_bokeh', None)

    # important: to be able to use the grouping mode, we need to reset
    # the default ingestion action to no-event,
    # otherwise the system will automatically trigger the next_file event
    # which initiates the processing
    if args.group_mode is True:
        # set the default ingestion event to None
        framework.config.default_ingestion_event = "add_only"

    # start queue manager only (useful for RPC)
    if args.queue_manager_only:
        # The queue manager runs for ever.
        framework.logger.info("Starting queue manager only, no processing")
        framework.start_queue_manager()

    # in the next two ingest_data command, if we are using standard mode,
    # the first event generated is next_file.
    # if we are in groups mode (aka smart mode), then the first event generated
    # is no_event then, manually, a next_file event is generated for each group
    # specified in the variable imtypes

    # single frame processing
    elif args.frames:
        framework.ingest_data(None, args.frames, False)

    # processing of a list of files contained in a file
    elif args.file_list:
        frames = []
        with open(args.file_list) as file_list:
            for frame in file_list:
                if "#" not in frame:
                    frames.append(frame.strip('\n'))
        framework.ingest_data(None, frames, False)

    # ingest an entire directory, trigger "next_file" (which is an option
    # specified in the config file) on each file,
    # optionally continue to monitor if -m is specified
    elif args.dirname is not None:

        framework.ingest_data(args.dirname, None, args.monitor)

    # implement the group mode
    if args.group_mode is True:
        data_set = framework.context.data_set

        # remove focus images and ccd clearing images from the dataset
        focus_frames = data_set.data_table[data_set.data_table.OBJECT ==
                                           "focus"].index
        ccdclear_frames = data_set.data_table[data_set.data_table.OBJECT ==
                                              "Clearing ccd"].index
        data_set.data_table.drop(focus_frames, inplace=True)
        data_set.data_table.drop(ccdclear_frames, inplace=True)

        # processing
        imtypes = [
            'BIAS', 'CONTBARS', 'ARCLAMP', 'FLATLAMP', 'DOMEFLAT', 'TWIFLAT',
            'OBJECT'
        ]

        for imtype in imtypes:
            subset = data_set.data_table[
                framework.context.data_set.data_table.IMTYPE == imtype]
            if 'OBJECT' in imtype:  # Ensure that standards are processed first
                object_order = []
                standard_order = []
                for frame in subset.index:
                    if is_file_kcwi_std(frame,
                                        logger=framework.context.logger):
                        standard_order.append(frame)
                    else:
                        object_order.append(frame)
                order = standard_order + object_order  # Standards first
                process_list(order)
            else:
                process_subset(subset)

    framework.start(args.queue_manager_only, args.ingest_data_only,
                    args.wait_for_event, args.continuous)
Esempio n. 20
0
def test_parse_kcwi_config():
    """Parse the packaged KCWI instrument configuration file."""
    config_path = pkg_resources.resource_filename(pkg, 'configs/kcwi.cfg')
    ConfigClass(config_path, default_section='KCWI')
Esempio n. 21
0
def test_parse_framework_config():
    """Parse the packaged framework configuration file."""
    config_path = pkg_resources.resource_filename(pkg, 'configs/framework.cfg')
    ConfigClass(config_path)
def main():
    """Entry point for the fits2png example pipeline.

    Loads framework, logger and pipeline configuration, initializes the
    Framework with Fits2pngPipeline, ingests frames or a directory,
    queues a contact_sheet event, and starts the framework loop.
    """
    args = _parseArguments(sys.argv)

    # load the framework config file from the config directory of this package
    # this part uses the pkg_resources package to find the full path location
    # of framework.cfg
    framework_config_file = "configs/config.cfg"

    # load the logger config file from the config directory of this package
    # this part uses the pkg_resources package to find the full path location
    # of logger.cfg
    framework_logcfg_file = 'configs/logger.cfg'

    # add PIPELINE specific config files
    # this part uses the pkg_resource package to find the full path location
    # of template.cfg or uses the one defines in the command line with the option -c
    if args.config_file is None:
        pipeline_config_file = 'configs/fits2png.cfg'
        pipeline_config = ConfigClass(pipeline_config_file)
    else:
        # NOTE(review): the guard tests args.config_file but reads
        # args.pipeline_config_file -- confirm both attributes exist.
        pipeline_config = ConfigClass(args.pipeline_config_file)

    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        framework = Framework(Fits2pngPipeline, framework_config_file)
        logging.config.fileConfig(framework_logcfg_file)
        framework.config.instrument = pipeline_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    # this part defines a specific logger for the pipeline, so that
    # we can separate the output of the pipeline
    # from the output of the framework
    framework.logger = getLogger(framework_logcfg_file, name="DRPF")

    framework.logger.info("Framework initialized")

    # start queue manager only (useful for RPC)
    if args.queue_manager_only:
        # The queue manager runs for ever.
        framework.logger.info("Starting queue manager only, no processing")
        framework.start_queue_manager()

    # frames processing
    elif args.frames:
        # Bug fix: the old loop called ingest_data(None, args.frames, ...)
        # once per frame, ingesting the whole list len(args.frames) times.
        # Ingest the list exactly once; the default ingestion event from
        # the configuration file is triggered for each file.
        framework.ingest_data(None, args.frames, False)
        # manually triggering an event upon ingestion, if desired:
        # for frame in args.frames:
        #     framework.append_event('template', Arguments(name=frame))

    # ingest an entire directory, trigger "next_file" on each file, optionally continue to monitor if -m is specified
    elif (len(args.infiles) > 0) or args.dirname is not None:
        framework.ingest_data(args.dirname, args.infiles, args.monitor)

    nfiles = framework.context.data_set.get_size()
    cfg = framework.config
    cargs = Arguments(cnt=nfiles,
                      out_name="test.html",
                      pattern="*.png",
                      dir_name=cfg.output_directory)
    framework.append_event("contact_sheet", cargs)

    framework.start(args.queue_manager_only, args.ingest_data_only,
                    args.wait_for_event, args.continuous)
def init_data_set():
    """Create a DataSet over the 'test_files' directory with defaults."""
    config = ConfigClass()
    logger = getLogger(config.logger_config_file, name="DRPF")
    return DataSet("test_files", logger, config, SimpleEventQueue())
Esempio n. 24
0
def main():
    """Entry point for the KCWI quick-reduction pipeline.

    Loads configuration, initializes the Framework with Kcwi_pipeline,
    optionally starts a bokeh server, ingests frames, filters out focus
    and ccd-clearing images, queues processing per image type, and
    starts the framework loop.
    """
    # Queue each frame of a data_set subset as a next_file event
    def process(subset):
        for frame in subset.index:
            arguments = Arguments(name=frame)
            framework.append_event('next_file', arguments)

    args = _parse_arguments(sys.argv)

    # START HANDLING OF CONFIGURATION FILES ##########
    pkg = 'kcwidrp'

    # check for the logs diretory
    check_directory("logs")
    # check for the plots directory
    check_directory("plots")

    framework_config_file = "configs/framework.cfg"
    framework_config_fullpath = pkg_resources.resource_filename(
        pkg, framework_config_file)

    framework_logcfg_file = 'configs/logger.cfg'
    framework_logcfg_fullpath = pkg_resources.resource_filename(
        pkg, framework_logcfg_file)

    # add kcwi specific config files # make changes here to allow this file
    # to be loaded from the command line
    if args.kcwi_config_file is None:
        kcwi_config_file = 'configs/kcwi.cfg'
        kcwi_config_fullpath = pkg_resources.resource_filename(
            pkg, kcwi_config_file)
        kcwi_config = ConfigClass(kcwi_config_fullpath, default_section='KCWI')
    else:
        kcwi_config_fullpath = os.path.abspath(args.kcwi_config_file)
        kcwi_config = ConfigClass(args.kcwi_config_file,
                                  default_section='KCWI')

    # END HANDLING OF CONFIGURATION FILES ##########

    try:
        framework = Framework(Kcwi_pipeline, framework_config_fullpath)
        # add this line ONLY if you are using a local logging config file
        logging.config.fileConfig(framework_logcfg_fullpath)
        framework.config.instrument = kcwi_config
    except Exception as e:
        print("Failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)
    framework.context.pipeline_logger = getLogger(framework_logcfg_fullpath,
                                                  name="KCWI")

    # check for the REDUX directory
    check_directory(framework.config.instrument.output_directory)

    # start the bokeh server is requested by the configuration parameters
    if framework.config.instrument.enable_bokeh is True:
        if check_bokeh_server() is False:
            subprocess.Popen(
                'bokeh serve --session-ids=unsigned --session-token-expiration=86400',
                shell=True)
            time.sleep(5)
        subprocess.Popen('open http://localhost:5006?bokeh-session-id=kcwi',
                         shell=True)

    # initialize the proctab and read it
    framework.context.proctab = Proctab(framework.logger)
    framework.context.proctab.read_proctab(tfil=args.proctab)

    framework.logger.info("Framework initialized")

    # add a start_bokeh event to the processing queue, if requested by the configuration parameters
    if framework.config.instrument.enable_bokeh:
        framework.append_event('start_bokeh', None)

    # set the default ingestion event to None
    framework.config.default_ingestion_event = "no_event"

    # single frame processing
    if args.frames:
        framework.ingest_data(None, args.frames, False)

    # processing of a list of files contained in a file
    elif args.file_list:
        frames = []
        with open(args.file_list) as file_list:
            for frame in file_list:
                if "#" not in frame:
                    frames.append(frame.strip('\n'))
        framework.ingest_data(None, frames, False)

    data_set = framework.context.data_set

    # remove focus images and ccd clearing images from the dataset
    focus_frames = data_set.data_table[data_set.data_table.OBJECT ==
                                       "focus"].index
    ccdclear_frames = data_set.data_table[data_set.data_table.OBJECT ==
                                          "Clearing ccd"].index
    data_set.data_table.drop(focus_frames, inplace=True)
    data_set.data_table.drop(ccdclear_frames, inplace=True)

    # processing
    imtypes = ['BIAS', 'CONTBARS', 'ARCLAMP', 'FLATLAMP', 'OBJECT']

    for imtype in imtypes:
        subset = data_set.data_table[
            framework.context.data_set.data_table.IMTYPE == imtype]
        process(subset)

    # start(queue_manager_only, ingest_data_only, wait_for_event, continuous)
    framework.start(False, False, True, True)
 def __init__(self, cfg=None):
     """Store the configuration (a fresh ConfigClass when cfg is None)
     and connect the queue via _get_queue()."""
     if cfg is None:
         cfg = ConfigClass()
     self.config = cfg
     self.queue = None
     self._get_queue()
Esempio n. 26
0
from keckdrpframework.pipelines.kcwi_pipeline import Kcwi_pipeline
from keckdrpframework.models.arguments import Arguments
import subprocess
import time

if __name__ == '__main__':

    # Usage: script <path> -- starts a bokeh server, builds a framework
    # around Kcwi_pipeline, and queues next_file events for input files.
    if len(sys.argv) >= 2:
        path = sys.argv[1]

        subprocess.Popen('bokeh serve', shell=True)
        time.sleep(2)

        pipeline = Kcwi_pipeline()
        framework = Framework(pipeline, 'KCWI_config.cfg')
        framework.config.instrument = ConfigClass("instr.cfg")
        framework.logger.info("Framework initialized")

        framework.logger.info("Checking path for files")

        if os.path.isdir(path):
            #flist = glob.glob(path + '/*.fits')
            # NOTE(review): hard-coded developer file list overrides the
            # directory scan above -- debug leftover, confirm before use.
            flist = [
                '/Users/lrizzi/KCWI_DATA_1/kb181012_00014.fits',
                '/Users/lrizzi/KCWI_DATA_1/kb181012_00016.fits'
            ]
            #flist = ['/Users/lrizzi/KCWI_DATA_1/kb181012_00016.fits']
            for f in flist:
                args = Arguments(name=f)
                framework.append_event('next_file', args)
Esempio n. 27
0
def test_config2():
    """Read a config file that uses a custom default section."""
    config = ConfigClass("my_config.cfg", default_section="CUSTOM")
    assert config.test_property == "test", f"Unexpected config property value, got {config.test_property}"
Esempio n. 28
0
def test_config1():
    """Read the simple example config file examples_config.cfg."""
    config = ConfigClass("examples_config.cfg")
    assert config.file_type == "*.fits", f"Unexpected value for file_type ({config.file_type}), expected '*.fits'"