def _perform(self):
        args = self.action.args
        img = args.img
        h, w = img.shape

        out_args = Arguments()
        out_args.name = args.name
        out_args.img = self._applyAHEC(img).reshape((h, w)).astype(dtype="uint8")
        return out_args
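The _applyAHEC helper is not shown here; a minimal sketch of one plausible implementation, assuming it wraps scikit-image's adaptive histogram equalization (CLAHE) and that the caller performs the final uint8 cast:

from skimage import exposure

def _applyAHEC(self, img):
    # Hypothetical sketch: equalize_adapthist returns floats in [0, 1],
    # so scale to 0-255 before the caller's astype("uint8").
    equalized = exposure.equalize_adapthist(img, clip_limit=0.03)
    return equalized * 255.0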
def test_recurrent_1(init_framework):
    f = init_framework

    max1 = 4
    max2 = 7
    f.append_event("test_event1", Arguments(name="dummy1", maxVal=max1), recurrent=True)
    f.append_event("test_event2", Arguments(name="dummy2", maxVal=max2), recurrent=True)

    f.main_loop()
    
    assert f.context.counter1 == max1, f"Counter1 mismatch, expected {max1}, got {f.context.counter1}"
    assert f.context.counter2 == max2, f"Counter2 mismatch, expected {max2}, got {f.context.counter2}"
    def _perform(self):
        '''
        Expects action.args.name as fits file name
        Returns HDUs or (later) data model
        '''
        name = self.action.args.name
        self.logger.info(f"Reading {name}")
        out_args = Arguments()
        out_args.name = name
        out_args.hdus = open_nowarning(name)

        return out_args
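The open_nowarning helper is not defined in this snippet; a minimal sketch, assuming it simply wraps astropy.io.fits.open while silencing the verification warnings that noisy FITS headers often trigger:

import warnings
from astropy.io import fits

def open_nowarning(filename):
    # Hypothetical helper: suppress warnings emitted while opening the file.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        return fits.open(filename)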
    def _perform(self):
        """
        Expects action.args.name as fits file name
        Returns HDUs or (later) data model
        """
        name = self.action.args.name
        self.logger.debug(f"Reading {name}")
        out_args = Arguments()
        out_args.name = name
        out_args.img = self.readData(name)

        return out_args
def test_append_new_event(init_context):
    """
    Appends new events and checks that they are retrieved in order.
    """
    pc = init_context
    pc.append_event("test", Arguments("test", a=1))
    pc.append_event("test1", Arguments("test1", a=2))
    pc.append_event("test2", Arguments("test2", a=3))

    e1 = pc.event_queue.get()
    e2 = pc.event_queue.get()
    e3 = pc.event_queue.get()
    assert e1.args.a == 1 and e2.args.a == 2 and e3.args.a == 3, "Unexpected event arguments"
    def _perform(self):
        """
        Arguments:

        dir_name: name of the directory containing the images
        pattern: pattern to select input files, e.g. "*.png"
        out_name: output file name, e.g. some_name.html

        """
        args = self.action.args
        dir_name = args.dir_name
        out_name = args.out_name
        pattern = args.pattern
        self.logger.debug(
            f"Creating contact sheet in {dir_name}, out_name={out_name}")

        flist = sorted(glob.glob(dir_name + "/" + pattern))

        out = []
        for f in flist:
            out.append(self._genEntry(f))

        os.makedirs(dir_name, exist_ok=True)
        with open(dir_name + "/" + out_name, "w") as fh:
            print("<html><body>", file=fh)
            print("\n".join(out), file=fh)
            print("</body></html>", file=fh)

        return Arguments()
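The _genEntry helper is not shown; a hedged sketch, assuming it emits one HTML fragment per image so the joined list becomes the body of the contact sheet (the markup below is illustrative):

import os

def _genEntry(self, filename):
    # Hypothetical sketch: reference the image by basename since the page
    # is written into the same directory as the images.
    short = os.path.basename(filename)
    return f'<div><img src="{short}" width="200"/><br/>{short}</div>'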
Example #7
    def append_item(self, filename):
        """
        Appends an item if it does not already exist.
        """
        if filename is None:
            self.logger.warning(f"filename is defined")
            return

        if not os.path.isfile(filename):
            self.logger.warning(f"{filename} is not a file")
            return

        if filename in self.data_table.index:
            self.logger.warning(f"{filename} is already in the table")
            return

        row = self.digest_new_item(filename)
        if row is not None:
            short = os.path.basename(filename)
            self.logger.debug(f"Appending {short} to the data set")
            self.data_table = self.data_table.append(row)
            try:
                self.event_queue.put(
                    Event(self.config.default_ingestion_event,
                          Arguments(name=filename)))
            except:
                self.logger.warning(
                    "There is no default ingestion event in the configuration file"
                )
Example #8
def process_flat(action, context):
    args = simple_fits_reader(action, context)()
    img = args.hdus[0].data
    name = args.name
    minV, maxV, std = img.min(), img.max(), img.std()
    context.logger.info(f"{name}, min={minV}, max={maxV}, std={std}")
    return Arguments(name="OK")
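A function such as process_flat is used as a primitive by registering it in a pipeline's event table; the sketch below assumes the framework's BasePipeline convention of mapping an event name to (primitive, state, next event), with the module path, recipe names, and tuple layout treated as assumptions rather than the definitive API:

from keckdrpframework.pipelines.base_pipeline import BasePipeline

class ExamplePipeline(BasePipeline):
    # Illustrative recipe: read the next file, then run process_flat on it.
    event_table = {
        "next_file": ("simple_fits_reader", "reading", "process_this_flat"),
        "process_this_flat": ("process_flat", "processing", None),
    }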
Example #9
 def get_event(self):
     """
     Retrieves and returns an event from the queues.
     First it checks the high priority queue; if that fails, it checks the regular event queue.
     
     If there are no more events, then it returns the no_event_event, which is defined in the configuration.
     """
     try:
         try:
             return self.event_queue_hi.get_nowait()
         except:
             ev = self.event_queue.get(True, self.config.event_timeout)
             self.wait_for_event = False
             return ev
     except Exception as e:
         if self.wait_for_event:
             ev = Event("no_event", None)
         else:
             ev = self.config.no_event_event
         if ev is None:
             return None
         time.sleep(self.config.no_event_wait_time)
         ev.args = Arguments(name=ev.name,
                             time=datetime.datetime.ctime(
                                 datetime.datetime.now()))
         return ev
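This method is driven by three configuration values (event_timeout, no_event_event, no_event_wait_time); a hedged sketch of what the corresponding entries in the framework configuration file might look like, with the exact syntax and values depending on ConfigClass and the deployment:

# Assumed configuration entries (illustrative values)
event_timeout = 5                           # seconds to block on the regular queue
no_event_event = Event("no_event", None)    # event returned when both queues are empty
no_event_wait_time = 5                      # pause before returning the no-event event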
Example #10
    def __init__(self, pipeline_name, configFile):
        """
        pipeline_name: name of the pipeline class containing recipes
        
        Creates the event_queue and the action queue
        """

        self.config = ConfigClass(configFile)
        self.logger = getLogger(self.config.logger_config_file, name="DRPF")

        self.wait_for_event = False
        # The high priority event queue is local to the process
        self.event_queue_hi = queues.Simple_event_queue()

        # The regular event queue can be local or shared via proxy manager
        self.queue_manager = None
        self.event_queue = self._get_event_queue()

        pipeline = find_pipeline(pipeline_name, self.config.pipeline_path,
                                 self.logger)
        if pipeline is None:
            raise Exception("Failed to initialize pipeline")

        pipeline.set_logger(self.logger)

        self.pipeline = pipeline
        self.context = Processing_context(self.event_queue,
                                          self.event_queue_hi, self.logger,
                                          self.config)
        self.keep_going = True
        self.init_signal()
        self.store_arguments = Arguments()
 def add_create_contact_sheet_event(self, req, qstr):
     self._getParameters(qstr)
     out_dir = self.DRPFramework.config.output_directory
     args = Arguments(dir_name=out_dir,
                      pattern="*.png",
                      out_name="contact_sheet.html",
                      cnt=-1)
     self.DRPFramework.append_event("contact_sheet", args)
     return json.dumps("OK"), self.jsonText
def test_simple_queue():
    eq = SimpleEventQueue()
    for i in range(5):
        eq.put(Event("test event", Arguments(name="test argument", i=i)))
    assert eq.qsize() == 5, "Size mismatch"

    e1 = eq.get()
    e2 = eq.get()
    assert e2.args.i == 1, "Wrong event argument"
Example #13
def test_simple_queue():
    eq = Simple_event_queue()
    for i in range(5):
        eq.put(Arguments(name='test', i=i))
    assert eq.qsize() == 5, "Size mismatch"

    a1 = eq.get()
    a2 = eq.get()
    assert a2.i == 1, "Wrong element in queue"
def process_flat(action, context):
    """
    Example of a function as primitive
    """
    args = simple_fits_reader(action, context)()
    img = args.hdus[0].data
    name = args.name
    minV, maxV, std = img.min(), img.max(), img.std()
    context.logger.debug(f"{name}, min={minV}, max={maxV}, std={std}")
    return Arguments(name="OK")
Example #15
    def _perform(self):
        """
        Expects name, extension, associate_event as arguments.
        
        Adds associate event for each file in directory name with given extension.
        """
        path = self.name
        ext = self.extension
        event = self.associate_event
        queue = self.context.event_queue
        if os.path.isdir(path):
            flist = glob.glob(path + "/*." + ext)
            for f in flist:
                args = Arguments(name=f)
                queue.put(event, args)
        else:
            args = Arguments(name=path)
            queue.put(event, args)

        return Arguments()
    def _perform(self):
        sigmas, sizes = self.sigmas, self.sizes
        args = self.action.args
        self.logger.debug(f"noise removal sigmas={sigmas}, sizes={sizes}")
        img = args.img

        for a, b in zip(sizes, sigmas):
            img = self._denoise(img)

        out_args = Arguments(name=args.name, img=img)
        return out_args
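The _denoise helper is not shown, and the loop above does not actually pass the (size, sigma) pair it iterates over; a minimal sketch of one plausible implementation using scipy.ndimage, assuming the helper would accept those parameters explicitly:

from scipy import ndimage

def _denoise(self, img, size=3, sigma=1.0):
    # Hypothetical sketch: median pass to suppress outlier pixels,
    # then Gaussian smoothing of the residual noise.
    img = ndimage.median_filter(img, size=size)
    return ndimage.gaussian_filter(img, sigma=sigma)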
    def _perform(self):
        """
        Returns an Arguments() object with the parameters that depend on this operation.
        """
        args = self.action.args

        df = self.context.data_set.data_table
        files = df[df.IMTYPE == args.want_type]
        stacked = self.average(list(files.index))
        self.save_fits_like(args.new_file_name, stacked, files.index[0],
                            args.new_type)
        return Arguments(name=args.new_file_name)
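The average and save_fits_like helpers are not shown; a minimal sketch of the averaging step, assuming it mean-combines the selected FITS files pixel by pixel with astropy and numpy:

import numpy as np
from astropy.io import fits

def average(self, file_names):
    # Hypothetical sketch: load each frame as float and take the per-pixel mean.
    stack = [fits.getdata(name).astype(np.float64) for name in file_names]
    return np.mean(stack, axis=0)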
Example #18
    def _perform(self):
        sigmas, sizes = self.sigmas, self.sizes
        args = self.action.args
        self.logger.info(f"noise removal sigmas={sigmas}, sizes={sizes}")
        hdus = args.hdus

        img = hdus[0].data
        for a, b in zip(sizes, sigmas):
            img = self._denoise(img)

        out_args = Arguments(name=args.name, img=img)
        return out_args
Example #19
    def execute(self, action, context):
        """
        Executes one action
        The input for the action is in action.args.
        The action returns action_output, which is passed to the next event if the action is successful.
        """
        pipeline = self.pipeline
        action_name = action.name
        try:
            # Pre condition
            if pipeline.get_pre_action(action_name)(action, context):
                if self.config.print_trace:
                    self.logger.info("Executing action " + action.name)

                # Run action
                action_output = pipeline.get_action(action_name)(action,
                                                                 context)
                if action_output is not None:
                    self.store_arguments = action_output

                # Post condition
                if pipeline.get_post_action(action_name)(action, context):
                    if action.new_event is not None:
                        # Post new event
                        new_args = Arguments(
                        ) if action_output is None else action_output
                        self._push_event(action.new_event, new_args)

                    if action.next_state is not None:
                        # New state
                        context.state = action.next_state

                    if self.config.print_trace:
                        self.logger.info("Action " + action.name + " done")
                    return
                else:
                    # Post-condition failed
                    context.state = "stop"
            else:
                # Failed pre-condition
                if self.config.pre_condition_failed_stop:
                    context.state = "stop"
        except:
            self.logger.error(
                "Exception while invoking {}. Execution stopped.".format(
                    action_name))
            context.state = "stop"
            if self.config.print_trace:
                traceback.print_exc()
Example #20
    def __init__(self, pipeline_name, configFile, testing=False):
        """
        pipeline_name: name of the pipeline class containing recipes

        Creates the event_queue and the action queue
        """
        if configFile is None:
            self.config = ConfigClass()
        elif isinstance(configFile, str):
            self.config = ConfigClass(configFile)
        else:
            self.config = configFile

        self.testing = testing

        self.logger = getLogger(self.config.logger_config_file, name="DRPF")
        self.logger.info("")
        self.logger.info("Initialization Framework cwd={}".format(os.getcwd()))

        self.wait_for_event = False
        # The high priority event queue is local to the process
        self.event_queue_hi = queues.SimpleEventQueue()

        # The regular event queue can be local or shared via queue manager
        self.queue_manager = None
        self.event_queue = self._get_event_queue()

        # The done_queue
        self.done_queue = None

        # The handlers
        self.on_exit = self.default_on_exit
        self.on_state = self.default_on_state
        self.on_error = self.default_on_error

        self.context = ProcessingContext(self.event_queue, self.event_queue_hi,
                                         self.logger, self.config)

        pipeline = find_pipeline(pipeline_name, self.config.pipeline_path,
                                 self.context, self.logger)

        if pipeline is None:
            raise Exception(f"Failed to initialize pipeline {pipeline_name}")

        self.pipeline = pipeline

        self.keep_going = True
        self.init_signal()
        self.store_arguments = Arguments()
    def _perform(self):
        os.makedirs(self.output_dir, exist_ok=True)
        args = self.action.args
        name = os.path.basename(args.name)

        out_name = self.output_dir + "/" + name.replace(
            ".fits", self.extension)
        img = args.img
        h, w = img.shape
        img1 = np.stack((img, ) * 3, axis=-1)

        plt.imsave(out_name, img1, format=self.output_format)

        self.logger.debug("Saved {}".format(out_name))
        out_args = Arguments(name=out_name)
        return out_args
Example #22
 def get_event(self):
     try:
         try:
             return self.event_queue_hi.get_nowait()
         except:
             return self.event_queue.get(True, self.config.event_timeout)
     except Exception as e:
         ev = self.config.no_event_event
         if ev is None:
             return None
         time.sleep(self.config.no_event_wait_time)
         ev.args = Arguments(name=ev.name,
                             time=datetime.datetime.ctime(
                                 datetime.datetime.now()))
         return ev
Example #23
    def _perform(self):
        output_dir = self.config.output_directory
        os.makedirs(output_dir, exist_ok=True)
        args = self.action.args
        name = os.path.basename(args.name)

        out_name = output_dir + "/" + name.replace(".fits", ".png")
        img = args.img
        h, w = img.shape
        img1 = np.stack((img, ) * 3, axis=-1)

        plt.imsave(out_name, img1)

        self.logger.info("Saved {}".format(out_name))
        out_args = Arguments(name=out_name)
        return out_args
Example #24
    def ingest_data(self, path=None, files=None):
        """
        Adds files to the data_set.
        The data_set resides in the framework context.
        """
        ds = self.context.data_set
        if ds is None:
            # Data_set will scan and import the content of the directory
            ds = Data_set(path, self.logger, self.config)

        if files is not None:
            for f in files:
                ds.append_item(f)

        for ditem in ds.data_table.index:
            self.event_queue.put(Event("next_file", Arguments(name=ditem)))

        self.context.data_set = ds
Example #25
    def __init__(self, pipeline, configFile):
        '''
        pipeline: a class containing recipes
        
        Creates the event_queue and the action queue
        '''
        self.config = ConfigClass(configFile)
        self.logger = getLogger(self.config.logger_config_file, name="DRPF")
        pipeline.set_logger(self.logger)

        self.event_queue = Event_queue()
        self.event_queue_hi = Event_queue()

        self.pipeline = pipeline
        self.context = Processing_context(self.event_queue_hi, self.logger,
                                          self.config)
        self.keep_going = True
        self.init_signal()
        self.store_arguments = Arguments()
Example #26
 def _perform(self):
     """
     Arguments:

     dir_name: name of the directory containing the images
     pattern: pattern to select input files, e.g. "*.png"
     out_name: output file name, e.g. some_name.html
     """
     args = self.action.args
     dir_name = args.dir_name
     out_name = args.out_name
     pattern = args.pattern
     self.logger.info(f"Creating contact sheet in {dir_name}, out_name={out_name}")

     flist = sorted(glob.glob(dir_name + "/" + pattern))

     out = []
     for f in flist:
         out.append(self._genEntry(f))

     with open(dir_name + "/" + out_name, "w") as fh:
         print("<html><body>", file=fh)
         print("\n".join(out), file=fh)
         print("</body></html>", file=fh)

     return Arguments()
Example #27
 def execute(self, action, context):
     '''
     Executes one action
     The input for the action is in action.args.
     The action returns action_output, which is passed to the next event if the action is successful.
     '''
     pipeline = self.pipeline
     action_name = action.name
     try:
         if pipeline.get_pre_action(action_name)(action, context):
             if self.config.print_trace:
                 self.logger.info('Executing action ' + action.name)
             action_output = pipeline.get_action(action_name)(action,
                                                              context)
             if action_output is not None:
                 self.store_arguments = action_output
             if pipeline.get_post_action(action_name)(action, context):
                 if action.new_event is not None:
                     new_args = Arguments(
                     ) if action_output is None else action_output
                     self._push_event(action.new_event, new_args)
                 if action.next_state is not None:
                     context.state = action.next_state
                 if self.config.print_trace:
                     self.logger.info('Action ' + action.name + ' done')
                 return
             else:
                 # post-condition failed
                 context.state = 'stop'
         else:
             # Failed pre-condition
             if self.config.pre_condition_failed_stop:
                 context.state = 'stop'
     except:
         self.logger.error(
             "Exception while invoking {}. Execution stopped.".format(
                 action_name))
         context.state = 'stop'
         if self.config.print_trace:
             traceback.print_exc()
def test_run_example_3(init_framework):
    """
    Adds events directly to the queue and runs the loop.
    There should be 6 output files.
    """
    f = init_framework

    flist = glob.glob("test_files/*.fits")

    for fn in flist:
        f.append_event("next_file", Arguments(name=fn))

    f.main_loop()

    flist = glob.glob("output/*.jpg")
    assert len(
        flist
    ) == 6, f"Unexpected number of files, expected 6, got {len(flist)}"

    in_progress = f.event_queue.get_in_progress()
    print(len(in_progress))
    assert len(
        in_progress) == 0, "Unexpected events in progress, should be none"
    try:
        framework = Framework(pipeline_name, config)
    except Exception as e:
        print("Test harness failed to initialize framework, exiting ...", e)
        traceback.print_exc()
        sys.exit(1)

    framework.logger.info("Framework initialized")

    if args.queue_manager_only:
        # The queue manager runs for ever.
        framework.logger.info("Starting queue manager only, no processing")
        framework.start_queue_manager()
    else:
        framework.logger.debug(
            f"infiles {args.infiles}, dirname {args.dirname}")
        if (len(args.infiles) > 0) or args.dirname is not None:
            # Ingest data and terminate
            framework.ingest_data(args.dirname, args.infiles)
            nfiles = framework.context.data_set.get_size()
            cfg = framework.config
            cargs = Arguments(cnt=nfiles,
                              out_name="test.html",
                              pattern="*.png",
                              dir_name=cfg.output_directory)
            framework.append_event("contact_sheet", cargs)

        framework.start(args.queue_manager_only, args.ingest_data_only,
                        args.wait_for_event, args.continuous)
    def _perform(self):
        """
        Returns an Arguments() object with the parameters that depend on this operation.
        """
        method = 'average'
        suffix = self.action.args.new_type.lower()

        combine_list = list(self.combine_list['filename'])
        # get master bias output name
        # mbname = combine_list[-1].split('.fits')[0] + '_' + suffix + '.fits'
        mbname = master_bias_name(self.action.args.ccddata)

        stack = []
        stackf = []
        for bias in combine_list:
            stackf.append(bias)
            # using [0] drops the table
            stack.append(kcwi_fits_reader(bias)[0])

        stacked = ccdproc.combine(stack,
                                  method=method,
                                  sigma_clip=True,
                                  sigma_clip_low_thresh=None,
                                  sigma_clip_high_thresh=2.0)
        stacked.header['IMTYPE'] = self.action.args.new_type
        stacked.header['NSTACK'] = (len(combine_list),
                                    'number of images stacked')
        stacked.header['STCKMETH'] = (method, 'method used for stacking')
        for ii, fname in enumerate(stackf):
            fname_base = os.path.basename(fname)
            stacked.header['STACKF%d' % (ii + 1)] = (fname_base,
                                                     "stack input file")

        # for readnoise stats use 2nd and 3rd bias
        diff = stack[1].data.astype(np.float32) - \
            stack[2].data.astype(np.float32)
        namps = stack[1].header['NVIDINP']
        for ia in range(namps):
            # get gain
            gain = stacked.header['GAIN%d' % (ia + 1)]
            # get amp section
            sec, rfor = parse_imsec(stacked.header['DSEC%d' % (ia + 1)])
            noise = diff[sec[0]:(sec[1] + 1), sec[2]:(sec[3] + 1)]
            noise = np.reshape(noise, noise.shape[0]*noise.shape[1]) * \
                gain / 1.414
            # get stats on noise
            c, low, upp = sigmaclip(noise, low=3.5, high=3.5)
            bias_rn = c.std()
            self.logger.info("Amp%d read noise from bias in e-: %.3f" %
                             ((ia + 1), bias_rn))
            stacked.header['BIASRN%d' % (ia + 1)] = \
                (float("%.3f" % bias_rn), "RN in e- from bias")
            if self.config.instrument.plot_level >= 1:
                # output filename stub
                biasfnam = "bias_%05d_amp%d_rdnoise" % \
                          (self.action.args.ccddata.header['FRAMENO'], ia+1)
                plabel = '[ Img # %d' % self.action.args.ccddata.header[
                    'FRAMENO']
                plabel += ' (Bias)'
                plabel += ' %s' % self.action.args.ccddata.header['BINNING']
                plabel += ' %s' % self.action.args.ccddata.header['AMPMODE']
                plabel += ' %d' % self.action.args.ccddata.header['GAINMUL']
                plabel += ' %s' % ('fast' if self.action.args.ccddata.
                                   header['CCDMODE'] else 'slow')
                plabel += ' ] '
                hist, edges = np.histogram(noise,
                                           range=(low, upp),
                                           density=False,
                                           bins=50)
                x = np.linspace(low, upp, 500)
                pdf = np.max(hist) * np.exp(-x**2 / (2. * bias_rn**2))
                p = figure(title=plabel + 'BIAS NOISE amp %d = %.3f' %
                           (ia + 1, bias_rn),
                           x_axis_label='e-',
                           y_axis_label='N',
                           plot_width=self.config.instrument.plot_width,
                           plot_height=self.config.instrument.plot_height)
                p.quad(top=hist,
                       bottom=0,
                       left=edges[:-1],
                       right=edges[1:],
                       fill_color="navy",
                       line_color="white",
                       alpha=0.5)
                p.line(x,
                       pdf,
                       line_color="#ff8888",
                       line_width=4,
                       alpha=0.7,
                       legend_label="PDF")
                p.line([-bias_rn, -bias_rn], [0, np.max(hist)],
                       color='red',
                       legend_label="Sigma")
                p.line([bias_rn, bias_rn], [0, np.max(hist)], color='red')
                p.y_range.start = 0
                bokeh_plot(p, self.context.bokeh_session)
                if self.config.instrument.plot_level >= 2:
                    input("Next? <cr>: ")
                else:
                    time.sleep(self.config.instrument.plot_pause)
                save_plot(p, filename=biasfnam + ".png")

        log_string = MakeMasterBias.__module__
        stacked.header['HISTORY'] = log_string
        self.logger.info(log_string)

        kcwi_fits_writer(stacked,
                         output_file=mbname,
                         output_dir=self.config.instrument.output_directory)
        self.context.proctab.update_proctab(frame=stacked,
                                            suffix=suffix,
                                            newtype=self.action.args.new_type,
                                            filename=self.action.args.name)
        self.context.proctab.write_proctab()
        return Arguments(name=mbname)