import os
from datetime import datetime
from time import sleep

from detector_integration_api import DetectorIntegrationClient
# Acquisition (used by DIAClient.acquire) is provided elsewhere in the surrounding package.


class DIAClient:
    def __init__(self, Id, instrument=None, api_address=None, jf_name=None):
        self.Id = Id
        self._api_address = api_address
        self.client = DetectorIntegrationClient(api_address)
        print("\nDetector Integration API on %s" % api_address)
        # No pgroup by default
        self.pgroup = 0
        self.n_frames = 100
        self.jf_name = jf_name
        self.pede_file = ""
        self.gain_file = ""
        self.instrument = instrument
        if instrument is None:
            print(
                "ERROR: please configure the instrument parameter in DIAClient"
            )
        self.update_config()

    def update_config(self):
        self.writer_config = {
            "output_file": "/sf/%s/data/p%d/raw/test_data.h5" % (self.instrument, self.pgroup),
            "user_id": self.pgroup,
            "n_frames": self.n_frames,
            "general/user": str(self.pgroup),
            "general/process": __name__,
            "general/created": str(datetime.now()),
            "general/instrument": self.instrument,
            # "general/correction": "test"
        }

        self.backend_config = {
            "n_frames": self.n_frames,
            "bit_depth": 16,
            "gain_corrections_filename": self.gain_file,  # e.g. "/sf/alvra/config/jungfrau/jungfrau_4p5_gaincorrections_v0.h5"
            # "gain_corrections_dataset": "gains",
            # "pede_corrections_filename": "/sf/alvra/data/res/p%d/pedestal_20171210_1628_res.h5" % self.pgroup,
            # "pede_corrections_dataset": "gains",
            # "pede_mask_dataset": "pixel_mask",
            # "activate_corrections_preview": True,
            # FIXME: HARDCODED!!!
            "is_HG0": False,
        }

        if self.pede_file != "":
            self.backend_config["gain_corrections_filename"] = self.gain_file
            self.backend_config["gain_corrections_dataset"] = "gains"
            self.backend_config["pede_corrections_filename"] = self.pede_file
            self.backend_config["pede_corrections_dataset"] = "gains"
            self.backend_config["pede_mask_dataset"] = "pixel_mask"
            self.backend_config["activate_corrections_preview"] = True
        else:
            self.backend_config["pede_corrections_dataset"] = "gains"
            self.backend_config["pede_mask_dataset"] = "pixel_mask"
            self.backend_config["gain_corrections_filename"] = ""
            self.backend_config["pede_corrections_filename"] = ""
            self.backend_config["activate_corrections_preview"] = False

        self.detector_config = {
            "timing": "trigger",
            # FIXME: HARDCODED
            "exptime": 0.000005,
            "cycles": self.n_frames,
            # "delay"  : 0.001992,
            "frames": 1,
            "dr": 16,
        }

        # Not needed anymore?
        # default_channels_list = parseChannelListFile(
        #    '/sf/alvra/config/com/channel_lists/default_channel_list')

        self.bsread_config = {
            "output_file": "/sf/%s/data/p%d/raw/test_bsread.h5" % (self.instrument, self.pgroup),
            "user_id": self.pgroup,
            "general/user": str(self.pgroup),
            "general/process": __name__,
            "general/created": str(datetime.now()),
            "general/instrument": self.instrument,
            # 'Npulses': 100,
            # 'channels': default_channels_list
        }

    #        self.default_channels_list = jungfrau_utils.load_default_channel_list()

    def reset(self):
        self.client.reset()
        # pass

    def get_status(self):
        return self.client.get_status()

    def get_config(self):
        config = self.client.get_config()
        return config

    def set_pgroup(self, pgroup):
        self.pgroup = pgroup
        self.update_config()

    def set_bs_channels(self):
        print(
            "Please update /sf/%s/config/com/channel_lists/default_channel_list "
            "and restart all services on the DAQ server" % self.instrument
        )

    def set_config(self):
        self.reset()
        self.client.set_config({
            "writer": self.writer_config,
            "backend": self.backend_config,
            "detector": self.detector_config,
            "bsread": self.bsread_config,
        })

    def check_still_running(self, time_interval=0.5):
        while True:
            if self.get_status()["status"][-7:] != "RUNNING":
                break
            #            elif not self.get_status()['status'][-20:]=='BSREAD_STILL_RUNNING':
            #                break
            sleep(time_interval)

    def take_pedestal(self,
                      n_frames,
                      analyze=True,
                      n_bad_modules=0,
                      update_config=True):
        from jungfrau_utils.scripts.jungfrau_run_pedestals import run as jungfrau_utils_run

        directory = "/sf/%s/data/p%d/raw" % (self.instrument, self.pgroup)
        if not os.path.exists(directory):
            print("Directory %s does not exist, creating it" % directory)
            os.makedirs(directory)

        res_dir = directory.replace("/raw", "/res")
        if not os.path.exists(res_dir):
            print("Directory %s does not exist, creating it" % res_dir)
            os.makedirs(res_dir)
        filename = "pedestal_%s.h5" % datetime.now().strftime("%Y%m%d_%H%M")
        period = 0.02  # 50 Hz; for 25 Hz this is 0.04, for 10 Hz it is 0.1
        jungfrau_utils_run(
            self._api_address,
            filename,
            directory,
            self.pgroup,
            period,
            self.detector_config["exptime"],
            n_frames,
            1,
            analyze,
            n_bad_modules,
            self.instrument,
            self.jf_name,
        )

        if update_config:
            self.pede_file = os.path.join(res_dir, filename).replace(".h5", "_res.h5")
            print("Pedestal file updated to %s" % self.pede_file)
        return self.pede_file

    def start(self):
        self.client.start()
        print("start acquisition")

    def stop(self):
        self.client.stop()
        print("stop acquisition")

    def config_and_start_test(self):
        self.reset()
        self.set_config()
        self.start()

    def wait_for_status(self, *args, **kwargs):
        return self.client.wait_for_status(*args, **kwargs)

    def acquire(self,
                file_name=None,
                Npulses=100,
                JF_factor=1,
                bsread_padding=0):
        """
        JF_factor?
        bsread_padding?
        """
        file_rootdir = "/sf/%s/data/p%d/raw/" % (self.instrument, self.pgroup)

        if file_name is None:
            # FIXME /dev/null crashes the data taking (h5py can't close /dev/null and crashes)
            print("Not saving any data, as file_name is not set")
            file_name_JF = file_rootdir + "DelMe" + "_JF4p5M.h5"
            file_name_bsread = file_rootdir + "DelMe" + ".h5"
        else:
            # FIXME hardcoded
            file_name_JF = file_rootdir + file_name + "_JF4p5M.h5"
            file_name_bsread = file_rootdir + file_name + ".h5"

        if self.pgroup == 0:
            raise ValueError("Please use set_pgroup() to set a pgroup value.")

        def acquire():
            self.n_frames = Npulses * JF_factor
            self.update_config()
            # self.detector_config.update({
            #    'cycles': n_frames})
            self.writer_config.update({
                "output_file": file_name_JF,
                #    'n_messages': n_frames
            })
            # self.backend_config.update({
            #    'n_frames': n_frames})
            self.bsread_config.update({
                "output_file": file_name_bsread,
                #    'Npulses': Npulses + bsread_padding
            })

            self.reset()
            self.set_config()
            # print(self.get_config())
            self.client.start()
            done = False

            while not done:
                stat = self.get_status()
                if stat["status"] in (
                    "IntegrationStatus.FINISHED",
                    "IntegrationStatus.BSREAD_STILL_RUNNING",
                    "IntegrationStatus.INITIALIZED",
                    "IntegrationStatus.DETECTOR_STOPPED",
                ):
                    done = True
                sleep(0.1)

        return Acquisition(
            acquire=acquire,
            acquisition_kwargs={
                "file_names": [file_name_bsread, file_name_JF],
                "Npulses": Npulses,
            },
            hold=False,
        )

    def wait_done(self):
        self.check_still_running()
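
A minimal usage sketch for DIAClient follows; the instrument, pgroup, API address and detector name are placeholders, and it assumes the Acquisition object returned by acquire() exposes a wait() method.

dia = DIAClient(
    "JF",
    instrument="alvra",                    # placeholder instrument
    api_address="http://sf-daq-1:10000",   # placeholder DIA address
    jf_name="JF4p5M",                      # placeholder detector name
)
dia.set_pgroup(12345)                      # placeholder pgroup number
dia.take_pedestal(1000)                    # record and analyze a pedestal run
acq = dia.acquire(file_name="run_0001", Npulses=500)
acq.wait()                                 # assumes Acquisition provides wait()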
Example 2
def run_jungfrau(n_frames,
                 save=True,
                 exptime=0.000010,
                 outfile="",
                 outdir="",
                 uid=16852,
                 api_address="http://sf-daq-1:10001",
                 gain_filename="",
                 pede_filename="",
                 is_HG0=False,
                 instrument=""):  # caput=False):

    client = DetectorIntegrationClient(api_address)

    writer_config = {
        "output_file": outdir + "/" + outfile,
        "user_id": uid,
        "n_frames": n_frames,
        "general/user": str(uid),
        "general/process": __name__,
        "general/created": str(datetime.now()),
        "general/instrument": instrument
    }

    if not save:
        writer_config["output_file"] = "/dev/null"

    detector_config = {
        "exptime": exptime,
        "frames": 1,
        'cycles': n_frames,
        "timing": "trigger",
        "dr": 16
    }

    backend_config = {"n_frames": n_frames, "bit_depth": 16}

    bsread_config = {
        'output_file': outdir + "/" + outfile,
        'user_id': uid,
        "general/user": str(uid),
        "general/process": __name__,
        "general/created": str(datetime.now()),
        "general/instrument": instrument
    }

    if gain_filename != "" or pede_filename != "":
        backend_config["gain_corrections_filename"] = gain_filename
        backend_config["gain_corrections_dataset"] = "gains"
        backend_config["pede_corrections_filename"] = pede_filename
        backend_config["pede_corrections_dataset"] = "gains"
        backend_config["pede_mask_dataset"] = "pixel_mask"
        backend_config["activate_corrections_preview"] = True
        print("Corrections in online viewer activated")

    if is_HG0:
        backend_config["is_HG0"] = True
        detector_config["setbit"] = "0x5d 0"
        print("Running in highG0 mode")
    else:
        client.set_detector_value("clearbit", "0x5d 0")
        print("Running in normal mode (not highG0)")

    try:
        client.reset()

        configuration = {
            "writer": writer_config,
            "backend": backend_config,
            "detector": detector_config,
            "bsread": bsread_config
        }

        client.set_config(configuration)

        print(client.get_config())

        print("Starting acquisition")
        client.start()

        try:
            client.wait_for_status(["IntegrationStatus.FINISHED"],
                                   polling_interval=0.1)
        except Exception:
            print("Got IntegrationStatus ERROR")
            print(client.get_status())
            print(client.get_status_details())

        print("Stopping acquisition")
        client.reset()

        print("Done")
    except KeyboardInterrupt:
        print("Caught CTRL-C, resetting")
        client.reset()
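
A possible invocation of run_jungfrau; the output directory, user id, API address and instrument below are placeholders.

run_jungfrau(
    1000,
    save=True,
    exptime=0.000010,
    outfile="test_data.h5",
    outdir="/sf/alvra/data/p12345/raw",    # placeholder path
    uid=12345,                             # placeholder user id
    api_address="http://sf-daq-1:10001",   # placeholder DIA address
    gain_filename="",                      # empty: no online corrections
    pede_filename="",
    is_HG0=False,
    instrument="alvra",                    # placeholder instrument
)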
Example 3
class DIAAcquisition(BaseAcquisition):
    def __init__(self,
                 instrument,
                 pgroup,
                 default_data_base_dir="static_data",
                 default_channels=None,
                 default_dir=None,
                 api_address=None):
        self.instrument = instrument
        self.pgroup = pgroup
        self.default_data_base_dir = default_data_base_dir

        self.config = DIAConfig(instrument, pgroup)
        self.paths = SwissFELPaths(instrument, pgroup)

        if not default_channels:
            default_channel_list = self.paths.default_channel_list
            default_channels = Channels(default_channel_list)

        if not default_dir:
            default_dir = self.paths.raw

        self.default_channels = default_channels
        self.default_dir = default_dir

        if not api_address:
            api_address = "http://sf-daq-{}:10000".format(instrument)

        self.api_address = api_address
        self.client = DetectorIntegrationClient(api_address)

        self.current_task = None

    def acquire(self,
                filename=None,
                data_base_dir=None,
                channels=None,
                n_pulses=100,
                use_default_dir=True,
                is_HG0=False,
                wait=True):
        if not filename:
            filename = "/dev/null"
            filenames = []  # nothing will be written, so no filenames to report
        else:
            if data_base_dir is None:
                print(
                    "No base directory specified, using default base directory."
                )
                data_base_dir = self.default_data_base_dir
            filename = os.path.join(data_base_dir, filename)

            if use_default_dir:
                filename = os.path.join(self.default_dir, filename)

            filenames = self.make_all_filenames(filename)
            if not can_create_all_files(filenames):
                return

        if channels is None:
            print("No channels specified, using default channel list.")
            channels = self.default_channels

        if not is_HG0:
            self.clear_HG0_bit()

        gain_file = self.paths.gain
        pede_file = self.get_last_pedestal()

        cfg = self.config.to_dict(filename=filename,
                                  channels=channels,
                                  n_pulses=n_pulses,
                                  gain_file=gain_file,
                                  pede_file=pede_file,
                                  is_HG0=is_HG0)
        self.set_config(cfg)

        def _acquire():
            self.client.start()
            self.wait_until_finished()
            self.client.reset()

        task = DAQTask(_acquire,
                       stopper=self.client.stop,
                       filenames=filenames,
                       hold=False)
        self.current_task = task

        if wait:
            try:
                task.wait()
            except KeyboardInterrupt:
                print("Stopped current DAQ task:")

        return task

    def make_all_filenames(self, base):
        res = []
        for client in self.active_clients:
            client = client.upper()
            fn = "{}.{}.h5".format(base, client)
            res.append(fn)
        return res

    @property
    def active_clients(self):
        cs = self.client.get_clients_enabled()
        cs = cs["clients_enabled"]
        cs = cs.keys()
        return sorted(cs)

    def clear_HG0_bit(self):
        self.client.set_detector_value("clearbit", "0x5d 0")

    def set_config(self, cfg):
        self.client.reset()
        self.client.set_config(cfg)
        self.client.wait_for_status("IntegrationStatus.CONFIGURED")

    def wait_until_finished(self, wait_time=0.5):
        while True:
            if self.status == "FINISHED":  #TODO: in ("INITIALIZED", "DETECTOR_STOPPED", "BSREAD_STILL_RUNNING", "FINISHED") ?
                break
            sleep(wait_time)

    def wait_while_running(self, wait_time=0.5):
        while True:
            if self.status != "RUNNING":  #TODO: "BSREAD_STILL_RUNNING" ?
                break
            sleep(wait_time)

    @property
    def status(self):
        stat = self.client.get_status()
        stat = stat["status"]
        prefix = "IntegrationStatus."
        if stat.startswith(prefix):
            stat = stat[len(prefix):]
        return stat

    def get_last_pedestal(self):
        return find_last_pedestal(self.active_clients, self.paths.pede)

    def take_pedestal(self,
                      analyze=True,
                      n_pulses=1000,
                      n_bad_modules=0,
                      freq=25,
                      user=None):
        instrument = self.instrument
        pgroup = self.pgroup
        api_address = self.api_address
        raw_dir = self.paths.raw
        res_dir = self.paths.res
        exptime = EXPTIME[instrument]
        return take_pedestal(instrument, pgroup, api_address, raw_dir, res_dir,
                             analyze, n_pulses, n_bad_modules, freq, exptime,
                             user)

    def __repr__(self):
        clients = self.active_clients
        clients = ", ".join(clients)
        return "Detector Integration API on {} (status: {})\nClients: {}".format(
            self.api_address, self.status, clients)
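
A rough usage sketch for DIAAcquisition; the instrument and pgroup values are placeholders, and the helper classes used above (DIAConfig, SwissFELPaths, Channels, DAQTask) and the EXPTIME table are assumed to come from the surrounding package.

daq = DIAAcquisition("alvra", "p12345")    # placeholder instrument/pgroup
print(daq)                                 # API address, status and enabled clients
daq.take_pedestal(n_pulses=1000)
task = daq.acquire("scan_step_0000", n_pulses=100, wait=True)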
Example 4

    def test_client_workflow(self):
        client = DetectorIntegrationClient()

        client.reset()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        writer_config = {"output_file": "/tmp/test.h5",
                         "n_frames": 100,
                         "user_id": 0,
                         "group_id": 0}

        backend_config = {"bit_depth": 16,
                          "n_frames": 100}

        detector_config = {"period": 0.1,
                           "frames": 100,
                           "exptime": 0.01,
                           "dr": 16}

        configuration = {"writer": writer_config,
                         "backend": backend_config,
                         "detector": detector_config}

        response = client.set_config(configuration)

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        client.start()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.RUNNING")

        client.stop()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        with self.assertRaisesRegex(Exception, "Cannot start acquisition"):
            client.start()

        client.set_last_config()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        client.start()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.RUNNING")

        client.stop()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        with self.assertRaisesRegex(Exception, "n_frames"):
            client.update_config({"writer": {"user_id": 1},
                                  "backend": {"n_frames": 50},
                                  "detector": {"frames": 50}})

        response = client.update_config({"writer": {"n_frames": 50,
                                                    "user_id": 1},
                                         "backend": {"n_frames": 50},
                                         "detector": {"frames": 50}})

        writer_config["user_id"] = 1
        writer_config["n_frames"] = 50
        backend_config["n_frames"] = 50
        detector_config["frames"] = 50

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        response = client.update_config({"writer": {"group_id": 1}})

        writer_config["group_id"] = 1

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        client.reset()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        response = client.set_last_config()

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        self.assertEqual(client.get_detector_value("frames"), response["config"]["detector"]["frames"])

        client.reset()

        client.set_config_from_file(os.path.join(os.path.dirname(os.path.realpath(__file__)), "debug_config.json"))

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        self.assertTrue("server_info" in client.get_server_info())

        client.kill()
Example 5

# Connect to a running Detector Integration API instance
# (the address below is a placeholder; use the one for your deployment).
client = DetectorIntegrationClient("http://localhost:10000")

# Write 1000 frames, as user id 11057 (gac-x12saop), to file "/sls/X12SA/Data10/gac-x12saop/tmp/dia_test.h5".
writer_config = {
    "n_frames": 1000,
    "user_id": 11057,
    "output_file": "/sls/X12SA/Data10/gac-x12saop/tmp/dia_test.h5"
}

# Expect 1000 16-bit frames.
backend_config = {"bit_depth": 16, "n_frames": 1000}

# Acquire 1000 16-bit images with a period of 0.02 s.
detector_config = {"dr": 16, "frames": 1000, "period": 0.02, "exptime": 0.0001}

configuration = {
    "writer": writer_config,
    "backend": backend_config,
    "detector": detector_config
}

# Set the configs.
client.set_config(configuration)

# Start the acquisition.
client.start()

# Get the current acquisition status (it should be "IntegrationStatus.RUNNING")
client.get_status()

# Block until the acquisition has finished (this is optional).
client.wait_for_status("IntegrationStatus.FINISHED")
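
Once the acquisition has finished, resetting the integration returns the status to IntegrationStatus.INITIALIZED (as exercised in the test workflow above), so a new configuration can be applied.

# Back to INITIALIZED, ready for the next configuration.
client.reset()
print(client.get_status())   # "status" should now be "IntegrationStatus.INITIALIZED"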