def test_set_detector_value(self):
        client = DetectorIntegrationClient()

        value = 0.023
        client.set_detector_value("period", value)

        self.assertEqual(value, client.get_detector_value("period"))
Example #2
    def __init__(self,
                 instrument,
                 pgroup,
                 default_data_base_dir="static_data",
                 default_channels=None,
                 default_dir=None,
                 api_address=None):
        self.instrument = instrument
        self.pgroup = pgroup
        self.default_data_base_dir = default_data_base_dir

        self.config = DIAConfig(instrument, pgroup)
        self.paths = SwissFELPaths(instrument, pgroup)

        if not default_channels:
            default_channel_list = self.paths.default_channel_list
            default_channels = Channels(default_channel_list)

        if not default_dir:
            default_dir = self.paths.raw

        self.default_channels = default_channels
        self.default_dir = default_dir

        if not api_address:
            api_address = "http://sf-daq-{}:10000".format(instrument)

        self.api_address = api_address
        self.client = DetectorIntegrationClient(api_address)

        self.current_task = None
    def test_daq_test(self):
        client = DetectorIntegrationClient()

        configuration = {"not really": "important"}

        test_result = client.daq_test(configuration)["result"]

        self.assertDictEqual(configuration, test_result)
    def test_wait_for_status(self):

        time_to_wait = 0
        sleep_time = 1
        timeout = 1.5

        client = DetectorIntegrationClient()

        detector_config = {"frames": 10000, "dr": 16, "period": 0.001, "exptime": 0.0001}
        backend_config = {"n_frames": 10000, "bit_depth": 16}
        writer_config = {"user_id": 16371, "output_file": "something", "n_frames": 10000}

        configuration = {"detector": detector_config,
                         "backend": backend_config,
                         "writer": writer_config}

        def wait_for_status_thread():
            client2 = DetectorIntegrationClient()
            start_time = time()

            try:
                client2.wait_for_status("IntegrationStatus.RUNNING", timeout=timeout)
            except:
                pass

            nonlocal time_to_wait
            time_to_wait = time() - start_time

        wait_thread = Thread(target=wait_for_status_thread)
        wait_thread.start()

        sleep(sleep_time)

        client.reset()
        client.set_config(configuration)

        client.start()
        wait_thread.join()
        client.stop()

        self.assertTrue(time_to_wait > sleep_time)

        wait_thread = Thread(target=wait_for_status_thread)
        wait_thread.start()
        wait_thread.join()

        self.assertTrue(time_to_wait >= timeout)
    def __init__(self, Id, instrument=None, api_address=None, jf_name=None):
        self.Id = Id
        self._api_address = api_address
        self.client = DetectorIntegrationClient(api_address)
        print("\nDetector Integration API on %s" % api_address)
        # No pgroup by default
        self.pgroup = 0
        self.n_frames = 100
        self.jf_name = jf_name
        self.pede_file = ""
        self.gain_file = ""
        self.instrument = instrument
        if instrument is None:
            print(
                "ERROR: please configure the instrument parameter in DIAClient"
            )
        self.update_config()
    def __init__(self, ID, instrument=None, api_address=None, jf_name=None):
        self.ID = ID
        self._api_address = api_address
        self.client = DetectorIntegrationClient(api_address)
        print("\nDetector Integration API on %s" % api_address)
        # No pgroup by default
        self.pgroup = 0
        self.n_frames = 100
        self.jf_name = jf_name
        self.pede_file = ""
        self.gain_file = ""
        self.instrument = instrument
        if instrument is None:
            print(
                "ERROR: please configure the instrument parameter in DIAClient"
            )
        self.gain_file = "/sf/%s/config/jungfrau/gainMaps" % self.instrument
        self.update_config()
        self.active_clients = list(
            self.get_active_clients()["clients_enabled"].keys())
Example #8
    def __init__(self,
                 ID,
                 instrument=None,
                 api_address="http://sf-daq-2:10000",
                 jf_name="JF_1.5M"):
        self.ID = ID
        self._api_address = api_address
        self.client = DetectorIntegrationClient(api_address)
        print("\nDetector Integration API on %s" % api_address)
        # No pgroup by default
        self.pgroup = 17571
        self.n_frames = 100
        self.jf_name = jf_name
        self.pede_file = ""
        self.instrument = instrument
        if instrument is None:
            print(
                "ERROR: please configure the instrument parameter in DIAClient"
            )
        self.gain_file = "/sf/%s/config/jungfrau/gainMaps" % self.instrument
        self.update_config()
Example #9
    def __init__(
        self,
        name=None,
        instrument=None,
        pgroup=None,
        gain_path="",
        pedestal_filename="",
        pedestal_directory="",
        api_address="http://sf-daq-2:10000",
        jf_channels=[],
        n_frames_default=100,
        config_default=None,
        default_file_path=None,
    ):
        if config_default:
            for cnf, cnfdict in config_default.items():
                self.__dict__[cnf + "_config"] = cnfdict
        else:
            self.writer_config = {}
            self.backend_config = {}
            self.detector_config = {}
            self.bsread_config = {}

        self.name = name
        self._default_file_path = default_file_path
        self._api_address = api_address
        self.client = DetectorIntegrationClient(api_address)
        print("\nDetector Integration API on %s" % api_address)
        if pgroup:
            self.pgroup = int("".join([s for s in pgroup if s.isdigit()]))
        else:
            self.pgroup = None
        self.n_frames = n_frames_default
        self.jf_channels = jf_channels
        self.pede_file = pedestal_filename
        self.pedestal_directory = pedestal_directory
        self.gain_path = gain_path
        self.instrument = instrument
        if instrument is None:
            print(
                "ERROR: please configure the instrument parameter in DIAClient"
            )
        self.update_config()
        self.active_clients = list(
            self.get_active_clients()["clients_enabled"].keys())
        self.jf_channels = list(x for x in self.active_clients
                                if x != "bsread")
Example #10
def run(api_address,
        filename,
        directory,
        uid,
        period,
        exptime,
        numberFrames,
        trigger,
        analyze,
        number_bad_modules,
        instrument=""):
    if api_address == "":
        print(
            "[ERROR] Please specify an API address, like http://sf-daq-alvra:10000 (Alvra) or http://sf-daq-bernina:10000 (Bernina)"
        )
        return
    if uid == 0:
        print("[ERROR] Please specify the user id (the pgroup)")
        return
    if directory == "":
        print("[ERROR] Please specify an output directory")
        return

    client = DetectorIntegrationClient(api_address)

    #client.get_status() empty at this moment

    try:
        writer_config = {
            "output_file": directory + "/" + filename,
            "user_id": uid,
            "n_frames": numberFrames,
            "general/user": str(uid),
            "general/process": __name__,
            "general/created": str(datetime.now()),
            "general/instrument": instrument
        }

        if trigger == 0:
            detector_config = {
                "period": period,
                "exptime": exptime,
                "frames": numberFrames,
                'cycles': 1,
                "dr": 16
            }
        else:
            detector_config = {
                "period": period,
                "exptime": exptime,
                "frames": 1,
                'cycles': numberFrames,
                "timing": "trigger",
                "dr": 16
            }

        backend_config = {"n_frames": numberFrames, "bit_depth": 16}

        bsread_config = {
            'output_file': '/dev/null',
            'user_id': uid,
            "general/user": str(uid),
            "general/process": __name__,
            "general/created": str(datetime.now()),
            "general/instrument": instrument
        }

        client.reset()

        configuration = {
            "writer": writer_config,
            "backend": backend_config,
            "detector": detector_config,
            "bsread": bsread_config
        }

        client.set_config(configuration)
        print(client.get_config())

        if trigger == 1:
            print(
                "\nPedestal run use external trigger. To have enough statistics --period should be set to right value. Currently %d Hz\n"
                % int(1 / period))

        sleepTime = numberFrames * period / 7

        print("Resetting gain bits on Jungfrau")
        reset_bits(client)

        client.set_detector_value("setbit", "0x5d 0")
        sleep(1)  # for the moment there is a delay to make sure the detector is in highG0 mode
        print("Taking data at HG0")
        client.start()

        sleep(sleepTime * 3)

        client.set_detector_value("clearbit", "0x5d 0")
        print("Taking data at G0")
        sleep(sleepTime * 2)

        client.set_detector_value("setbit", "0x5d 12")
        print("Taking data at G1")
        sleep(sleepTime)

        client.set_detector_value("setbit", "0x5d 13")
        print("Taking data at G2")
        sleep(sleepTime)

        print("Waiting for acquisition to finish.")
        try:
            client.wait_for_status(["IntegrationStatus.FINISHED"],
                                   polling_interval=0.1)
        except Exception:
            print("Got IntegrationStatus ERROR")
            print(client.get_status_details())

        print("Reseting acquisition status.")
        client.reset()

        reset_bits(client)

        if analyze:
            print(
                "Running pedestal analysis. It will take some time, you can run in parallel using old pedestal files"
            )
        else:
            print(
                "Will not produce pedestal result files, do manually (it will be faster) using computing nodes:"
            )

        client_status = client.get_status_details()
        enabled_detectors = list(client_status['details'].keys())
        if 'bsread' in enabled_detectors:
            enabled_detectors.remove('bsread')
        print("Following detectors are enabled %s, will run over them" %
              enabled_detectors)

        for detector in enabled_detectors:
            print(
                "jungfrau_create_pedestals --filename %s --directory %s --verbosity 4"
                % (writer_config["output_file"] + "." + detector + ".h5",
                   os.path.join(directory.replace("raw", "res"), "")))
            if analyze:
                try:
                    subprocess.call([
                        "jungfrau_create_pedestals", "--filename",
                        writer_config["output_file"] + "." + detector + ".h5",
                        "--directory",
                        os.path.join(directory.replace("raw", "res"),
                                     ""), "--verbosity", "4"
                    ])
                except:
                    print(
                        "Pedestal analysis failed for detector %s. Do manually."
                        % detector)

        print("Done.")

    except KeyboardInterrupt:

        print("CTRL-C caught, stopping and resetting.")

        try:
            client.stop()
            client.reset()
            reset_bits(client)
        except:
            raise Exception(
                "Cannot stop the integration. Check status details or reset services."
            )

    print("Pedestal run data saved in %s" % writer_config["output_file"])

    return configuration
Example #11
def run_jungfrau(n_frames,
                 save=True,
                 exptime=0.000010,
                 outfile="",
                 outdir="",
                 uid=16852,
                 api_address="http://sf-daq-1:10001",
                 gain_filename="",
                 pede_filename="",
                 is_HG0=False,
                 instrument=""):  # caput=False):

    client = DetectorIntegrationClient(api_address)

    writer_config = {
        "output_file": outdir + "/" + outfile,
        "user_id": uid,
        "n_frames": n_frames,
        "general/user": str(uid),
        "general/process": __name__,
        "general/created": str(datetime.now()),
        "general/instrument": instrument
    }

    if not save:
        writer_config["output_file"] = "/dev/null"

    detector_config = {
        "exptime": exptime,
        "frames": 1,
        'cycles': n_frames,
        "timing": "trigger",
        "dr": 16
    }

    backend_config = {"n_frames": n_frames, "bit_depth": 16}

    bsread_config = {
        'output_file': outdir + "/" + outfile,
        'user_id': uid,
        "general/user": str(uid),
        "general/process": __name__,
        "general/created": str(datetime.now()),
        "general/instrument": instrument
    }

    if gain_filename != "" or pede_filename != "":
        backend_config["gain_corrections_filename"] = gain_filename
        backend_config["gain_corrections_dataset"] = "gains"
        backend_config["pede_corrections_filename"] = pede_filename
        backend_config["pede_corrections_dataset"] = "gains"
        backend_config["pede_mask_dataset"] = "pixel_mask"
        backend_config["activate_corrections_preview"] = True
        print("Corrections in online viewer activated")

    if is_HG0:
        backend_config["is_HG0"] = True
        detector_config["setbit"] = "0x5d 0"
        print("Running in highG0 mode")
    else:
        client.set_detector_value("clearbit", "0x5d 0")
        print("Running in normal mode (not highG0)")

    try:
        client.reset()

        configuration = {
            "writer": writer_config,
            "backend": backend_config,
            "detector": detector_config,
            "bsread": bsread_config
        }

        client.set_config(configuration)

        print(client.get_config())

        print("Starting acquisition")
        client.start()

        try:
            client.wait_for_status(["IntegrationStatus.FINISHED"],
                                   polling_interval=0.1)
        except:
            print("Got IntegrationStatus ERROR")
            print(client.get_status())
            print(client.get_status_details())

        print("Stopping acquisition")
        client.reset()

        print("Done")
    except KeyboardInterrupt:
        print("Caught CTRL-C, resetting")
        client.reset()
Example #12
    def test_client_workflow(self):
        client = DetectorIntegrationClient()

        client.reset()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        writer_config = ({"output_file": "/tmp/test.h5",
                          "n_frames": 100,
                          "user_id": 0,
                          "group_id": 0})

        backend_config = {"bit_depth": 16,
                          "n_frames": 100}

        detector_config = {"period": 0.1,
                           "frames": 100,
                           "exptime": 0.01,
                           "dr": 16}

        configuration = {"writer": writer_config,
                         "backend": backend_config,
                         "detector": detector_config}

        response = client.set_config(configuration)

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        client.start()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.RUNNING")

        client.stop()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        with self.assertRaisesRegex(Exception, "Cannot start acquisition"):
            client.start()

        client.set_last_config()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        client.start()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.RUNNING")

        client.stop()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        with self.assertRaisesRegex(Exception, "n_frames"):
            client.update_config({"writer": {"user_id": 1},
                                  "backend": {"n_frames": 50},
                                  "detector": {"frames": 50}})

        response = client.update_config({"writer": {"n_frames": 50,
                                                    "user_id": 1},
                                         "backend": {"n_frames": 50},
                                         "detector": {"frames": 50}})

        writer_config["user_id"] = 1
        writer_config["n_frames"] = 50
        backend_config["n_frames"] = 50
        detector_config["frames"] = 50

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        response = client.update_config({"writer": {"group_id": 1}})

        writer_config["group_id"] = 1

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        client.reset()

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.INITIALIZED")

        response = client.set_last_config()

        self.assertDictEqual(response["config"]["writer"], writer_config)
        self.assertDictEqual(response["config"]["backend"], backend_config)
        self.assertDictEqual(response["config"]["detector"], detector_config)

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        self.assertEqual(client.get_detector_value("frames"), response["config"]["detector"]["frames"])

        client.reset()

        client.set_config_from_file(os.path.join(os.path.dirname(os.path.realpath(__file__)), "debug_config.json"))

        self.assertEqual(client.get_status()["status"], "IntegrationStatus.CONFIGURED")

        self.assertTrue("server_info" in client.get_server_info())

        client.kill()
Example #13
class DIAAcquisition(BaseAcquisition):
    def __init__(self,
                 instrument,
                 pgroup,
                 default_data_base_dir="static_data",
                 default_channels=None,
                 default_dir=None,
                 api_address=None):
        self.instrument = instrument
        self.pgroup = pgroup
        self.default_data_base_dir = default_data_base_dir

        self.config = DIAConfig(instrument, pgroup)
        self.paths = SwissFELPaths(instrument, pgroup)

        if not default_channels:
            default_channel_list = self.paths.default_channel_list
            default_channels = Channels(default_channel_list)

        if not default_dir:
            default_dir = self.paths.raw

        self.default_channels = default_channels
        self.default_dir = default_dir

        if not api_address:
            api_address = "http://sf-daq-{}:10000".format(instrument)

        self.api_address = api_address
        self.client = DetectorIntegrationClient(api_address)

        self.current_task = None

    def acquire(self,
                filename=None,
                data_base_dir=None,
                channels=None,
                n_pulses=100,
                use_default_dir=True,
                is_HG0=False,
                wait=True):
        if not filename:
            filename = "/dev/null"
        else:
            if data_base_dir is None:
                print(
                    "No base directory specified, using default base directory."
                )
                data_base_dir = self.default_data_base_dir
            filename = os.path.join(data_base_dir, filename)

            if use_default_dir:
                filename = os.path.join(self.default_dir, filename)

            filenames = self.make_all_filenames(filename)
            if not can_create_all_files(filenames):
                return

        if channels is None:
            print("No channels specified, using default channel list.")
            channels = self.default_channels

        if not is_HG0:
            self.clear_HG0_bit()

        gain_file = self.paths.gain
        pede_file = self.get_last_pedestal()

        cfg = self.config.to_dict(filename=filename,
                                  channels=channels,
                                  n_pulses=n_pulses,
                                  gain_file=gain_file,
                                  pede_file=pede_file,
                                  is_HG0=is_HG0)
        self.set_config(cfg)

        def _acquire():
            self.client.start()
            self.wait_until_finished()
            self.client.reset()

        task = DAQTask(_acquire,
                       stopper=self.client.stop,
                       filenames=filenames,
                       hold=False)
        self.current_task = task

        if wait:
            try:
                task.wait()
            except KeyboardInterrupt:
                print("Stopped current DAQ task:")

        return task

    def make_all_filenames(self, base):
        res = []
        for client in self.active_clients:
            client = client.upper()
            fn = "{}.{}.h5".format(base, client)
            res.append(fn)
        return res

    @property
    def active_clients(self):
        cs = self.client.get_clients_enabled()
        cs = cs["clients_enabled"]
        cs = cs.keys()
        return sorted(cs)

    def clear_HG0_bit(self):
        self.client.set_detector_value("clearbit", "0x5d 0")

    def set_config(self, cfg):
        self.client.reset()
        self.client.set_config(cfg)
        self.client.wait_for_status("IntegrationStatus.CONFIGURED")

    def wait_until_finished(self, wait_time=0.5):
        while True:
            if self.status == "FINISHED":  #TODO: in ("INITIALIZED", "DETECTOR_STOPPED", "BSREAD_STILL_RUNNING", "FINISHED") ?
                break
            sleep(wait_time)

    def wait_while_running(self, wait_time=0.5):
        while True:
            if self.status != "RUNNING":  #TODO: "BSREAD_STILL_RUNNING" ?
                break
            sleep(wait_time)

    @property
    def status(self):
        stat = self.client.get_status()
        stat = stat["status"]
        prefix = "IntegrationStatus."
        if stat.startswith(prefix):
            stat = stat[len(prefix):]
        return stat

    def get_last_pedestal(self):
        return find_last_pedestal(self.active_clients, self.paths.pede)

    def take_pedestal(self,
                      analyze=True,
                      n_pulses=1000,
                      n_bad_modules=0,
                      freq=25,
                      user=None):
        instrument = self.instrument
        pgroup = self.pgroup
        api_address = self.api_address
        raw_dir = self.paths.raw
        res_dir = self.paths.res
        exptime = EXPTIME[instrument]
        return take_pedestal(instrument, pgroup, api_address, raw_dir, res_dir,
                             analyze, n_pulses, n_bad_modules, freq, exptime,
                             user)

    def __repr__(self):
        clients = self.active_clients
        clients = ", ".join(clients)
        return "Detector Integration API on {} (status: {})\nClients: {}".format(
            self.api_address, self.status, clients)
Example #14
    def test_update_config(self):
        client = DetectorIntegrationClient()

        config = client.get_config()
        self.assertEqual(config["config"]["backend"], {})
        self.assertEqual(config["config"]["writer"], {})
        self.assertEqual(config["config"]["detector"], {})

        detector_config = {"frames": 10000, "dr": 16, "period": 0.001, "exptime": 0.0001}
        backend_config = {"n_frames": 10000, "bit_depth": 16}
        writer_config = {"user_id": 16371, "output_file": "something", "n_frames": 10000}

        configuration = {"detector": detector_config,
                         "backend": backend_config,
                         "writer": writer_config}

        client.update_config(configuration)

        configuration = client.get_config()["config"]

        self.assertEqual(configuration["detector"], detector_config)
        self.assertEqual(configuration["writer"], writer_config)
        self.assertEqual(configuration["backend"], backend_config)

        with self.assertRaisesRegex(Exception, "Invalid config"):
            client.update_config({"detector": {"dr": 32}})

        client.update_config({"detector": {"dr": 32},
                              "backend": {"bit_depth": 32}})

        detector_config["dr"] = 32
        backend_config["bit_depth"] = 32

        configuration = client.get_config()["config"]
        self.assertEqual(configuration["detector"], detector_config)
        self.assertEqual(configuration["writer"], writer_config)
        self.assertEqual(configuration["backend"], backend_config)
Example #15
class DIAClient:
    def __init__(self, Id, instrument=None, api_address=None, jf_name=None):
        self.Id = Id
        self._api_address = api_address
        self.client = DetectorIntegrationClient(api_address)
        print("\nDetector Integration API on %s" % api_address)
        # No pgroup by default
        self.pgroup = 0
        self.n_frames = 100
        self.jf_name = jf_name
        self.pede_file = ""
        self.gain_file = ""
        self.instrument = instrument
        if instrument is None:
            print(
                "ERROR: please configure the instrument parameter in DIAClient"
            )
        self.update_config()

    def update_config(self):
        self.writer_config = {
            "output_file": "/sf/%s/data/p%d/raw/test_data.h5" % (self.instrument, self.pgroup),
            "user_id": self.pgroup,
            "n_frames": self.n_frames,
            "general/user": str(self.pgroup),
            "general/process": __name__,
            "general/created": str(datetime.now()),
            "general/instrument": self.instrument,
            # "general/correction": "test"
        }

        self.backend_config = {
            "n_frames": self.n_frames,
            "bit_depth": 16,
            "gain_corrections_filename": self.
            gain_file,  # "/sf/alvra/config/jungfrau/jungfrau_4p5_gaincorrections_v0.h5",
            # "gain_corrections_dataset": "gains",
            # "pede_corrections_filename": "/sf/alvra/data/res/p%d/pedestal_20171210_1628_res.h5" % self.pgroup,
            # "pede_corrections_dataset": "gains",
            # "pede_mask_dataset": "pixel_mask",
            # "activate_corrections_preview": True,
            # FIXME: HARDCODED!!!
            "is_HG0": False,
        }

        if self.pede_file != "":
            self.backend_config["gain_corrections_filename"] = (
                self.gain_file
            )  # "/sf/alvra/config/jungfrau/jungfrau_4p5_gaincorrections_v0.h5",
            self.backend_config["gain_corrections_dataset"] = "gains"
            self.backend_config["pede_corrections_filename"] = (
                self.pede_file
            )  # "/sf/alvra/data/res/p%d/pedestal_20171210_1628_res.h5" % self.pgroup,
            self.backend_config["pede_corrections_dataset"] = "gains"
            self.backend_config["pede_mask_dataset"] = "pixel_mask"
            self.backend_config["activate_corrections_preview"] = True
        else:
            self.backend_config["pede_corrections_dataset"] = "gains"
            self.backend_config["pede_mask_dataset"] = "pixel_mask"
            self.backend_config["gain_corrections_filename"] = ""
            self.backend_config["pede_corrections_filename"] = ""
            self.backend_config["activate_corrections_preview"] = False

        self.detector_config = {
            "timing": "trigger",
            # FIXME: HARDCODED
            "exptime": 0.000005,
            "cycles": self.n_frames,
            # "delay"  : 0.001992,
            "frames": 1,
            "dr": 16,
        }

        # Not needed anymore?
        # default_channels_list = parseChannelListFile(
        #    '/sf/alvra/config/com/channel_lists/default_channel_list')

        self.bsread_config = {
            "output_file": "/sf/%s/data/p%d/raw/test_bsread.h5" % (self.instrument, self.pgroup),
            "user_id": self.pgroup,
            "general/user": str(self.pgroup),
            "general/process": __name__,
            "general/created": str(datetime.now()),
            "general/instrument": self.instrument,
            # 'Npulses':100,
            # 'channels': default_channels_list
        }

    #        self.default_channels_list = jungfrau_utils.load_default_channel_list()

    def reset(self):
        self.client.reset()
        # pass

    def get_status(self):
        return self.client.get_status()

    def get_config(self):
        config = self.client.get_config()
        return config

    def set_pgroup(self, pgroup):
        self.pgroup = pgroup
        self.update_config()

    def set_bs_channels(self):
        print(
            "Please update /sf/%s/config/com/channel_lists/default_channel_list and restart all services on the DAQ server"
            % self.instrument)

    def set_config(self):
        self.reset()
        self.client.set_config({
            "writer": self.writer_config,
            "backend": self.backend_config,
            "detector": self.detector_config,
            "bsread": self.bsread_config,
        })

    def check_still_running(self, time_interval=0.5):
        cfg = self.get_config()
        running = True
        while running:
            if not self.get_status()["status"][-7:] == "RUNNING":
                running = False
                break
            #            elif not self.get_status()['status'][-20:]=='BSREAD_STILL_RUNNING':
            #                running = False
            #                break
            else:
                sleep(time_interval)

    def take_pedestal(self,
                      n_frames,
                      analyze=True,
                      n_bad_modules=0,
                      update_config=True):
        from jungfrau_utils.scripts.jungfrau_run_pedestals import (
            run as jungfrau_utils_run, )

        directory = "/sf/%s/data/p%d/raw" % (self.instrument, self.pgroup)
        if not os.path.exists(directory):
            print("Directory %s not existing, creating it" % directory)
            os.makedirs(directory)

        res_dir = directory.replace("/raw/", "/res/")
        if not os.path.exists(res_dir):
            print("Directory %s not existing, creating it" % res_dir)
            os.makedirs(res_dir)
        filename = "pedestal_%s.h5" % datetime.now().strftime("%Y%m%d_%H%M")
        period = 0.02  # for 25 Hz this is 0.04, for 10 Hz this 0.1
        jungfrau_utils_run(
            self._api_address,
            filename,
            directory,
            self.pgroup,
            period,
            self.detector_config["exptime"],
            n_frames,
            1,
            analyze,
            n_bad_modules,
            self.instrument,
            self.jf_name,
        )

        if update_config:
            self.pede_file = (os.path.join(directory, filename).replace(
                "raw/", "res/").replace(".h5", "_res.h5"))
            print("Pedestal file updated to %s" % self.pede_file)
        return self.pede_file

    def start(self):
        self.client.start()
        print("start acquisition")
        pass

    def stop(self):
        self.client.stop()
        print("stop acquisition")
        pass

    def config_and_start_test(self):
        self.reset()
        self.set_config()
        self.start()
        pass

    def wait_for_status(self, *args, **kwargs):
        return self.client.wait_for_status(*args, **kwargs)

    def acquire(self,
                file_name=None,
                Npulses=100,
                JF_factor=1,
                bsread_padding=0):
        """
        JF_factor?
        bsread_padding?
        """
        file_rootdir = "/sf/%s/data/p%d/raw/" % (self.instrument, self.pgroup)

        if file_name is None:
            # FIXME /dev/null crashes the data taking (h5py can't close /dev/null and crashes)
            print("Not saving any data, as file_name is not set")
            file_name_JF = file_rootdir + "DelMe" + "_JF4p5M.h5"
            file_name_bsread = file_rootdir + "DelMe" + ".h5"
        else:
            # FIXME hardcoded
            file_name_JF = file_rootdir + file_name + "_JF4p5M.h5"
            file_name_bsread = file_rootdir + file_name + ".h5"

        if self.pgroup == 0:
            raise ValueError("Please use set_pgroup() to set a pgroup value.")

        def acquire():
            self.n_frames = Npulses * JF_factor
            self.update_config()
            # self.detector_config.update({
            #    'cycles': n_frames})
            self.writer_config.update({
                "output_file": file_name_JF,
                #    'n_messages': n_frames
            })
            # self.backend_config.update({
            #    'n_frames': n_frames})
            self.bsread_config.update({
                "output_file": file_name_bsread,
                #    'Npulses': Npulses + bsread_padding
            })

            self.reset()
            self.set_config()
            # print(self.get_config())
            self.client.start()
            done = False

            while not done:
                stat = self.get_status()
                if stat["status"] == "IntegrationStatus.FINISHED":
                    done = True
                if stat["status"] == "IntegrationStatus.BSREAD_STILL_RUNNING":
                    done = True
                if stat["status"] == "IntegrationStatus.INITIALIZED":
                    done = True
                if stat["status"] == "IntegrationStatus.DETECTOR_STOPPED":
                    done = True
                sleep(0.1)

        return Acquisition(
            acquire=acquire,
            acquisition_kwargs={
                "file_names": [file_name_bsread, file_name_JF],
                "Npulses": Npulses,
            },
            hold=False,
        )

    def wait_done(self):
        self.check_running()
        self.check_still_running()
Example #16
    def test_clients_enabled(self):
        client = DetectorIntegrationClient()
        clients_enabled = client.get_clients_enabled()["clients_enabled"]

        self.assertTrue(clients_enabled["writer"])
        self.assertTrue(clients_enabled["backend"])
        self.assertTrue(clients_enabled["detector"])

        client.set_clients_enabled({})
        clients_enabled = client.get_clients_enabled()["clients_enabled"]

        self.assertTrue(clients_enabled["writer"])
        self.assertTrue(clients_enabled["backend"])
        self.assertTrue(clients_enabled["detector"])

        client.set_clients_enabled({"writer": False})
        clients_enabled = client.get_clients_enabled()["clients_enabled"]

        self.assertFalse(clients_enabled["writer"])
        self.assertTrue(clients_enabled["backend"])
        self.assertTrue(clients_enabled["detector"])

        client.set_clients_enabled({"writer": True,
                                    "backend": False,
                                    "detector": False})
        clients_enabled = client.get_clients_enabled()["clients_enabled"]

        self.assertTrue(clients_enabled["writer"])
        self.assertFalse(clients_enabled["backend"])
        self.assertFalse(clients_enabled["detector"])

        client.set_clients_enabled({"writer": False,
                                    "backend": False,
                                    "detector": False})
        clients_enabled = client.get_clients_enabled()["clients_enabled"]

        self.assertFalse(clients_enabled["writer"])
        self.assertFalse(clients_enabled["backend"])
        self.assertFalse(clients_enabled["detector"])
# Import the client.
from detector_integration_api import DetectorIntegrationClient

# Connect to the Eiger 9M DIA.
client = DetectorIntegrationClient("http://xbl-daq-29:10000")

# Make sure the status of the DIA is initialized.
client.reset()

# Write 1000 frames, as user id 11057 (gac-x12saop), to file "/sls/X12SA/Data10/gac-x12saop/tmp/dia_test.h5".
writer_config = {
    "n_frames": 1000,
    "user_id": 11057,
    "output_file": "/sls/X12SA/Data10/gac-x12saop/tmp/dia_test.h5"
}

# Expect 1000, 16 bit frames.
backend_config = {"bit_depth": 16, "n_frames": 1000}

# Acquire 1000, 16 bit images with a period of 0.02.
detector_config = {"dr": 16, "frames": 1000, "period": 0.02, "exptime": 0.0001}

configuration = {
    "writer": writer_config,
    "backend": backend_config,
    "detector": detector_config
}

# Set the configs.
client.set_config(configuration)
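
# A possible continuation (not part of the original example): start the acquisition,
# wait for it to finish, and reset the DIA, using the same calls shown in the
# examples above.
client.start()

# Block until the integration reports that writing has finished.
client.wait_for_status(["IntegrationStatus.FINISHED"], polling_interval=0.1)

# Return the DIA to the initialized state, ready for the next configuration.
client.reset()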