Example No. 1
    # tmpdir is a pytest fixture, so this has to be an autouse fixture
    # rather than a plain setup_method (which can't receive fixtures)
    @pytest.fixture(autouse=True)
    def setup_method(self, tmpdir):
        self.pth = os.path.dirname(os.path.abspath(__file__))

        self.f113 = PlatypusNexus(os.path.join(self.pth, 'PLP0011613.nx.hdf'))
        self.f641 = PlatypusNexus(os.path.join(self.pth, 'PLP0011641.nx.hdf'))
        self.cwd = os.getcwd()

        self.tmpdir = tmpdir.strpath
        os.chdir(self.tmpdir)
        return 0
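These examples are lifted from refnx's test suite and omit their imports. A preamble along the following lines should make them runnable (a sketch: the refnx import paths follow the library's public layout as far as I know, and the SpinChannel path in particular is an assumption):

import os
from os.path import join as pjoin
import warnings

import numpy as np
from numpy.testing import assert_, assert_almost_equal, assert_equal
import pytest

from refnx.reduce import PlatypusNexus, PlatypusReduce, event
from refnx.reduce import platypusnexus as plp  # plp.accumulate_HDF_files
# assumption: the SpinChannel enum lives alongside PlatypusNexus in
# recent refnx versions
from refnx.reduce.platypusnexus import SpinChannel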
Example No. 2
    @pytest.fixture(autouse=True)
    def setup_method(self, tmpdir, data_directory):
        self.pth = pjoin(data_directory, "reduce")

        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            self.f113 = PlatypusNexus(pjoin(self.pth, "PLP0011613.nx.hdf"))
            self.f641 = PlatypusNexus(pjoin(self.pth, "PLP0011641.nx.hdf"))
        self.cwd = os.getcwd()

        self.tmpdir = tmpdir.strpath
        os.chdir(self.tmpdir)
        return 0
Example No. 3
    @pytest.fixture(autouse=True)
    def setup_method(self, tmpdir):
        self.pth = os.path.dirname(os.path.abspath(__file__))

        with warnings.catch_warnings():
            warnings.simplefilter('ignore', RuntimeWarning)
            self.f113 = PlatypusNexus(
                os.path.join(self.pth, 'PLP0011613.nx.hdf'))
            self.f641 = PlatypusNexus(
                os.path.join(self.pth, 'PLP0011641.nx.hdf'))
        self.cwd = os.getcwd()

        self.tmpdir = tmpdir.strpath
        os.chdir(self.tmpdir)
        return 0
Example No. 4
    def test_accumulate_files_reduce(self):
        # accumulate a file with itself; doubling the counts should
        # improve the statistics
        fnames = ['PLP0000708.nx.hdf', 'PLP0000708.nx.hdf']
        pths = [os.path.join(self.pth, fname) for fname in fnames]
        plp.accumulate_HDF_files(pths)

        with warnings.catch_warnings():
            warnings.simplefilter('ignore', RuntimeWarning)
            # it should be processable
            fadd = PlatypusNexus(
                os.path.join(os.getcwd(), 'ADD_PLP0000708.nx.hdf'))
            fadd.process()

            # it should also be reducible
            reducer = PlatypusReduce(
                os.path.join(self.pth, 'PLP0000711.nx.hdf'))

            datasets, reduced = reducer.reduce(
                os.path.join(os.getcwd(), 'ADD_PLP0000708.nx.hdf'))
            assert_('y' in reduced)

            # the error bars should be smaller
            datasets2, reduced2 = reducer.reduce(
                os.path.join(self.pth, 'PLP0000708.nx.hdf'))

            assert_(np.all(reduced['y_err'] < reduced2['y_err']))
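Why accumulating a file with itself should shrink the error bars: detector counts are Poisson distributed, so a bin with N counts carries an absolute uncertainty of sqrt(N), and doubling the counts reduces the relative error by a factor of sqrt(2). A quick standalone check (plain numpy, independent of refnx):

import numpy as np

counts = np.array([100.0, 400.0, 2500.0])  # counts in some detector bins
rel_err_single = np.sqrt(counts) / counts            # 1 / sqrt(N)
rel_err_summed = np.sqrt(2 * counts) / (2 * counts)  # 1 / sqrt(2N)

# doubling the counts shrinks the relative error by sqrt(2)
assert np.allclose(rel_err_single / rel_err_summed, np.sqrt(2))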
Example No. 5
    def test_PNR_metadata(self):
        self.f8861.process()
        self.f8862.process()
        self.f8863.process()
        self.f8864.process()

        # Check that we can read all of the flipper statuses in the files

        # Flipper 1 on, flipper 2 on
        assert_almost_equal(self.f8861.cat.cat["pol_flip_current"], 5.0)
        assert_almost_equal(self.f8861.cat.cat["anal_flip_current"], 4.5)

        # Flipper 1 on, flipper 2 off
        assert_almost_equal(self.f8862.cat.cat["anal_flip_current"], 0)
        assert_almost_equal(self.f8862.cat.cat["pol_flip_current"], 5.0)

        # Flipper 1 off, flipper 2 on
        assert_almost_equal(self.f8863.cat.cat["anal_flip_current"], 4.5)
        assert_almost_equal(self.f8863.cat.cat["pol_flip_current"], 0)

        # Flipper 1 off, flipper 2 off
        assert_almost_equal(self.f8864.cat.cat["anal_flip_current"], 0)
        assert_almost_equal(self.f8864.cat.cat["pol_flip_current"], 0)

        # Check SpinChannel for each file
        assert self.f8861.spin_state == SpinChannel.UP_UP
        assert self.f8862.spin_state == SpinChannel.UP_DOWN
        assert self.f8863.spin_state == SpinChannel.DOWN_UP
        assert self.f8864.spin_state == SpinChannel.DOWN_DOWN

        # test spin channel setting
        # not spin-analysed: the mode is POL, not POLANAL
        pn = PlatypusNexus(pjoin(self.pth, "PLP0016427.nx.hdf"))
        assert pn.spin_state == SpinChannel.UP_UP
        pn = PlatypusNexus(pjoin(self.pth, "PLP0016426.nx.hdf"))
        assert pn.spin_state == SpinChannel.DOWN_DOWN
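The flipper-current asserts above pin down a simple rule: a non-zero polariser flipper current marks the incident spin as up, and a non-zero analyser flipper current marks the analysed spin as up. A hypothetical standalone version of that mapping, using an illustrative stand-in for refnx's SpinChannel enum (member names mirror the real one; refnx derives spin_state internally):

from enum import Enum

class SpinChannelDemo(Enum):
    # stand-in for refnx's SpinChannel; (pol, anal) spin flags
    UP_UP = (1, 1)
    UP_DOWN = (1, 0)
    DOWN_UP = (0, 1)
    DOWN_DOWN = (0, 0)

def spin_channel(pol_flip_current, anal_flip_current):
    """Map flipper currents to a spin channel, per the asserts above."""
    pol = int(pol_flip_current > 0)    # flipper 1 energised -> 'up'
    anal = int(anal_flip_current > 0)  # flipper 2 energised -> 'up'
    return SpinChannelDemo((pol, anal))

assert spin_channel(5.0, 4.5) is SpinChannelDemo.UP_UP
assert spin_channel(5.0, 0.0) is SpinChannelDemo.UP_DOWN
assert spin_channel(0.0, 4.5) is SpinChannelDemo.DOWN_UP
assert spin_channel(0.0, 0.0) is SpinChannelDemo.DOWN_DOWN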
Example No. 6
    def test_event_same_as_detector(self):
        # histogramming the event file should reproduce the detector array
        orig_file = PlatypusNexus(os.path.join(self.path,
                                               'PLP0011613.nx.hdf'))
        orig_det = orig_file.cat.detector
        frames = [np.arange(0, 23999)]
        event_det, fb = event.process_event_stream(self.event_list,
                                                   frames,
                                                   orig_file.cat.t_bins,
                                                   orig_file.cat.y_bins,
                                                   orig_file.cat.x_bins)
        assert_equal(event_det, orig_det)

        # PlatypusNexus.process_event_stream should be the same as well
        det, fc, bm = orig_file.process_event_stream(frame_bins=[])
        assert_equal(det, orig_det)
Example No. 7
    def test_event_same_as_detector(self):
        # histogramming the event file should reproduce the detector array
        # ignore the RuntimeWarning about pixel size
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', RuntimeWarning)

            orig_file = PlatypusNexus(
                os.path.join(self.path, 'PLP0011641.nx.hdf'))

        orig_det = orig_file.cat.detector
        frames = [np.arange(0, 501744)]
        event_det, fb = event.process_event_stream(self.event_list, frames,
                                                   orig_file.cat.t_bins,
                                                   orig_file.cat.y_bins,
                                                   orig_file.cat.x_bins)
        assert_equal(event_det, orig_det)

        # PlatypusNexus.process_event_stream should be the same as well
        det, fc, bm = orig_file.process_event_stream(frame_bins=[])
        assert_equal(det, orig_det)
Example No. 8
    def test_event_same_as_detector(self, event_setup):
        # histogramming the event file should reproduce the detector array
        # ignore the RuntimeWarning about pixel size
        pth = pjoin(event_setup.data_directory, "reduce")

        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)

            orig_file = PlatypusNexus(pjoin(pth, "PLP0011641.nx.hdf"))

        orig_det = orig_file.cat.detector
        frames = [np.arange(0, 501744)]
        event_det, fb = event.process_event_stream(
            event_setup.event_list,
            frames,
            orig_file.cat.t_bins,
            orig_file.cat.y_bins,
            orig_file.cat.x_bins,
        )
        assert_equal(event_det, orig_det)

        # PlatypusNexus.process_event_stream should be the same as well
        det, fc, bm = orig_file.process_event_stream(frame_bins=[])
        assert_equal(det, orig_det)
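The comparison being made in these three examples: an event-mode file is a stream of per-neutron (frame, t, y, x) records, and histogramming the events with the instrument's t/y/x bin edges must reproduce the normally-acquired detector array. A toy sketch of the underlying idea using np.histogramdd (an illustration of the concept, not refnx's implementation; process_event_stream additionally splits events into the requested frame ranges):

import numpy as np

def histogram_events(t, y, x, t_bins, y_bins, x_bins):
    """Bin individual neutron events into a (t, y, x) detector histogram."""
    sample = np.column_stack([t, y, x])
    det, _ = np.histogramdd(sample, bins=(t_bins, y_bins, x_bins))
    return det

# toy data: 1000 events, uniformly distributed over the detector
rng = np.random.default_rng(0)
t = rng.uniform(0, 40000, 1000)  # time-of-flight
y = rng.uniform(0, 200, 1000)    # detector y pixel
x = rng.uniform(0, 30, 1000)     # detector x pixel

det = histogram_events(t, y, x,
                       np.linspace(0, 40000, 101),
                       np.linspace(0, 200, 201),
                       np.linspace(0, 30, 31))
assert det.sum() == 1000  # every event lands in exactly one bin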
Example No. 9
    def test_accumulate_files_reduce(self):
        # accumulate a file with itself; doubling the counts should
        # improve the statistics
        fnames = ["PLP0000708.nx.hdf", "PLP0000708.nx.hdf"]
        pths = [pjoin(self.pth, fname) for fname in fnames]
        plp.accumulate_HDF_files(pths)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            # it should be processable
            fadd = PlatypusNexus(pjoin(os.getcwd(), "ADD_PLP0000708.nx.hdf"))
            fadd.process()

            # it should also be reducible
            reducer = PlatypusReduce(pjoin(self.pth, "PLP0000711.nx.hdf"))

            datasets, reduced = reducer.reduce(
                pjoin(os.getcwd(), "ADD_PLP0000708.nx.hdf"))
            assert_("y" in reduced)

            # the error bars should be smaller
            datasets2, reduced2 = reducer.reduce(
                pjoin(self.pth, "PLP0000708.nx.hdf"))

            assert_(np.all(reduced["y_err"] < reduced2["y_err"]))
Example No. 10
    @pytest.fixture(autouse=True)
    def setup_method(self, tmpdir, data_directory):
        self.pth = pjoin(data_directory, "reduce")

        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            self.f113 = PlatypusNexus(pjoin(self.pth, "PLP0011613.nx.hdf"))
            self.f641 = PlatypusNexus(pjoin(self.pth, "PLP0011641.nx.hdf"))

            # These PNR datasets all have different flipper settings
            self.f8861 = PlatypusNexus(
                pjoin(self.pth, "PNR_files/PLP0008861.nx.hdf"))
            self.f8862 = PlatypusNexus(
                pjoin(self.pth, "PNR_files/PLP0008862.nx.hdf"))
            self.f8863 = PlatypusNexus(
                pjoin(self.pth, "PNR_files/PLP0008863.nx.hdf"))
            self.f8864 = PlatypusNexus(
                pjoin(self.pth, "PNR_files/PLP0008864.nx.hdf"))

        self.cwd = os.getcwd()

        self.tmpdir = tmpdir.strpath
        os.chdir(self.tmpdir)
        return 0
Example No. 11
    def reducer(self, callback=None):
        """
        Reduce all the entries in reduction_entries

        Parameters
        ----------
        callback : callable
            Function, `f(percent_finished)`, that is called with the current
            percentage progress of the reduction. Returning a falsey value
            from the callback cancels the rest of the run.
        """

        # refnx.reduce.reduce requires the cwd to be the directory that
        # the output files will be written to
        if self.output_directory:
            os.chdir(self.output_directory)

        # if no data directory was specified then assume it's the cwd
        data_directory = self.data_directory
        if not data_directory:
            data_directory = "./"

        def full_path(fname):
            f = os.path.join(data_directory, fname)
            return f

        # if the streamed directory isn't mentioned then assume it's the same
        # as the data directory
        streamed_directory = self.streamed_directory
        if not os.path.isdir(streamed_directory):
            # keep the local copy in sync; it's passed to the reducer below
            streamed_directory = self.streamed_directory = data_directory

        logging.info("-------------------------------------------------------"
                     "\nStarting reduction run")
        logging.info(
            "data_folder={data_directory}, trim_trailing=True, "
            "lo_wavelength={low_wavelength}, "
            "hi_wavelength={high_wavelength}, "
            "rebin_percent={rebin_percent}, "
            "normalise={monitor_normalisation}, "
            "background={background_subtraction} "
            "eventmode={streamed_reduction} "
            "event_folder={streamed_directory}".format(**self.__dict__))

        # sets up time slices for event reduction
        if self.streamed_reduction:
            eventmode = np.arange(self.stream_start, self.stream_end,
                                  self.stream_duration)
            eventmode = np.r_[eventmode, self.stream_end]
        else:
            eventmode = None

        # flag manual beam finding with a sentinel peak_pos of -1
        peak_pos = None
        if self.manual_beam_find and self.manual_beam_finder is not None:
            peak_pos = -1

        idx = 0

        cached_direct_beams = {}

        for row, val in self.reduction_entries.items():
            if not val["use"]:
                continue

            flood = None
            if val["flood"]:
                flood = full_path(val["flood"])

            combined_dataset = None

            # process entries one by one
            for ref, db in zip(
                ["reflect-1", "reflect-2", "reflect-3"],
                ["direct-1", "direct-2", "direct-3"],
            ):
                reflect = val[ref]
                direct = val[db]

                # if the file doesn't exist there's no point continuing
                if (not os.path.isfile(full_path(reflect))) or (
                        not os.path.isfile(full_path(direct))):
                    continue

                # open the reflected beam run
                ref_pn = PlatypusNexus(full_path(reflect))

                if direct not in cached_direct_beams:
                    cached_direct_beams[direct] = PlatypusReduce(
                        direct, data_folder=data_directory)

                reducer = cached_direct_beams[direct]

                try:
                    reduced = reducer(
                        ref_pn,
                        scale=val["scale"],
                        h5norm=flood,
                        lo_wavelength=self.low_wavelength,
                        hi_wavelength=self.high_wavelength,
                        rebin_percent=self.rebin_percent,
                        normalise=self.monitor_normalisation,
                        background=self.background_subtraction,
                        manual_beam_find=self.manual_beam_finder,
                        peak_pos=peak_pos,
                        eventmode=eventmode,
                        event_folder=streamed_directory,
                    )
                except Exception as e:
                    # a typical Exception would be a ValueError for
                    # non-overlapping angles
                    logging.info(e)
                    continue

                logging.info("Reduced {} vs {}, scale={}, angle={}".format(
                    reflect,
                    direct,
                    val["scale"],
                    reduced[1]["omega"][0, 0],
                ))

                if combined_dataset is None:
                    combined_dataset = ReflectDataset()

                    fname = basename_datafile(reflect)
                    fname_dat = os.path.join(self.output_directory,
                                             "c_{0}.dat".format(fname))
                    fname_xml = os.path.join(self.output_directory,
                                             "c_{0}.xml".format(fname))

                try:
                    combined_dataset.add_data(
                        reducer.data(),
                        requires_splice=True,
                        trim_trailing=True,
                    )
                except ValueError as e:
                    # datasets don't overlap
                    logging.info(e)
                    continue

            if combined_dataset is not None:
                # after all the angles are reduced, write the combined files
                with open(fname_dat, "wb") as f:
                    combined_dataset.save(f)
                with open(fname_xml, "wb") as f:
                    combined_dataset.save_xml(f)
                logging.info("Written combined files: {} and {}".format(
                    fname_dat, fname_xml))

            # can be used to create a progress bar
            idx += 1
            if callback is not None:
                ok = callback(100 * idx / len(self.reduction_entries))
                if not ok:
                    break

        logging.info("\nFinished reduction run"
                     "-------------------------------------------------------")