Example #1
def test_stage1_datalevels(tmpdir):
    """test the dl1 tool on a file not providing r1 or dl0"""
    from ctapipe.io import DataLevel, EventSource
    from ctapipe.tools.stage1 import Stage1Tool

    class DummyEventSource(EventSource):
        @classmethod
        def is_compatible(cls, path):
            with open(path, "rb") as f:
                dummy = f.read(5)
                return dummy == b"dummy"

        @property
        def datalevels(self):
            return (DataLevel.R0, )

        @property
        def is_simulation(self):
            return True

        @property
        def obs_ids(self):
            return [1]

        @property
        def subarray(self):
            return None

        def _generator(self):
            return None

    with tempfile.NamedTemporaryFile(mode="wb", suffix=".dummy") as f:
        with tempfile.NamedTemporaryFile(mode="wb", suffix=".h5") as out:
            f.write(b"dummy")
            f.flush()

            config = Path("./examples/stage1_config.json").absolute()
            tool = Stage1Tool()

            assert (run_tool(
                tool,
                argv=[
                    f"--config={config}",
                    f"--input={f.name}",
                    f"--output={out.name}",
                    "--write-images",
                    "--overwrite",
                ],
                cwd=tmpdir,
            ) == 1)
            # make sure the dummy event source was really used
            assert isinstance(tool.event_source, DummyEventSource)

            # we need to "touch" the output file again, otherwise tempfile will
            # complain it no longer exists as the tool removed it
            open(out.name, mode="a").close()
Example #2
def test_stage1_datalevels(tmpdir):
    """test the dl1 tool on a file not providing r1, dl0 or dl1a"""
    from ctapipe.io import DataLevel, EventSource
    from ctapipe.tools.stage1 import Stage1Tool

    class DummyEventSource(EventSource):
        @classmethod
        def is_compatible(cls, path):
            with open(path, "rb") as f:
                dummy = f.read(5)
                return dummy == b"dummy"

        @property
        def datalevels(self):
            return (DataLevel.R0,)

        @property
        def is_simulation(self):
            return True

        @property
        def obs_ids(self):
            return [1]

        @property
        def subarray(self):
            return None

        def _generator(self):
            return None

    # build the dummy input and output paths inside the pytest tmpdir fixture
    dummy_file = str(tmpdir / "datalevels_dummy.h5")
    out_file = str(tmpdir / "datalevels_dummy_stage1_output.h5")
    with open(dummy_file, "wb") as f:
        f.write(b"dummy")
        f.flush()

    config = Path("./examples/stage1_config.json").absolute()
    tool = Stage1Tool()

    assert (
        run_tool(
            tool,
            argv=[
                f"--config={config}",
                f"--input={dummy_file}",
                f"--output={out_file}",
                "--write-images",
                "--overwrite",
            ],
            cwd=tmpdir,
        )
        == 1
    )
    # make sure the dummy event source was really used
    assert isinstance(tool.event_source, DummyEventSource)
Example #3
def test_export_config_to_yaml():
    """ test that we can export a Tool's config to YAML"""
    import yaml
    from ctapipe.tools.stage1 import Stage1Tool

    tool = Stage1Tool()
    tool.progress_bar = True
    yaml_string = export_tool_config_to_commented_yaml(tool)

    # check round-trip back from yaml:
    config_dict = yaml.load(yaml_string, Loader=yaml.SafeLoader)

    assert config_dict["Stage1Tool"]["progress_bar"] is True
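A short follow-on sketch (not part of the original test): the dictionary recovered from the YAML round-trip can be wrapped in a traitlets Config and re-applied to a fresh tool, which is the property the exported configuration is meant to preserve. It reuses the config_dict and Stage1Tool names from the test above; Config and update_config are standard traitlets calls, not ctapipe-specific ones.

    from traitlets.config import Config

    # re-apply the exported values; the "Stage1Tool" section targets the class by name
    restored = Stage1Tool()
    restored.update_config(Config(config_dict))
    assert restored.progress_bar is True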
Example #4
def run_stage1(input_path, cwd, output_path=None):
    config = Path("./examples/stage1_config.json").absolute()

    if output_path is None:
        output_path = Path(
            tempfile.NamedTemporaryFile(suffix=".dl1.h5", dir=cwd).name
        ).absolute()

    ret = run_tool(
        Stage1Tool(),
        argv=[
            f"--config={config}",
            f"--input={input_path}",
            f"--output={output_path}",
            "--write-parameters",
            "--write-images",
            "--overwrite",
        ],
        cwd=cwd,
    )
    assert ret == 0, "Running stage1 failed"

    return output_path
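A hedged usage sketch for the helper above (not part of the original file): the test name is hypothetical, and it assumes the pytest tmp_path fixture plus the module-level GAMMA_TEST_LARGE input file that the other examples in this section already rely on.

def test_run_stage1_helper(tmp_path):
    # run the stage1 tool on the standard simtel test file and check that
    # the DL1 output file was actually written
    output_path = run_stage1(GAMMA_TEST_LARGE, cwd=tmp_path)
    assert output_path.exists()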
Example #5
def test_no_ff_tagging(tmpdir):
    """Test the ctapipe stage1 tool can read in LST real data using the event source"""
    from ctapipe.tools.stage1 import Stage1Tool
    from ctapipe.core.tool import run_tool

    tmpdir = Path(tmpdir)
    config_path = tmpdir / 'config.json'

    config = {
        'LSTEventSource': {
            "use_flatfield_heuristic": False,
            'LSTR0Corrections': {
                'drs4_pedestal_path': str(test_drs4_pedestal_path),
                'drs4_time_calibration_path': str(test_time_calib_path),
                'calibration_path': str(test_calib_path),
            },
            'PointingSource': {
                'drive_report_path': str(test_drive_report)
            },
            'EventTimeCalculator': {
                'run_summary_path': str(test_run_summary),
            },
        },
        "CameraCalibrator": {
            "image_extractor_type": "LocalPeakWindowSum",
            "LocalPeakWindowSum": {
                "window_shift": 4,
                "window_width": 8,
                "apply_integration_correction": False,
            }
        },
        "TailcutsImageCleaner": {
            "picture_threshold_pe": 6,
            "boundary_threshold_pe": 3,
            "keep_isolated_pixels": False,
            "min_picture_neighbors": 1,
        }
    }
    with config_path.open('w') as f:
        json.dump(config, f)

    tool = Stage1Tool()
    output = tmpdir / "test_dl1.h5"

    ret = run_tool(tool,
                   argv=[
                       f'--input={test_r0_path}',
                       f'--output={output}',
                       f'--config={config_path}',
                   ])
    assert ret == 0

    # test our custom default works
    assert tool.event_source.r0_r1_calibrator.gain_selector.threshold == 3500

    parameters = read_table(output, '/dl1/event/telescope/parameters/tel_001')
    assert len(parameters) == 200

    trigger = read_table(output, '/dl1/event/subarray/trigger')

    # test regression of event time calculation
    first_event_time = Time(59101.95035244, format='mjd', scale='tai')
    assert np.all((trigger['time'] - first_event_time).to_value(u.s) < 10)

    event_type_counts = np.bincount(trigger['event_type'])

    # one pedestal and one flat-field event are expected, the rest are physics data;
    # without the ff heuristic the flat-field event is not identified and keeps the
    # default type SUBARRAY, so it is counted together with the physics events
    assert event_type_counts.sum() == 200
    assert event_type_counts[EventType.FLATFIELD.value] == 0
    assert event_type_counts[EventType.SKY_PEDESTAL.value] == 1
    assert event_type_counts[EventType.SUBARRAY.value] == 199
Example #6
def test_merge(tmpdir):
    from ctapipe.tools.dl1_merge import MergeTool
    from ctapipe.tools.stage1 import Stage1Tool

    config = Path("./examples/stage1_config.json").absolute()

    tmp_dir = tempfile.TemporaryDirectory()
    in_1 = tmp_dir.name + "/test_file_1.hdf5"
    in_2 = tmp_dir.name + "/test_file_2.hdf5"
    out_all = tmp_dir.name + "/merged_file_all.hdf5"
    out_skip_images = tmp_dir.name + "/merged_file_images.hdf5"
    out_skip_parameters = tmp_dir.name + "/merged_file_parameters.hdf5"
    out_tels_dir_pattern = tmp_dir.name + "/merged_file_tels_dir_pattern.hdf5"

    assert (
        run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={in_1}",
                "--write-parameters",
                "--write-images",
                "--overwrite",
            ],
            cwd=tmpdir,
        )
        == 0
    )
    assert (
        run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={in_2}",
                "--write-parameters",
                "--write-images",
                "--overwrite",
            ],
            cwd=tmpdir,
        )
        == 0
    )

    assert (
        run_tool(
            MergeTool(),
            argv=[
                f"--i={tmp_dir.name}",
                "--p='test_file_*.hdf5'",
                f"--o={out_tels_dir_pattern}",
                "--overwrite",
                "--t=[2, 3]",
            ],
            cwd=tmpdir,
        )
        == 0
    )

    assert (
        run_tool(
            MergeTool(), argv=[in_1, in_2, f"--o={out_all}", "--overwrite"], cwd=tmpdir
        )
        == 0
    )

    assert (
        run_tool(
            MergeTool(),
            argv=[in_1, in_2, f"--o={out_skip_images}", "--overwrite", "--skip-images"],
            cwd=tmpdir,
        )
        == 0
    )

    assert (
        run_tool(
            MergeTool(),
            argv=[
                in_1,
                in_2,
                f"--o={out_skip_parameters}",
                "--overwrite",
                "--skip-parameters",
            ],
            cwd=tmpdir,
        )
        == 0
    )

    out_files_list = [
        out_all,
        out_skip_images,
        out_skip_parameters,
        out_tels_dir_pattern,
    ]

    for out_file in out_files_list:
        with tables.open_file(out_file, mode="r") as out_f, tables.open_file(
            in_1, mode="r"
        ) as in_f:

            # Check expanded tables
            assert len(out_f.root.simulation.service.shower_distribution) == 2
            assert len(out_f.root.simulation.event.subarray.shower) == 220
            assert len(out_f.root.configuration.simulation.run) == 2
            assert len(out_f.root.dl1.monitoring.subarray.pointing) == 2
            assert len(out_f.root.dl1.event.subarray.trigger) == 220
            assert len(out_f.root.dl1.event.telescope.trigger) == 918
            assert len(out_f.root.simulation.service.shower_distribution) == 2
            # Check subarray and service meta
            assert out_f.root.dl1.service["image_statistics.__table_column_meta__"]
            assert out_f.root.configuration.instrument.subarray.layout
            assert out_f.root.configuration.instrument.telescope.optics
            assert out_f.root.configuration.instrument.telescope.camera.geometry_LSTCam
            assert out_f.root.configuration.instrument.telescope.camera.readout_LSTCam

            # Check image statistics
            table_in = in_f.root["/dl1/service/image_statistics"]
            table_out = out_f.root["/dl1/service/image_statistics"]
            for row in range(len(table_in)):
                assert table_out.cols.counts[row] == np.multiply(
                    table_in.cols.counts[row], 2
                )
                assert table_out.cols.cumulative_counts[row] == np.multiply(
                    table_in.cols.cumulative_counts[row], 2
                )

            # Check telescope tables
            if out_file == out_tels_dir_pattern:
                telescope_nodes = {
                    "/dl1/monitoring/telescope/pointing",
                    "/dl1/event/telescope/images",
                    "/dl1/event/telescope/parameters",
                }
                for node in telescope_nodes:
                    assert len(out_f.list_nodes(node)) == 2
                    for tel_name in {"tel_002", "tel_003"}:
                        assert len(out_f.root[node + "/" + tel_name]) == np.multiply(
                            len(in_f.root[node + "/" + tel_name]), 2
                        )
                continue

            for tel in in_f.root.dl1.monitoring.telescope.pointing:
                assert len(
                    out_f.root.dl1.monitoring.telescope.pointing[tel.name]
                ) == np.multiply(
                    len(in_f.root.dl1.monitoring.telescope.pointing[tel.name]), 2
                )

            if out_file != out_skip_images:
                for tel in in_f.root.dl1.event.telescope.images:
                    assert len(
                        out_f.root.dl1.event.telescope.images[tel.name]
                    ) == np.multiply(
                        len(in_f.root.dl1.event.telescope.images[tel.name]), 2
                    )

            if out_file != out_skip_parameters:
                for tel in in_f.root.dl1.event.telescope.parameters:
                    assert len(
                        out_f.root.dl1.event.telescope.parameters[tel.name]
                    ) == np.multiply(
                        len(in_f.root.dl1.event.telescope.parameters[tel.name]), 2
                    )

    config = Path("./examples/stage1_config.json").absolute()
    dl1b_file = tmp_dir.name + "/dl1b_from_simtel.dl1.h5"
    assert (
        run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={dl1b_file}",
                "--write-parameters",
                "--overwrite",
            ],
            cwd=tmpdir,
        )
        == 0
    )

    # check tables were written
    with tables.open_file(dl1b_file, mode="r") as tf:
        assert tf.root.dl1
        assert tf.root.dl1.event.telescope
        assert tf.root.dl1.event.subarray
        assert tf.root.configuration.instrument.subarray.layout
        assert tf.root.configuration.instrument.telescope.optics
        assert tf.root.configuration.instrument.telescope.camera.geometry_LSTCam
        assert tf.root.configuration.instrument.telescope.camera.readout_LSTCam

        assert tf.root.dl1.monitoring.subarray.pointing.dtype.names == (
            "time",
            "array_azimuth",
            "array_altitude",
            "array_ra",
            "array_dec",
        )

    # check we can read telescope parameters
    dl1_features = pd.read_hdf(dl1b_file, "/dl1/event/telescope/parameters/tel_001")
    features = (
        "obs_id",
        "event_id",
        "tel_id",
        "hillas_intensity",
        "concentration_cog",
        "leakage_pixels_width_1",
    )
    for feature in features:
        assert feature in dl1_features.columns

    dl1a_file = tmp_dir.name + "/dl1a_from_simtel.dl1.h5"
    assert (
        run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={dl1a_file}",
                "--write-images",
                "--overwrite",
            ],
            cwd=tmpdir,
        )
        == 0
    )

    with tables.open_file(dl1a_file, mode="r") as tf:
        assert tf.root.dl1
        assert tf.root.dl1.event.telescope
        assert tf.root.dl1.event.subarray
        assert tf.root.configuration.instrument.subarray.layout
        assert tf.root.configuration.instrument.telescope.optics
        assert tf.root.configuration.instrument.telescope.camera.geometry_LSTCam
        assert tf.root.configuration.instrument.telescope.camera.readout_LSTCam
        assert tf.root.dl1.event.telescope.images.tel_001
        dl1_image = tf.root.dl1.event.telescope.images.tel_001
        assert "image_mask" in dl1_image.dtype.names
        assert "image" in dl1_image.dtype.names
        assert "peak_time" in dl1_image.dtype.names
Example #7
def test_stage_1_dl1(tmpdir, dl1_image_file, dl1_parameters_file):
    from ctapipe.tools.stage1 import Stage1Tool

    config = Path("./examples/stage1_config.json").absolute()
    # DL1A file as input
    dl1b_from_dl1a_file = str(tmpdir / "dl1b_from_dl1a.dl1.h5")
    assert (
        run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={dl1_image_file}",
                f"--output={dl1b_from_dl1a_file}",
                "--write-parameters",
                "--overwrite",
            ],
            cwd=tmpdir,
        )
        == 0
    )

    # check tables were written
    with tables.open_file(dl1b_from_dl1a_file, mode="r") as tf:
        assert tf.root.dl1
        assert tf.root.dl1.event.telescope
        assert tf.root.dl1.event.subarray
        assert tf.root.configuration.instrument.subarray.layout
        assert tf.root.configuration.instrument.telescope.optics
        assert tf.root.configuration.instrument.telescope.camera.geometry_LSTCam
        assert tf.root.configuration.instrument.telescope.camera.readout_LSTCam

        assert tf.root.dl1.monitoring.subarray.pointing.dtype.names == (
            "time",
            "array_azimuth",
            "array_altitude",
            "array_ra",
            "array_dec",
        )

    # check we can read telescope parameters
    dl1_features = pd.read_hdf(
        dl1b_from_dl1a_file, "/dl1/event/telescope/parameters/tel_001"
    )
    features = (
        "obs_id",
        "event_id",
        "tel_id",
        "hillas_intensity",
        "concentration_cog",
        "leakage_pixels_width_1",
    )
    for feature in features:
        assert feature in dl1_features.columns

    # DL1B file as input
    assert (
        run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={dl1_parameters_file}",
                f"--output={tmp_dir.name + '/dl1b_from_dl1b.dl1.h5'}",
                "--write-parameters",
                "--overwrite",
            ],
            cwd=tmpdir,
        )
        == 1
    )
Example #8
def test_merge(tmpdir):
    from ctapipe.tools.dl1_merge import MergeTool
    from ctapipe.tools.stage1 import Stage1Tool

    config = Path("./examples/stage1_config.json").absolute()

    with tempfile.NamedTemporaryFile(suffix=".hdf5") as f1, \
            tempfile.NamedTemporaryFile(suffix=".hdf5") as f2, \
            tempfile.NamedTemporaryFile(suffix=".hdf5") as out_all, \
            tempfile.NamedTemporaryFile(suffix=".hdf5") as out_skip_images, \
            tempfile.NamedTemporaryFile(suffix=".hdf5") as out_skip_parameters:
        assert (run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={f1.name}",
                "--write-parameters",
                "--write-images",
                "--overwrite",
            ],
            cwd=tmpdir,
        ) == 0)
        assert (run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={f2.name}",
                "--write-parameters",
                "--write-images",
                "--overwrite",
            ],
            cwd=tmpdir,
        ) == 0)

        assert (run_tool(
            MergeTool(),
            argv=[
                f"{f1.name}", f"{f2.name}", f"--o={out_all.name}",
                "--overwrite"
            ],
            cwd=tmpdir,
        ) == 0)

        assert (run_tool(
            MergeTool(),
            argv=[
                f"{f1.name}",
                f"{f2.name}",
                f"--o={out_skip_images.name}",
                "--overwrite",
                "--skip-images",
            ],
            cwd=tmpdir,
        ) == 0)

        assert (run_tool(
            MergeTool(),
            argv=[
                f"{f1.name}",
                f"{f2.name}",
                f"--o={out_skip_parameters.name}",
                "--overwrite",
                "--skip-parameters",
            ],
            cwd=tmpdir,
        ) == 0)

        out_files_list = [
            out_all.name, out_skip_images.name, out_skip_parameters.name
        ]

        for out_file in out_files_list:
            with tables.open_file(out_file, mode="r") as out_f, \
                    tables.open_file(f1.name, mode="r") as in_f:

                # Check expanded tables
                assert len(
                    out_f.root.simulation.service.shower_distribution) == 2
                assert len(out_f.root.simulation.event.subarray.shower) == 220
                assert len(out_f.root.configuration.simulation.run) == 2
                assert len(out_f.root.dl1.monitoring.subarray.pointing) == 2
                assert len(out_f.root.dl1.event.subarray.trigger) == 220
                assert len(out_f.root.dl1.event.telescope.trigger) == 918
                assert len(
                    out_f.root.simulation.service.shower_distribution) == 2
                # Check subarray and service meta
                assert out_f.root.dl1.service[
                    "image_statistics.__table_column_meta__"]
                assert out_f.root.configuration.instrument.subarray.layout
                assert out_f.root.configuration.instrument.telescope.optics
                assert (out_f.root.configuration.instrument.telescope.camera.
                        geometry_LSTCam)
                assert (out_f.root.configuration.instrument.telescope.camera.
                        readout_LSTCam)

                # Check image statistics
                table_in = in_f.root["/dl1/service/image_statistics"]
                table_out = out_f.root["/dl1/service/image_statistics"]
                for row in range(len(table_in)):
                    assert table_out.cols.counts[row] == np.multiply(
                        table_in.cols.counts[row], 2)
                    assert table_out.cols.cumulative_counts[
                        row] == np.multiply(
                            table_in.cols.cumulative_counts[row], 2)

                # Check telescope tables
                for tel in in_f.root.dl1.monitoring.telescope.pointing:
                    assert len(out_f.root.dl1.monitoring.telescope.pointing[
                        tel.name]) == np.multiply(
                            len(in_f.root.dl1.monitoring.telescope.pointing[
                                tel.name]), 2)

                if out_file != out_skip_images.name:
                    for tel in in_f.root.dl1.event.telescope.images:
                        assert len(out_f.root.dl1.event.telescope.images[
                            tel.name]) == np.multiply(
                                len(in_f.root.dl1.event.telescope.images[
                                    tel.name]), 2)

                if out_file != out_skip_parameters.name:
                    for tel in in_f.root.dl1.event.telescope.parameters:
                        assert len(out_f.root.dl1.event.telescope.parameters[
                            tel.name]) == np.multiply(
                                len(in_f.root.dl1.event.telescope.parameters[
                                    tel.name]), 2)
Example #9
def test_stage_1(tmpdir):
    from ctapipe.tools.stage1 import Stage1Tool

    config = Path("./examples/stage1_config.json").absolute()
    with tempfile.NamedTemporaryFile(suffix=".hdf5") as f:
        assert (run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={f.name}",
                "--write-parameters",
                "--overwrite",
            ],
            cwd=tmpdir,
        ) == 0)

        # check tables were written
        with tables.open_file(f.name, mode="r") as tf:
            assert tf.root.dl1
            assert tf.root.dl1.event.telescope
            assert tf.root.dl1.event.subarray
            assert tf.root.configuration.instrument.subarray.layout
            assert tf.root.configuration.instrument.telescope.optics
            assert tf.root.configuration.instrument.telescope.camera.geometry_LSTCam
            assert tf.root.configuration.instrument.telescope.camera.readout_LSTCam

            assert tf.root.dl1.monitoring.subarray.pointing.dtype.names == (
                "time",
                "array_azimuth",
                "array_altitude",
                "array_ra",
                "array_dec",
            )

        # check we can read telescope parameters
        dl1_features = pd.read_hdf(f.name,
                                   "/dl1/event/telescope/parameters/tel_001")
        features = (
            "obs_id",
            "event_id",
            "tel_id",
            "hillas_intensity",
            "concentration_cog",
            "leakage_pixels_width_1",
        )
        for feature in features:
            assert feature in dl1_features.columns

    with tempfile.NamedTemporaryFile(suffix=".hdf5") as f:
        assert (run_tool(
            Stage1Tool(),
            argv=[
                f"--config={config}",
                f"--input={GAMMA_TEST_LARGE}",
                f"--output={f.name}",
                "--write-images",
                "--overwrite",
            ],
            cwd=tmpdir,
        ) == 0)

        with tables.open_file(f.name, mode="r") as tf:
            assert tf.root.dl1
            assert tf.root.dl1.event.telescope
            assert tf.root.dl1.event.subarray
            assert tf.root.configuration.instrument.subarray.layout
            assert tf.root.configuration.instrument.telescope.optics
            assert tf.root.configuration.instrument.telescope.camera.geometry_LSTCam
            assert tf.root.configuration.instrument.telescope.camera.readout_LSTCam
            assert tf.root.dl1.event.telescope.images.tel_001
            dl1_image = tf.root.dl1.event.telescope.images.tel_001
            assert "image_mask" in dl1_image.dtype.names
            assert "image" in dl1_image.dtype.names
            assert "peak_time" in dl1_image.dtype.names