Example #1
    def test_open_close_connection(self):
        storage = DataStorage(self.test_data)

        storage.open_connection()
        assert storage.m_open is True

        storage.open_connection()
        assert storage.m_open is True

        storage.close_connection()
        assert storage.m_open is False

        storage.close_connection()
        assert storage.m_open is False

        os.remove(self.test_data)
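
The test above exercises only DataStorage.open_connection(), DataStorage.close_connection(), and the m_open flag, and shows that both calls are idempotent. A minimal sketch of wrapping that contract in a context manager; the opened_storage helper is hypothetical and not part of PynPoint:

import contextlib

@contextlib.contextmanager
def opened_storage(storage):
    # Open on entry and always close on exit; repeating either call is
    # safe, as the test above demonstrates.
    storage.open_connection()
    try:
        yield storage
    finally:
        storage.close_connection()

# Usage sketch:
# with opened_storage(DataStorage("PynPoint_database.hdf5")) as storage:
#     assert storage.m_open is True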
Example #2
    def test_create_storage_with_existing_database(self):
        np.random.seed(1)
        images = np.random.normal(loc=0, scale=2e-4, size=(10, 100, 100))

        h5f = h5py.File(self.test_data, "w")
        h5f.create_dataset("images", data=images)
        h5f.close()

        storage = DataStorage(self.test_data)
        storage.open_connection()
        data = storage.m_data_bank["images"]

        assert np.allclose(data[0, 0, 0], 0.00032486907273264834, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 1.0506056979365338e-06, rtol=limit, atol=0.)

        storage.close_connection()

        os.remove(self.test_data)
Example #3
    def __init__(self,
                 working_place_in=None,
                 input_place_in=None,
                 output_place_in=None):
        """
        Constructor of Pypeline.

        :param working_place_in: Working location of the Pypeline which needs to be a folder on the
                                 hard drive. The given folder will be used to save the central
                                 PynPoint database (an HDF5 file) in which all the intermediate
                                 processing steps are saved. Note that the HDF5 file can become
                                 very large depending on the size and number of input images.
        :type working_place_in: str
        :param input_place_in: Default input directory of the Pypeline. All ReadingModules added
                               to the Pypeline use this directory to look for input data. It is
                               possible to specify a different location for the ReadingModules
                               using their constructors.
        :type input_place_in: str
        :param output_place_in: Default result directory used to save the output of all
                                WritingModules added to the Pypeline. It is possible to specify
                                a different location for the WritingModules by using their
                                constructors.
        :type output_place_in: str

        :return: None
        """

        sys.stdout.write("Initiating PynPoint...")
        sys.stdout.flush()

        self._m_working_place = working_place_in
        self._m_input_place = input_place_in
        self._m_output_place = output_place_in

        self._m_modules = collections.OrderedDict()
        self.m_data_storage = DataStorage(
            os.path.join(working_place_in, 'PynPoint_database.hdf5'))

        self._config_init()

        sys.stdout.write(" [DONE]\n")
        sys.stdout.flush()
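
For context, a minimal sketch of calling this constructor; the directory names are placeholders and must exist beforehand, because __setattr__ (see Example #9) asserts that each assigned location is an existing directory:

import os

working = os.path.abspath("working")    # placeholder paths; create these
data_in = os.path.abspath("input")      # directories before running
data_out = os.path.abspath("output")

pipeline = Pypeline(working_place_in=working,
                    input_place_in=data_in,
                    output_place_in=data_out)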
Example #4
    def test_bad_pixel_map(self):

        bp_map = BadPixelMapModule(name_in="bp_map",
                                   dark_in_tag="dark",
                                   flat_in_tag="flat",
                                   bp_map_out_tag="bp_map",
                                   dark_threshold=0.99,
                                   flat_threshold=-0.99)

        self.pipeline.add_module(bp_map)

        self.pipeline.run()

        storage = DataStorage(self.test_dir + "/PynPoint_database.hdf5")
        storage.open_connection()

        data = storage.m_data_bank["bp_map"]

        assert data[0, 0] == 1.
        assert data[30, 30] == 1.
        assert data[10, 10] == 0.
        assert data[12, 12] == 0.
        assert data[14, 14] == 0.
        assert data[20, 20] == 0.
        assert data[22, 22] == 0.
        assert data[24, 24] == 0.
        assert np.mean(data) == 0.9993

        storage.close_connection()
Example #5
    def test_create_storage_without_existing_database(self):
        storage = DataStorage(self.test_data)
        storage.open_connection()
        storage.m_data_bank["data"] = [0, 1, 2, 5, 7]

        assert storage.m_data_bank["data"][2] == 2
        assert list(storage.m_data_bank.keys()) == ["data"]

        storage.close_connection()

        os.remove(self.test_data)
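
As the assertions show, m_data_bank exposes the dict-like h5py.File interface, so datasets can be written and read back directly. A short sketch of the same round trip outside a test; the file path is a placeholder:

import numpy as np

storage = DataStorage("PynPoint_database.hdf5")    # placeholder path
storage.open_connection()

storage.m_data_bank["data"] = [0, 1, 2, 5, 7]      # stored as an HDF5 dataset
values = np.asarray(storage.m_data_bank["data"])   # read back as a numpy array

storage.close_connection()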
Example #6
    def test_dark_and_flat_calibration(self):

        dark = DarkCalibrationModule(name_in="dark",
                                     image_in_tag="images",
                                     dark_in_tag="dark",
                                     image_out_tag="dark_cal")

        self.pipeline.add_module(dark)

        flat = FlatCalibrationModule(name_in="flat",
                                     image_in_tag="dark_cal",
                                     flat_in_tag="flat",
                                     image_out_tag="flat_cal")

        self.pipeline.add_module(flat)

        self.pipeline.run()

        storage = DataStorage(self.test_dir + "/PynPoint_database.hdf5")
        storage.open_connection()

        data = storage.m_data_bank["dark"]
        assert np.allclose(data[0, 10, 10],
                           3.528694163309295e-05,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           7.368663496379876e-07,
                           rtol=limit,
                           atol=0.)

        data = storage.m_data_bank["flat"]
        assert np.allclose(data[0, 10, 10],
                           -0.0004053528990466237,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           -4.056978234798532e-07,
                           rtol=limit,
                           atol=0.)

        storage.close_connection()
Example #7
    def test_bad_pixel_sigma_filter(self):

        sigma = BadPixelSigmaFilterModule(name_in="sigma",
                                          image_in_tag="images",
                                          image_out_tag="sigma",
                                          box=9,
                                          sigma=5,
                                          iterate=1)

        self.pipeline.add_module(sigma)

        self.pipeline.run()

        storage = DataStorage(self.test_dir + "/PynPoint_database.hdf5")
        storage.open_connection()

        data = storage.m_data_bank["sigma"]

        assert np.allclose(data[0, 0, 0],
                           0.00032486907273264834,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(data[0, 10, 10],
                           0.025022559679385093,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(data[0, 20, 20],
                           0.024962143884217046,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           6.721637736047109e-07,
                           rtol=limit,
                           atol=0.)

        storage.close_connection()
Example #8
    def test_bad_pixel_interpolation(self):

        interpolation = BadPixelInterpolationModule(
            name_in="interpolation",
            image_in_tag="images",
            bad_pixel_map_tag="bp_map",
            image_out_tag="interpolation",
            iterations=100)

        self.pipeline.add_module(interpolation)

        self.pipeline.run()

        storage = DataStorage(self.test_dir + "/PynPoint_database.hdf5")
        storage.open_connection()

        data = storage.m_data_bank["interpolation"]

        assert np.allclose(data[0, 0, 0],
                           0.00032486907273264834,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(data[0, 10, 10],
                           1.0139222106683477e-05,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(data[0, 20, 20],
                           -4.686852973820094e-05,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           3.0499629451215465e-07,
                           rtol=limit,
                           atol=0.)

        storage.close_connection()
Example #9
class Pypeline(object):
    """
    A Pypeline instance can be used to manage various processing steps. It maintains an internal
    dictionary of Pypeline steps (modules) and their names. A Pypeline has a central DataStorage
    on the hard drive which can be accessed by the various modules. The order of the modules
    depends on the order in which the steps have been added to the Pypeline. It is possible to
    run all modules attached to the Pypeline at once or to run a single module by name.
    """
    def __init__(self,
                 working_place_in=None,
                 input_place_in=None,
                 output_place_in=None):
        """
        Constructor of Pypeline.

        :param working_place_in: Working location of the Pypeline which needs to be a folder on the
                                 hard drive. The given folder will be used to save the central
                                 PynPoint database (an HDF5 file) in which all the intermediate
                                 processing steps are saved. Note that the HDF5 file can become
                                 very large depending on the size and number of input images.
        :type working_place_in: str
        :param input_place_in: Default input directory of the Pypeline. All ReadingModules added
                               to the Pypeline use this directory to look for input data. It is
                               possible to specify a different location for the ReadingModules
                               using their constructors.
        :type input_place_in: str
        :param output_place_in: Default result directory used to save the output of all
                                WritingModules added to the Pypeline. It is possible to specify
                                a different location for the WritingModules by using their
                                constructors.
        :type output_place_in: str

        :return: None
        """

        sys.stdout.write("Initiating PynPoint...")
        sys.stdout.flush()

        self._m_working_place = working_place_in
        self._m_input_place = input_place_in
        self._m_output_place = output_place_in

        self._m_modules = collections.OrderedDict()
        self.m_data_storage = DataStorage(
            os.path.join(working_place_in, 'PynPoint_database.hdf5'))

        self._config_init()

        sys.stdout.write(" [DONE]\n")
        sys.stdout.flush()

    def __setattr__(self, key, value):
        """
        This method is called every time a member / attribute of the Pypeline is changed. It checks
        whether a chosen working / input / output directory exists.

        :param key: Member or attribute name.
        :param value: New value for the given member or attribute.

        :return: None
        """

        if key in ["_m_working_place", "_m_input_place", "_m_output_place"]:
            assert os.path.isdir(str(value)), "Input directory for " + str(key) + " does not " \
                                              "exist - input requested: %s." % value

        super(Pypeline, self).__setattr__(key, value)

    @staticmethod
    def _validate(module, tags):
        """
        Internal function which is used for the validation of the pipeline. Validates a
        single module.

        :param module: The module.
        :type module: ReadingModule, WritingModule, ProcessingModule
        :param tags: Tags in the database.
        :type tags: list, str

        :return: Module validation.
        :rtype: bool, str
        """

        if isinstance(module, ReadingModule):
            tags.extend(module.get_all_output_tags())

        elif isinstance(module, WritingModule):
            for tag in module.get_all_input_tags():
                if tag not in tags:
                    return False, module.name

        elif isinstance(module, ProcessingModule):
            tags.extend(module.get_all_output_tags())
            for tag in module.get_all_input_tags():
                if tag not in tags:
                    return False, module.name

        else:
            return False, None

        return True, None

    def _config_init(self):
        """
        Internal function which initializes the configuration file. It reads PynPoint_config.ini
        in the working folder and creates this file with the default (ESO/NACO) settings in case
        the file is not present.

        :return: None
        """

        cpu = multiprocessing.cpu_count()

        default = [('INSTRUMENT', ('header', 'INSTRUME', 'str')),
                   ('NFRAMES', ('header', 'NAXIS3', 'str')),
                   ('EXP_NO', ('header', 'ESO DET EXP NO', 'str')),
                   ('DIT', ('header', 'ESO DET DIT', 'str')),
                   ('NDIT', ('header', 'ESO DET NDIT', 'str')),
                   ('PARANG_START', ('header', 'ESO ADA POSANG', 'str')),
                   ('PARANG_END', ('header', 'ESO ADA POSANG END', 'str')),
                   ('DITHER_X', ('header', 'ESO SEQ CUMOFFSETX', 'str')),
                   ('DITHER_Y', ('header', 'ESO SEQ CUMOFFSETY', 'str')),
                   ('PUPIL', ('header', 'ESO ADA PUPILPOS', 'str')),
                   ('DATE', ('header', 'DATE-OBS', 'str')),
                   ('LATITUDE', ('header', 'ESO TEL GEOLAT', 'str')),
                   ('LONGITUDE', ('header', 'ESO TEL GEOLON', 'str')),
                   ('RA', ('header', 'RA', 'str')),
                   ('DEC', ('header', 'DEC', 'str')),
                   ('PIXSCALE', ('settings', 0.027, 'float')),
                   ('MEMORY', ('settings', 1000, 'int')),
                   ('CPU', ('settings', cpu, 'int'))]
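
        # For reference, _create_config below writes these defaults to disk
        # as one INI section per group with 'key: value' pairs, roughly:
        #
        #   [header]
        #
        #   INSTRUMENT: INSTRUME
        #   NFRAMES: NAXIS3
        #   ...
        #
        #   [settings]
        #
        #   PIXSCALE: 0.027
        #   MEMORY: 1000
        #   CPU: <number of CPU cores>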

        default = collections.OrderedDict(default)
        config_dict = collections.OrderedDict()

        def _create_config(filename):
            group = None

            with open(filename, 'w') as file_obj:
                for i, item in enumerate(default):
                    if default[item][0] != group:
                        if i != 0:
                            file_obj.write('\n')
                        file_obj.write('[' + str(default[item][0]) + ']\n\n')

                    file_obj.write(item + ': ' + str(default[item][1]) + '\n')
                    group = default[item][0]

        def _read_config(config_file):
            config = configparser.ConfigParser()
            with open(config_file) as config_obj:
                config.read_file(config_obj)

            for item in default:
                if config.has_option(default[item][0], item):

                    if config.get(default[item][0], item) == "None":
                        if default[item][2] == "str":
                            config_dict[item] = "None"

                        elif default[item][2] == "float":
                            config_dict[item] = float(0.)

                        elif default[item][2] == "int":
                            config_dict[item] = int(0)

                    else:
                        if default[item][2] == "str":
                            config_dict[item] = str(
                                config.get(default[item][0], item))

                        elif default[item][2] == "float":
                            config_dict[item] = float(
                                config.get(default[item][0], item))

                        elif default[item][2] == "int":
                            config_dict[item] = int(
                                config.get(default[item][0], item))

                else:
                    config_dict[item] = default[item][1]

            return config_dict

        def _write_config(config_dict):
            hdf = h5py.File(os.path.join(self._m_working_place, 'PynPoint_database.hdf5'),
                            'a')

            if "config" in hdf:
                del hdf["config"]

            config = hdf.create_group("config")

            for i in config_dict:
                config.attrs[i] = config_dict[i]

            hdf.close()

        config_file = os.path.join(self._m_working_place, "PynPoint_config.ini")

        if not os.path.isfile(config_file):
            warnings.warn(
                "Configuration file not found. Creating PynPoint_config.ini with "
                "default values.")

            _create_config(config_file)

        config_dict = _read_config(config_file)

        _write_config(config_dict)

    def add_module(self, module):
        """
        Adds a Pypeline module to the internal Pypeline dictionary. The module is appended at the
        end of this ordered dictionary. If the input module is a reading or writing module without
        a specified input or output location then the Pypeline default location is used. Moreover,
        the given module is connected to the Pypeline internal data storage.

        :param module: Input module.
        :type module: ReadingModule, WritingModule, ProcessingModule

        :return: None
        """

        assert isinstance(module, PypelineModule), "The added module is not a valid " \
                                                   "Pypeline module."

        if isinstance(module, WritingModule):
            if module.m_output_location is None:
                module.m_output_location = self._m_output_place

        if isinstance(module, ReadingModule):
            if module.m_input_location is None:
                module.m_input_location = self._m_input_place

        module.connect_database(self.m_data_storage)

        if module.name in self._m_modules:
            warnings.warn(
                "Processing module names need to be unique. Overwriting module '%s'."
                % module.name)

        self._m_modules[module.name] = module

    def remove_module(self, name):
        """
        Removes a Pypeline module from the internal dictionary.

        :param name: Name of the module which has to be removed.
        :type name: str

        :return: True if module was deleted and False if module does not exist.
        :rtype: bool
        """

        if name in self._m_modules:
            del self._m_modules[name]
            return True

        warnings.warn("Module name '" + name +
                      "' not found in the Pypeline dictionary.")

        return False

    def get_module_names(self):
        """
        Function which returns a list of all module names.

        :return: Ordered list of all Pypeline modules.
        :rtype: list[str]
        """

        return list(self._m_modules.keys())

    def validate_pipeline(self):
        """
        Function which checks if all input ports of the Pypeline are pointing to previous output
        ports.

        :return: True if Pypeline is valid and False if not. The second parameter contains the name
                 of the module which is not valid.
        :rtype: bool, str
        """

        self.m_data_storage.open_connection()

        existing_data_tags = list(self.m_data_storage.m_data_bank.keys())

        for module in self._m_modules.values():
            validation = self._validate(module, existing_data_tags)

            if not validation[0]:
                return validation

        return True, None

    def validate_pipeline_module(self, name):
        """
        Checks if the data exists for the module with label *name*.

        :param name: Name of the module that is checked.
        :type name: str

        :return: True if the Pypeline module is valid and False if not. The second parameter gives
                 the name of the module which is not valid.
        :rtype: bool, str
        """

        self.m_data_storage.open_connection()

        existing_data_tags = list(self.m_data_storage.m_data_bank.keys())

        if name in self._m_modules:
            module = self._m_modules[name]

        else:
            return False, None

        return self._validate(module, existing_data_tags)

    def run(self):
        """
        Walks through all saved processing steps and calls their run methods. The order in which
        the steps are called depends on the order they have been added to the Pypeline.

        :return: None
        """

        sys.stdout.write("Validating Pypeline...")
        sys.stdout.flush()

        validation = self.validate_pipeline()

        if not validation[0]:
            raise AttributeError(
                "Pipeline module '%s' is looking for data under a tag which is "
                "not created by a previous module or does not exist in the "
                "database." % validation[1])

        sys.stdout.write(" [DONE]\n")
        sys.stdout.flush()

        for key in self._m_modules:
            self._m_modules[key].run()

    def run_module(self, name):
        """
        Runs a single processing module.

        :param name: Name of the module.
        :type name: str

        :return: None
        """

        if name in self._m_modules:
            sys.stdout.write("Validating module " + name + "...")
            sys.stdout.flush()

            validation = self.validate_pipeline_module(name)

            if not validation[0]:
                raise AttributeError(
                    "Pipeline module '%s' is looking for data under a tag which "
                    "does not exist in the database." % validation[1])

            sys.stdout.write(" [DONE]\n")
            sys.stdout.flush()

            self._m_modules[name].run()

        else:
            warnings.warn("Module '" + name + "' not found.")

    def get_data(self, tag):
        """
        Function for accessing data in the central database.

        :param tag: Database tag.
        :type tag: str

        :return: The selected dataset from the database.
        :rtype: numpy.ndarray
        """

        self.m_data_storage.open_connection()

        return np.asarray(self.m_data_storage.m_data_bank[tag])

    def get_attribute(self, data_tag, attr_name, static=True):
        """
        Function for accessing attributes in the central database.

        :param data_tag: Database tag.
        :type data_tag: str
        :param attr_name: Name of the attribute.
        :type attr_name: str
        :param static: Static or non-static attribute.
        :type static: bool

        :return: The attribute value(s).
        """

        self.m_data_storage.open_connection()

        if static:
            attr = self.m_data_storage.m_data_bank[data_tag].attrs[attr_name]

        else:
            attr = self.m_data_storage.m_data_bank["header_" + data_tag + "/" +
                                                   attr_name]

        return attr
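
Putting the class together, a minimal end-to-end sketch; the directory paths, the read module instance, and the "images" tag are placeholders:

pipeline = Pypeline(working_place_in="/path/to/working",
                    input_place_in="/path/to/input",
                    output_place_in="/path/to/output")

pipeline.add_module(read)   # any ReadingModule instance; appended to the ordered dict
pipeline.run()              # validates all modules, then runs them in insertion order

images = pipeline.get_data("images")        # dataset as a numpy.ndarray
parang = pipeline.get_attribute("images",   # non-static attribute, read from the
                                "PARANG",   # header_images/PARANG dataset
                                static=False)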
Example #10
    def test_fake_planet(self):

        read = FitsReadingModule(name_in="read",
                                 image_tag="read")

        self.pipeline.add_module(read)

        angle = AngleInterpolationModule(name_in="angle",
                                         data_tag="read")

        self.pipeline.add_module(angle)

        fake = FakePlanetModule(position=(0.5, 90.),
                                magnitude=5.,
                                psf_scaling=1.,
                                interpolation="spline",
                                name_in="fake",
                                image_in_tag="read",
                                psf_in_tag="read",
                                image_out_tag="fake",
                                verbose=True)

        self.pipeline.add_module(fake)

        simplex = SimplexMinimizationModule(position=(31., 49.),
                                            magnitude=5.,
                                            psf_scaling=-1.,
                                            name_in="simplex",
                                            image_in_tag="fake",
                                            psf_in_tag="read",
                                            res_out_tag="simplex_res",
                                            flux_position_tag="flux_position",
                                            merit="sum",
                                            aperture=0.05,
                                            sigma=0.027,
                                            tolerance=0.1,
                                            pca_number=2,
                                            cent_size=None,
                                            edge_size=None,
                                            extra_rot=0.)

        self.pipeline.add_module(simplex)

        pca = PcaPsfSubtractionModule(pca_numbers=(2, ),
                                      name_in="pca",
                                      images_in_tag="fake",
                                      reference_in_tag="fake",
                                      res_mean_tag="res_mean",
                                      res_median_tag=None,
                                      res_arr_out_tag=None,
                                      res_rot_mean_clip_tag=None,
                                      extra_rot=0.)

        self.pipeline.add_module(pca)

        false = FalsePositiveModule(position=(31., 49.),
                                    aperture=0.1,
                                    ignore=True,
                                    name_in="false",
                                    image_in_tag="res_mean",
                                    snr_out_tag="snr_fpf")

        self.pipeline.add_module(false)

        photometry = AperturePhotometryModule(radius=0.1,
                                              position=None,
                                              name_in="photometry",
                                              image_in_tag="read",
                                              phot_out_tag="photometry")

        self.pipeline.add_module(photometry)

        self.pipeline.run()

        storage = DataStorage(self.test_dir + "/PynPoint_database.hdf5")
        storage.open_connection()

        data = storage.m_data_bank["read"]
        assert np.allclose(data[0, 10, 10], 0.00012958496246258364, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.00010029494781738066, rtol=limit, atol=0.)

        data = storage.m_data_bank["header_read/PARANG"]
        assert data[5] == 2.7777777777777777

        data = storage.m_data_bank["fake"]
        assert np.allclose(data[0, 49, 31], 0.00036532633147006946, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0001012983225928772, rtol=limit, atol=0.)

        data = storage.m_data_bank["simplex_res"]
        assert np.allclose(data[46, 49, 31], 3.718481593648487e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -2.8892749617545238e-08, rtol=limit, atol=0.)

        data = storage.m_data_bank["flux_position"]
        assert np.allclose(data[46, 0], 31.276994533457994, rtol=limit, atol=0.)
        assert np.allclose(data[46, 1], 50.10345749706295, rtol=limit, atol=0.)
        assert np.allclose(data[46, 2], 0.5055288651354779, rtol=limit, atol=0.)
        assert np.allclose(data[46, 3], 89.6834045889695, rtol=limit, atol=0.)
        assert np.allclose(data[46, 4], 4.997674024675655, rtol=limit, atol=0.)

        data = storage.m_data_bank["res_mean"]
        assert np.allclose(data[0, 49, 31], 9.258255068620805e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), -2.610863424405134e-08, rtol=limit, atol=0.)

        data = storage.m_data_bank["snr_fpf"]
        assert np.allclose(data[0, 2], 0.513710034941892, rtol=limit, atol=0.)
        assert np.allclose(data[0, 3], 93.01278750418334, rtol=limit, atol=0.)
        assert np.allclose(data[0, 4], 11.775360946367874, rtol=limit, atol=0.)
        assert np.allclose(data[0, 5], 2.9838031156970146e-08, rtol=limit, atol=0.)

        data = storage.m_data_bank["photometry"]
        assert np.allclose(data[0][0], 0.983374353660573, rtol=limit, atol=0.)
        assert np.allclose(data[39][0], 0.9841484973083519, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.9835085649488583, rtol=limit, atol=0.)

        storage.close_connection()
Example #11
    def setup(self):
        file_in = os.path.dirname(__file__) + "/PynPoint_database.hdf5"

        self.storage = DataStorage(file_in)
Example #12
    def test_star_alignment(self):

        read = FitsReadingModule(name_in="read",
                                 image_tag="read")

        self.pipeline.add_module(read)

        extraction = StarExtractionModule(name_in="extract",
                                          image_in_tag="read",
                                          image_out_tag="extract",
                                          image_size=0.6,
                                          fwhm_star=0.1,
                                          position=None)

        self.pipeline.add_module(extraction)

        align = StarAlignmentModule(name_in="align",
                                    image_in_tag="extract",
                                    ref_image_in_tag=None,
                                    image_out_tag="align",
                                    accuracy=10,
                                    resize=2)

        self.pipeline.add_module(align)

        shift = ShiftImagesModule((6., 4.),
                                  name_in="shift",
                                  image_in_tag="align",
                                  image_out_tag="shift")

        self.pipeline.add_module(shift)

        center = StarCenteringModule(name_in="center",
                                     image_in_tag="shift",
                                     image_out_tag="center",
                                     mask_out_tag=None,
                                     fit_out_tag="center_fit",
                                     method="full",
                                     interpolation="spline",
                                     radius=0.1,
                                     sign="positive",
                                     guess=(6., 4., 1., 1., 1., 0.))

        self.pipeline.add_module(center)

        self.pipeline.run()

        storage = DataStorage(self.test_dir + "/PynPoint_database.hdf5")
        storage.open_connection()

        data = storage.m_data_bank["read"]
        assert np.allclose(data[0, 10, 10], 0.00012958496246258364, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 9.832838021311831e-05, rtol=limit, atol=0.)

        data = storage.m_data_bank["extract"]
        assert np.allclose(data[0, 10, 10], 0.05304008435511765, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0020655767159466613, rtol=limit, atol=0.)

        data = storage.m_data_bank["header_extract/STAR_POSITION"]
        assert data[10, 0] == data[10, 1] == 75

        data = storage.m_data_bank["shift"]
        assert np.allclose(data[0, 10, 10], -4.341611534220891e-05, rtol=limit, atol=0.)
        assert np.allclose(np.mean(data), 0.0005164420068450968, rtol=limit, atol=0.)

        data = storage.m_data_bank["center"]
        assert np.allclose(data[0, 10, 10], 4.128859892625027e-05, rtol=1e-4, atol=0.)
        assert np.allclose(np.mean(data), 0.0005163806188663894, rtol=1e-7, atol=0.)

        storage.close_connection()
Example #13
    def test_contrast_curve(self):

        read = FitsReadingModule(name_in="read", image_tag="read")

        self.pipeline.add_module(read)

        angle = AngleInterpolationModule(name_in="angle", data_tag="read")

        self.pipeline.add_module(angle)

        contrast = ContrastCurveModule(name_in="contrast",
                                       image_in_tag="read",
                                       psf_in_tag="read",
                                       pca_out_tag="pca",
                                       contrast_out_tag="limits",
                                       separation=(0.5, 0.6, 0.1),
                                       angle=(0., 360., 180.),
                                       magnitude=(7.5, 1.),
                                       sigma=5.,
                                       accuracy=1e-1,
                                       psf_scaling=1.,
                                       aperture=0.1,
                                       ignore=True,
                                       pca_number=15,
                                       norm=False,
                                       cent_size=None,
                                       edge_size=None,
                                       extra_rot=0.)

        self.pipeline.add_module(contrast)

        self.pipeline.run()

        storage = DataStorage(self.test_dir + "/PynPoint_database.hdf5")
        storage.open_connection()

        data = storage.m_data_bank["read"]
        assert np.allclose(data[0, 10, 10],
                           0.00012958496246258364,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           0.00010029494781738066,
                           rtol=limit,
                           atol=0.)

        data = storage.m_data_bank["header_read/PARANG"]
        assert data[5] == 2.7777777777777777

        data = storage.m_data_bank["pca"]
        assert np.allclose(data[9, 68, 49],
                           5.707647718560735e-05,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           -3.66890878538392e-08,
                           rtol=limit,
                           atol=0.)

        data = storage.m_data_bank["pca"]
        assert np.allclose(data[21, 31, 50],
                           5.4392925807364694e-05,
                           rtol=limit,
                           atol=0.)
        assert np.allclose(np.mean(data),
                           -3.668908785383954e-08,
                           rtol=limit,
                           atol=0.)

        storage.close_connection()
Example #14
    def test_create_storage_with_wrong_location(self):
        file_in = "/test/test.hdf5"

        with pytest.raises(AssertionError):
            DataStorage(file_in)