Example no. 1
def query_skybot(images, settings, sci_ext, date_obs_fmt):
    '''Queries the VO SkyBoT service for known objects
    in the images

    :images: list - paths to images
    :settings: dict - pipeline settings
    :sci_ext: int or False - number of science extension
    :date_obs_fmt: str - format of observation date string
    :returns: pd.DataFrame - SkyBoT response for all images
    '''
    skybot = pd.DataFrame()

    for img in tqdm(images,
                    unit='imgs',
                    desc='Querying SkyBoT for known SSOs in FoV'):
        # ------
        # Construct the query string
        with fits.open(img) as exp:
            ra, dec = utils.compute_image_center(exp[sci_ext].header)
            date_obs, texp = \
                utils.unpack_header_kw(exp, [settings['DATE-OBS'],
                                             settings['EXPTIME']], sci_ext)
            mid_epoch = (Time(date_obs, format=date_obs_fmt) +
                         (float(texp) / 2) * u.second).isot

            if settings['FOV_DIMENSIONS'] == '0x0':  # we determine the FoV

                naxis1, naxis2, cdelt1, cdelt2 = \
                    utils.unpack_header_kw(exp, ['NAXIS1', 'NAXIS2',
                                                 'CDELT1', 'CDELT2'],
                                           sci_ext)

                if cdelt1 is False or cdelt2 is False:
                    cd11, cd12, cd21, cd22 = \
                        utils.unpack_header_kw(exp, ['CD1_1', 'CD1_2',
                                                     'CD2_1', 'CD2_2'],
                                               sci_ext)
                    dra = naxis1 * abs(cd11) + naxis2 * abs(cd12)
                    ddec = naxis1 * abs(cd21) + naxis2 * abs(cd22)

                else:
                    dra = naxis1 * cdelt1
                    ddec = naxis2 * cdelt2

                # Ensure full coverage by adding CROSSMATCH_RADIUS
                dra = round(dra + settings['CROSSMATCH_RADIUS'] / 60, 1)
                ddec = round(ddec + settings['CROSSMATCH_RADIUS'] / 60, 1)
                fov = f'{dra:.1f}x{ddec:.1f}'

            else:
                fov = settings['FOV_DIMENSIONS']

        result = _query_skybot_per_image(mid_epoch, ra, dec, fov,
                                         settings['OBSERVATORY_CODE'])
        if not result.empty:
            # DataFrame.append was removed in pandas 2; concatenate instead
            skybot = pd.concat([skybot, result])

    return skybot
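
A hedged usage sketch of query_skybot: the settings keys mirror what the function reads above, but the file names, keyword mappings, and concrete values are illustrative assumptions rather than an actual pipeline configuration.

# Minimal, hypothetical call to query_skybot; all values below are placeholders.
settings = {
    'DATE-OBS': 'DATE-OBS',     # header keyword holding the observation date
    'EXPTIME': 'EXPTIME',       # header keyword holding the exposure time
    'FOV_DIMENSIONS': '0x0',    # '0x0' asks the function to derive the FoV from the header
    'CROSSMATCH_RADIUS': 10,    # padding added to the FoV (the /60 above suggests arcminutes)
    'OBSERVATORY_CODE': '500',  # IAU observatory code (500 = geocentre)
}

known_ssos = query_skybot(images=['exposure_001.fits', 'exposure_002.fits'],
                          settings=settings,
                          sci_ext=0,
                          date_obs_fmt='isot')
print(f'{len(known_ssos)} known SSOs returned for the observed fields')
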
Example no. 2
    def add_image_metadata(self):

        # Derive image filename from SExtractor catalogue
        for cat_number, group in self.sources.groupby("CATALOG_NUMBER"):

            if self.settings["SCI_EXTENSION"]:
                image_filename = (
                    "_".join(
                        os.path.splitext(
                            os.path.basename(self.SExtractor_catalogues[cat_number - 1])
                        )[0].split("_")[:-1]
                    )
                    + ".fits"
                )
            else:
                image_filename = os.path.basename(
                    self.SExtractor_catalogues[cat_number - 1]
                ).replace(".cat", ".fits")

            self.sources.loc[group.index, "IMAGE_FILENAME"] = image_filename

        # Add image metadata. Have to use the correct header extension
        for image_filename, group in self.sources.groupby("IMAGE_FILENAME"):

            extension = group.EXTENSION.values[0] - 1

            # Add exposure keywords
            with fits.open(os.path.join(self.paths["images"], image_filename)) as exp:
                for prop in ["DATE-OBS", "FILTER", "EXPTIME"]:
                    self.sources.loc[group.index, prop] = utils.unpack_header_kw(
                        exp, self.settings[prop.split("_")[0]], extension
                    )

                ra, dec = utils.compute_image_center(exp[extension].header)
                self.sources.loc[group.index, "RA_IMAGE"] = ra
                self.sources.loc[group.index, "DEC_IMAGE"] = dec

                object_ = utils.unpack_header_kw(exp, "OBJECT", extension)
                if object_ is False:
                    object_ = np.nan
                self.sources.loc[group.index, "OBJECT"] = object_

        self.sources["MID_EXPOSURE_MJD"] = self.sources.apply(
            lambda x: (
                Time(x["DATE-OBS"], format=self.date_obs_fmt)
                + (float(x["EXPTIME"]) / 2) * u.second
            ).mjd,
            axis=1,
        )
Example no. 3
    def add_image_metadata(self):

        # Derive image filename from SExtractor catalogue
        for cat_number, group in self.sources.groupby('CATALOG_NUMBER'):

            if self.settings['SCI_EXTENSION']:
                image_filename = '_'.join(
                    os.path.splitext(
                        os.path.basename(self.SExtractor_catalogues[cat_number - 1])
                    )[0].split('_')[:-1]) + '.fits'
            else:
                image_filename = os.path.basename(
                    self.SExtractor_catalogues[cat_number - 1]).replace('.cat', '.fits')

            self.sources.loc[group.index, 'IMAGE_FILENAME'] = image_filename

        # Add image metadata. Have to use the correct header extension
        for image_filename, group in self.sources.groupby('IMAGE_FILENAME'):

            extension = group.EXTENSION.values[0]
            # Add exposure keywords
            with fits.open(os.path.join(self.paths['images'], image_filename)) as exposure:
                for prop in ['OBJECT', 'DATE-OBS', 'FILTER', 'EXPTIME', 'RA_IMAGE', 'DEC_IMAGE']:
                    self.sources.loc[group.index, prop] = unpack_header_kw(
                        exposure, self.settings[prop.split('_')[0]], extension)

        self.sources['MID_EXPOSURE_MJD'] = self.sources.apply(
            lambda x: (Time(x['DATE-OBS'], format=self.date_obs_fmt) +
                       float(x['EXPTIME']) / 2 * u.second).mjd, axis=1)
Example no. 4
    def _run_SExtractor_on_single_image(self, image, extension):
        '''
        Run SExtractor on individual images.

        input
        ------
        image - str, absolute path to image file
        extension - int, index of FITS file extension to be SExtracted

        return
        ------
        cat, str - absolute path to SExtractor output catalog
        date_obs, str - observation epoch from exposure
        '''

        # Name of output catalog
        cat = os.path.join(self.paths['cats'],
                           os.path.splitext(os.path.basename(image))[0])

        # Select extension or run whole image
        if extension is not False:
            image_ext = image + '[%i]' % extension
            cat += '_%i.cat' % extension
            ext = extension

        else:
            image_ext = image
            cat += '.cat'
            ext = 1

        if not self.args.sex and os.path.isfile(cat):
            self.log.debug('SExtractor catalog %s already exists! Skipping this sextraction..\n' % cat)

            with fits.open(image) as exposure:
                date_obs = unpack_header_kw(exposure, self.settings['DATE-OBS'], self.sci_ext)

            return cat, date_obs

        sex_args = {

            'file': image_ext,
            'config': self.settings['SEX_CONFIG'],
            'overwrite_params': { # Arguments to initialize Astromatic class
                    'CATALOG_NAME': cat,
                    'PARAMETERS_NAME': self.settings['SEX_PARAMS'],
                    'FILTER_NAME': self.settings['SEX_FILTER'],
                    'STARNNW_NAME': self.settings['SEX_NNW'],
            },
        }

        if self.settings['WEIGHT_IMAGES']:

            if extension is not False:
                weight_suffix = '_%i.weight' % extension
            else:
                weight_suffix = '.weight'

            # MAP_WEIGHT tells SExtractor to interpret the file as a weight map
            sex_args['overwrite_params']['WEIGHT_TYPE'] = 'MAP_WEIGHT'
            sex_args['overwrite_params']['WEIGHT_IMAGE'] = \
                os.path.join(self.settings['WEIGHT_IMAGES'],
                             os.path.basename(image).replace('.fits', weight_suffix))

        if self.log.level <= 10:  # if we're at DEBUG log level, print SExtractor output
            sex_args['overwrite_params']['VERBOSE_TYPE'] = 'NORMAL'

        # ------
        # Execute SExtractor
        cmd = ' '.join(['sex', sex_args['file'], '-c', sex_args['config']])
        for param, value in sex_args['overwrite_params'].items():
            cmd += ' '.join([' -' + param, value])

        self.log.debug('\nExecuting SExtractor command:\n%s\n\n' % cmd)
        os.system(cmd)

        # ------
        with fits.open(image) as exposure:
            # Make .ahead file with the EPOCH in MJD for SCAMP
            # Following http://www.astromatic.net/forum/showthread.php?tid=501
            date_obs = unpack_header_kw(exposure, self.settings['DATE-OBS'], self.sci_ext)
            mjd = Time(date_obs, format=self.date_obs_fmt).mjd

            with open(os.path.splitext(cat)[0] + '.ahead', 'w+') as file:
                for hdu in exposure:
                    file.write('MJD-OBS = %.6f\nEND\n' % mjd)
        return cat, date_obs
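
The command above is assembled as a single shell string and run via os.system. The sketch below routes the same call through subprocess.run with an argument list, which avoids shell quoting issues; this is a suggestion rather than part of the original pipeline, and the paths in sex_args are hypothetical placeholders.

import subprocess

# Sketch only: build the SExtractor call as an argument list instead of a
# shell string. sex_args mimics the dict constructed in the method above.
sex_args = {
    'file': 'exposure_001.fits[1]',
    'config': 'config/sso.sex',
    'overwrite_params': {
        'CATALOG_NAME': 'cats/exposure_001_1.cat',
        'PARAMETERS_NAME': 'config/sso.param',
    },
}

cmd = ['sex', sex_args['file'], '-c', sex_args['config']]
for param, value in sex_args['overwrite_params'].items():
    cmd += ['-' + param, str(value)]

# check=True raises CalledProcessError if SExtractor exits with a non-zero status
subprocess.run(cmd, check=True)
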
Example no. 5
    def _print_field_info(self):
        ''' Prints RA, DEC, and OBJECT keywords to log '''
        with fits.open(self.images[0]) as exposure:
            ra, dec, object_ = unpack_header_kw(exposure, ['RA', 'DEC', 'OBJECT'], try_first=self.sci_ext)

        ecli_lat = SkyCoord(ra, dec, frame='icrs', unit='deg').barycentrictrueecliptic.lat.deg
        print('\n')
        self.log.info('   |   '.join(['%i Exposures' % len(self.images),
                                      '%s' % object_,
                                      '%.2fdeg Ecliptic Latitude' % ecli_lat]).center(self.term_size))

        # Get number of known SSOs in FoV from SkyBoT
        if self.settings['CROSSMATCH_SKYBOT']:
            print(f'\n\n{"-" * self.term_size}')
            self.log.info('\rQuerying SkyBoT for known SSOs in FoV..')

            if not self.args.skybot and \
               os.path.isfile(os.path.join(self.paths['skybot'], 'skybot_all.csv')):

                self.skybot = pd.read_csv(os.path.join(self.paths['skybot'], 'skybot_all.csv'))

            else:
                self.skybot = pd.DataFrame()

                for img in self.images:
                    with fits.open(img) as exp:
                        ra, dec, date_obs, texp = unpack_header_kw(exp, [self.settings['RA'], self.settings['DEC'],
                                                                         self.settings['DATE-OBS'], self.settings['EXPTIME']], self.sci_ext)
                        mid_epoch = (Time(date_obs, format=self.date_obs_fmt) + float(texp) / 2 * u.second).isot

                        if self.settings['FOV_DIMENSIONS'] == '0x0':

                            # Determine exposure FoV
                            naxis1, naxis2, cdelt1, cdelt2 = unpack_header_kw(exp, ['NAXIS1', 'NAXIS2', 'CDELT1', 'CDELT2'], self.sci_ext)

                            if not cdelt1 or not cdelt2:
                                cd11, cd12, cd21, cd22 = unpack_header_kw(exp, ['CD1_1', 'CD1_2', 'CD2_1', 'CD2_2'], self.sci_ext)
                                dra  = naxis1 * abs(cd11) + naxis2 * abs(cd12)
                                ddec = naxis1 * abs(cd21) + naxis2 * abs(cd22)

                            else:
                                dra = naxis1 * cdelt1
                                ddec = naxis2 * cdelt2

                            # Ensure coverage by adding CROSSMATCH_RADIUS
                            fov = '{:.1f}x{:.1f}'.format(round(dra  + self.settings['CROSSMATCH_RADIUS'] / 60, 1),
                                                         round(ddec + self.settings['CROSSMATCH_RADIUS'] / 60, 1))

                        else:
                            fov = self.settings['FOV_DIMENSIONS']

                        result = query_skybot(mid_epoch, ra, dec, fov,
                                              self.settings['OBSERVATORY_CODE'])
                        if not result.empty:
                            # DataFrame.append was removed in pandas 2; concatenate instead
                            self.skybot = pd.concat([self.skybot, result])

                if not self.skybot.empty:
                    self.skybot.to_csv(os.path.join(self.paths['skybot'], 'skybot_all.csv'), index=False)

            if not self.skybot.empty:
                # Print gimmicky SkyBoT info
                bins = np.arange(np.floor(min(self.skybot.Mv)),
                                 np.ceil(max(self.skybot.Mv)), 0.5)

                counts = pd.cut(self.skybot['Mv'], bins).value_counts()
                counts.sort_index(inplace=True)

                magnitudes = '  '.join([str(i) for i in range(int(min(bins)),
                                                             int(np.ceil(max(bins))) + 1
                                                             )])
                # center histogram
                buffer = int((self.term_size - len(magnitudes) ) / 2) * ' '
                magnitudes = buffer + magnitudes
                # construct bar chart top-down
                bars = []
                bar_height = 7


                for i in range(1, bar_height):
                    bar = buffer
                    for count in counts:
                        if count >= max(counts.values) * (bar_height-i)/bar_height:
                            bar += '##'
                        else:
                            bar += '  '
                    bars.append(bar)
                # bottom bar
                bar = buffer
                for count in counts:
                    if count > 0:
                        bar += '##'
                    else:
                        bar += '  '
                bars.append(bar)


                self.log.info(f'\rQuerying SkyBoT for known SSOs in FoV.. '
                              f'{len(set(self.skybot.Name))} SSOs with {len(self.skybot)} detections\n')

                self.log.info('\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n %s\n%sMv\n'
                               % (*bars, magnitudes[1:],
                                  buffer[::2] + ' ' * int(len(magnitudes) / 2)))
            else:
                self.log.info('No known SSOs returned by SkyBoT.\n')
Example no. 6
    def _check_settings(self, settings):
        '''
        Converts parameter values to expected formats

        input
        -----
        settings - dict, dictionary containing PARAMETER: VALUE pairs

        return
        -----
        settings - dict, dictionary containing PARAMETER: VALUE pairs
        '''

        # Unpack provided DETECTIONS and SCI extension(s) into list
        for param in ['SCI_EXTENSION', 'DETECTIONS']:
            try:
                settings[param] = [int(character) for character in settings[param].split(',')]
            except ValueError:
                if param == 'SCI_EXTENSION' and settings[param] == 'All':
                    settings[param] = False # treat it as 'None provided'
                else:
                    raise PipelineSettingsException('%s value invalid' % param)
        if self.settings['SCI_EXTENSION']:
            self.sci_ext = self.settings['SCI_EXTENSION'][0]
        else:
            self.sci_ext = 0

        # Check one image header for keyword presence
        with fits.open(self.images[0]) as exposure:
            kws = ['RA', 'DEC', 'OBJECT', 'DATE-OBS', 'FILTER', 'EXPTIME']
            for kw in kws:
                if not unpack_header_kw(exposure, self.settings[kw], self.sci_ext):
                    raise PipelineSettingsException(f'Could not find keyword {self.settings[kw]} in FITS header.')
                if kw == 'DATE-OBS':
                    # Find format of DATE-OBS keyword
                    try:
                        Time(unpack_header_kw(exposure, self.settings[kw], try_first=self.sci_ext), format='isot')
                        self.date_obs_fmt = 'isot'
                    except ValueError:
                        self.date_obs_fmt = 'mjd'


        # Check that config files exist
        for file in ['SEX_CONFIG', 'SEX_PARAMS', 'SEX_NNW', 'SEX_FILTER',
                     'SCAMP_CONFIG', 'SWARP_CONFIG']:
            if not os.path.isfile(settings[file]):
                raise PipelineSettingsException('Could not find %s in %s' %
                                                (file, settings[file]) )

        # Check if weight images are provided and exist
        if not settings['WEIGHT_IMAGES'].upper() == 'FALSE':
            if not os.path.isdir(settings['WEIGHT_IMAGES']):
                raise PipelineSettingsException('Could not find weight images directory %s.' %
                                                 settings['WEIGHT_IMAGES'])
        else:
            settings['WEIGHT_IMAGES'] = False

        # Convert filter strings to booleans
        for param in ['FIX_HEADER', 'REMOVE_REF_SOURCES', 'FILTER_DETEC', 'FILTER_PM', 'FILTER_PIXEL', 'FILTER_MOTION',
                      'IDENTIFY_OUTLIER', 'FILTER_TRAIL',
                      'FILTER_BRIGHT_SOURCES', 'CROSSMATCH_SKYBOT', 'EXTRACT_CUTOUTS',
                      'FIXED_APER_MAGS']:

            if settings[param].upper() == 'TRUE':
                settings[param] = True
            elif settings[param].upper() == 'FALSE':
                settings[param] = False
            else:
                raise PipelineSettingsException('Could not evaluate %s value, '
                                                'has to be True or False' % param)

        if settings['FILTER_MOTION']:
            if not settings['FILTER_DETEC'] or\
              (not 1 in settings['DETECTIONS'] and not 2 in settings['DETECTIONS']):
                raise PipelineSettingsException('When FILTER_MOTION is True, DETECTIONS '
                                                'needs to contain "1,2".')

        # Convert numeric values to float
        for param in ['PM_LOW', 'PM_UP', 'PM_SNR', 'DELTA_PIXEL', 'OUTLIER_THRESHOLD',
                      'R_SQU_M', 'RATIO', 'DISTANCE', 'CROSSMATCH_RADIUS',
                      'CUTOUT_SIZE']:

            try:
                settings[param] = float(settings[param])
            except ValueError:
                raise PipelineSettingsException('Could not convert %s value to float'
                                                % param)
            if param == 'R_SQU_M':
                assert 0 <= settings[param] <= 1, 'The %s parameter has to be in range [0 - 1]' % param

        if settings['FILTER_BRIGHT_SOURCES']:
            # Evaluate bright-sources catalogue path
            if settings['BRIGHT_SOURCES_CAT'] == 'REFCAT':
                # get the filename and column names from the scamp config file
                with open(settings['SCAMP_CONFIG'], 'r') as file:
                    for line in file:
                        if 'REFOUT_CATPATH' in line:
                            refout_catpath = line.split()[1]
                        if 'ASTREF_CATALOG' in line:
                            astref_catalog = line.split()[1]
                if self.args.ASTREF_CATALOG: # if overwritten via command line
                    astref_catalog = self.args.ASTREF_CATALOG

                settings['BRIGHT_SOURCES_CAT'] = [refout_catpath, astref_catalog]

        return settings
Example no. 7
    def _run_SExtractor_on_single_image(self, image, extension):
        """Run SExtractor on individual images.

        input
        ------
        image - str, absolute path to image file
        extension - int, index of FITS file extension to be SExtracted

        return
        ------
        cat, str - absolute path to SExtractor output catalog
        date_obs, str - observation epoch from exposure
        """
        tmp_image = os.path.join(self.paths["tmp"], os.path.basename(image))

        # Name of output catalog
        cat = os.path.join(
            self.paths["cats"], os.path.splitext(os.path.basename(tmp_image))[0]
        )

        # Select extension or run whole tmp_image
        if extension is False:
            tmp_image_ext = tmp_image
            cat += ".cat"
            weight_suffix = ".weight"
        else:
            tmp_image_ext = tmp_image + "[%i]" % extension
            cat += "_%i.cat" % extension
            weight_suffix = "_%i.weight" % extension

        # If we already ran this
        if not self.args.sex and os.path.isfile(cat):
            self.log.debug(
                f"SExtractor catalog {cat} already exists!"
                f" Skipping this sextraction..\n"
            )

            with fits.open(image) as exposure:
                date_obs = utils.unpack_header_kw(
                    exposure, self.settings["DATE-OBS"], self.sci_ext
                )
            return cat, date_obs

        # Copy the image to the temporary folder and remove bad
        # keywords from header
        utils.create_clean_image(image, tmp_image)

        sex_args = {
            "file": tmp_image_ext,
            "config": self.settings["SEX_CONFIG"],
            "overwrite_params": {
                "CATALOG_NAME": cat,
                "PARAMETERS_NAME": self.settings["SEX_PARAMS"],
                "FILTER_NAME": self.settings["SEX_FILTER"],
                "STARNNW_NAME": self.settings["SEX_NNW"],
            },
        }

        if self.settings["WEIGHT_IMAGES"]:
            sex_args["overwrite_params"]["WEIGHT_IMAGES"] = "MAP_WEIGHT"

            sex_args["overwrite_params"]["WEIGHT_IMAGES"] = os.path.join(
                self.settings["WEIGHT_IMAGES"],
                os.path.basename(tmp_image).replace(".fits", weight_suffix),
            )

        # if we're at DEBUG log level, print SExtractor output
        if self.log.level <= 10:
            sex_args["overwrite_params"]["VERBOSE_TYPE"] = "NORMAL"

        # ------
        # Execute SExtractor
        cmd = " ".join(["sex", sex_args["file"], "-c", sex_args["config"]])
        for param, value in sex_args["overwrite_params"].items():
            cmd += " ".join([" -" + param, value])

        self.log.debug("\nExecuting SExtractor command:\n%s\n\n" % cmd)
        os.system(cmd)

        # ------
        with fits.open(tmp_image) as exposure:
            # Make .ahead file with the EPOCH in MJD for SCAMP
            # Following http://www.astromatic.net/forum/showthread.php?tid=501
            date_obs = utils.unpack_header_kw(
                exposure, self.settings["DATE-OBS"], self.sci_ext
            )
            mjd = Time(date_obs, format=self.date_obs_fmt).mjd

            with open(os.path.splitext(cat)[0] + ".ahead", "w+") as file:
                for hdu in exposure:
                    file.write("MJD-OBS = %.6f\nEND\n" % mjd)

        # Remove cleaned image copy
        os.remove(tmp_image)

        return cat, date_obs
Example no. 8
    def _check_settings(self, settings):
        """Converts parameter values to expected formats

        input
        -----
        settings - dict, dictionary containing PARAMETER: VALUE pairs

        return
        -----
        settings - dict, dictionary containing PARAMETER: VALUE pairs
        """

        # Unpack provided DETECTIONS and SCI extension(s) into list
        for param in ["SCI_EXTENSION", "DETECTIONS"]:
            try:
                settings[param] = [int(char) for char in settings[param].split(",")]
            except ValueError:
                if param == "SCI_EXTENSION" and settings[param].lower() == "all":
                    settings[param] = False  # treat it as 'None provided'
                else:
                    raise PipelineSettingsException("%s value invalid" % param)

        if self.settings["SCI_EXTENSION"]:
            self.sci_ext = self.settings["SCI_EXTENSION"][0]
        else:
            self.sci_ext = 0

        # Check one image header for keyword presence
        with fits.open(self.images[0]) as exposure:
            kws = ["DATE-OBS", "FILTER", "EXPTIME"]

            for kw in kws:
                val = utils.unpack_header_kw(exposure, self.settings[kw], self.sci_ext)
                if val is False:
                    raise PipelineSettingsException(
                        f"Could not find keyword "
                        f"{self.settings[kw]} in "
                        f"FITS header."
                    )
                if kw == "DATE-OBS":
                    # Find format of DATE-OBS keyword
                    try:
                        Time(val, format="isot")
                        self.date_obs_fmt = "isot"
                    except ValueError:
                        try:
                            self.date_obs_fmt = "mjd"
                        except ValueError:
                            raise PipelineSettingsException(
                                "DATE-OBS keyword " "is neither MJD " "nor ISOT."
                            )

        # Check that config files exist
        for file_ in [
            "SEX_CONFIG",
            "SEX_PARAMS",
            "SEX_NNW",
            "SEX_FILTER",
            "SCAMP_CONFIG",
            "SWARP_CONFIG",
        ]:
            if not os.path.isfile(settings[file_]):
                raise PipelineSettingsException(
                    f"Could not find {file_} in" f" {settings[file_]}"
                )

        # Check if weight images are provided and exist
        if settings["WEIGHT_IMAGES"].upper() != "FALSE":
            if not os.path.isdir(settings["WEIGHT_IMAGES"]):
                raise PipelineSettingsException(
                    f"Could not find weight images"
                    f" directory "
                    f'{settings["WEIGHT_IMAGES"]}'
                )
        else:
            settings["WEIGHT_IMAGES"] = False

        # Convert filter strings to booleans
        for param in [
            "REMOVE_REF_SOURCES",
            "FILTER_DETEC",
            "FILTER_PM",
            "FILTER_PIXEL",
            "FILTER_MOTION",
            "IDENTIFY_OUTLIER",
            "FILTER_TRAIL",
            "FILTER_BRIGHT_SOURCES",
            "CROSSMATCH_SKYBOT",
            "EXTRACT_CUTOUTS",
            "FIXED_APER_MAGS",
        ]:
            if settings[param].upper() == "TRUE":
                settings[param] = True
            elif settings[param].upper() == "FALSE":
                settings[param] = False
            else:
                raise PipelineSettingsException(
                    f"Could not evaluate {param} " f"value, has to be True or " f"False"
                )

        if settings["CHECKPLOTS"].upper() != "FALSE":
            settings["CHECKPLOTS"] = settings["CHECKPLOTS"].split(",")
        else:
            settings["CHECKPLOTS"] = []

        if settings["FILTER_MOTION"]:
            if not settings["FILTER_DETEC"] or (
                1 not in settings["DETECTIONS"] and 2 not in settings["DETECTIONS"]
            ):
                raise PipelineSettingsException(
                    f"When FILTER_MOTION is True, "
                    f"DETECTIONS needs to contain "
                    f'"1,2".'
                )

        # Convert numeric values to float
        for param in [
            "PM_LOW",
            "PM_UP",
            "PM_SNR",
            "DELTA_PIXEL",
            "OUTLIER_THRESHOLD",
            "R_SQU_M",
            "RATIO",
            "DISTANCE",
            "CROSSMATCH_RADIUS",
            "CUTOUT_SIZE",
        ]:
            try:
                settings[param] = float(settings[param])
            except ValueError:
                raise PipelineSettingsException(
                    f"Could not convert {param} " f"value to float"
                )
            if param == ["R_SQU_M"]:
                assert 0 <= settings[param] <= 1, (
                    f"The {param} parameter has" f"to be in range [0 - 1]"
                )

        if settings["FILTER_BRIGHT_SOURCES"]:

            # Evaluate bright-sources catalogue path
            if settings["BRIGHT_SOURCES_CAT"] == "REFCAT":

                # get the filename and column names from the scamp config file
                with open(settings["SCAMP_CONFIG"], "r") as file:
                    for line in file:
                        if "REFOUT_CATPATH" in line:
                            refout_catpath = line.split()[1]
                        if "ASTREF_CATALOG" in line:
                            astref_catalog = line.split()[1]

                if self.args.ASTREF_CATALOG:  # if overwritten via command line
                    astref_catalog = self.args.ASTREF_CATALOG

                settings["BRIGHT_SOURCES_CAT"] = [refout_catpath, astref_catalog]

            settings["MAG_LIMITS"] = [
                float(mag) for mag in settings["MAG_LIMITS"].split(",")
            ]
        return settings
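
For orientation, a settings dictionary that would pass the checks in _check_settings above could look like the sketch below. Every key appears in the method, but all values, the file paths in particular, are illustrative assumptions; the only constraints taken from the code are that values arrive as strings (or comma-separated strings) and that self.images[0] points at an exposure whose header contains the DATE-OBS, FILTER, and EXPTIME keywords named here.

# Illustrative only: placeholder values chosen to satisfy the string-based
# parsing in _check_settings, not a recommended configuration.
settings = {
    "SCI_EXTENSION": "1",        # or "All" to process every extension
    "DETECTIONS": "1,2,3",
    "DATE-OBS": "DATE-OBS",
    "FILTER": "FILTER",
    "EXPTIME": "EXPTIME",
    "SEX_CONFIG": "config/sso.sex",
    "SEX_PARAMS": "config/sso.param",
    "SEX_NNW": "config/default.nnw",
    "SEX_FILTER": "config/gauss_2.5_5x5.conv",
    "SCAMP_CONFIG": "config/sso.scamp",
    "SWARP_CONFIG": "config/sso.swarp",
    "WEIGHT_IMAGES": "False",
    "CHECKPLOTS": "False",
    # booleans arrive as strings and are converted in the loop above
    "REMOVE_REF_SOURCES": "False", "FILTER_DETEC": "True",
    "FILTER_PM": "True", "FILTER_PIXEL": "True", "FILTER_MOTION": "True",
    "IDENTIFY_OUTLIER": "False", "FILTER_TRAIL": "False",
    "FILTER_BRIGHT_SOURCES": "False", "CROSSMATCH_SKYBOT": "True",
    "EXTRACT_CUTOUTS": "False", "FIXED_APER_MAGS": "False",
    # numeric parameters, converted to float in the loop above
    "PM_LOW": "0", "PM_UP": "300", "PM_SNR": "20",
    "DELTA_PIXEL": "2", "OUTLIER_THRESHOLD": "2",
    "R_SQU_M": "0.95", "RATIO": "0.25", "DISTANCE": "5",
    "CROSSMATCH_RADIUS": "10", "CUTOUT_SIZE": "256",
}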