Example #1
    def sort_raw_tables(self, checkmode=None, strong_checkmode=None):
        """Provide lists of exposures with types defined in the dictionary
        """
        if checkmode is not None:
            self.checkmode = checkmode
        else:
            checkmode = self.checkmode

        if strong_checkmode is not None:
            self.strong_checkmode = strong_checkmode
        else:
            strong_checkmode = self.strong_checkmode

        if len(self.Tables.Rawfiles) == 0:
            upipe.print_error("Raw files is empty, hence cannot be sorted")
            return

        # Group the raw files by exposure type (alphabetical, hence date, order is preserved)
        for expotype in dic_expotypes:
            try:
                mask = (
                    self.Tables.Rawfiles['type'] == dic_expotypes[expotype])
                if self.checkmode:
                    maskmode = (self.Tables.Rawfiles['mode'] == self.musemode)
                    if (expotype.upper() not in exclude_list_checkmode
                        ) or self.strong_checkmode:
                        mask = maskmode & mask
                setattr(self.Tables.Raw, self._get_attr_expo(expotype),
                        self.Tables.Rawfiles[mask])
            except AttributeError:
                pass
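
To illustrate the masking step above in isolation, here is a minimal sketch using a hypothetical astropy Table of raw files; the column names 'type' and 'mode' follow the method, while the file names and mode strings are made up:

    from astropy.table import Table

    # Hypothetical raw-file table with the columns the method relies on
    rawfiles = Table({'filename': ['a.fits', 'b.fits', 'c.fits'],
                      'type': ['BIAS', 'FLAT,LAMP', 'BIAS'],
                      'mode': ['WFM-NOAO-N', 'WFM-NOAO-N', 'NFM-AO-N']})
    mask = (rawfiles['type'] == 'BIAS')
    maskmode = (rawfiles['mode'] == 'WFM-NOAO-N')
    # Combining both boolean masks keeps only the BIAS frames taken in the requested mode
    print(rawfiles[maskmode & mask]['filename'])   # -> a.fits only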
Example #2
    def retrieve_geoastro_name(self, date_str, filetype='geo', mode='wfm'):
        """Retrieving the astrometry or geometry fits file name

        Parameters
        ----------
        date_str: str
            Date as a string (YYYY-MM-DDTHH:MM:SS)
        filetype: str
            'geo' or 'astro', type of the needed file
        mode: str
            'wfm' or 'nfm' - MUSE mode
        """
        dic_pre = {'geo': 'geometry_table', 'astro': 'astrometry_wcs'}
        if filetype not in dic_pre:
            upipe.print_error("Could not decipher the filetype option "
                              "in retrieve_geoastro")
            return None

        # Transform into a datetime date
        date_dt = dt.strptime(date_str, "%Y-%m-%dT%H:%M:%S").date()
        # get all the distance to the dates (start+end together)
        near = {
            min(abs(date_dt - self._dic_geoastro[name][0]),
                abs(date_dt - self._dic_geoastro[name][1])): name
            for name in self._dic_geoastro
        }
        # Find the minimum distance and get the name
        ga_suffix = near[min(near.keys())]
        # Build the name with the prefix, suffix and mode
        ga_name = "{0}_{1}_{2}.fits".format(dic_pre[filetype], mode, ga_suffix)
        return ga_name
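
The nearest-date lookup can be sketched on its own; the validity dictionary below is made up but mirrors the structure assumed for self._dic_geoastro (a name mapped to a pair of start/end dates):

    from datetime import date

    # Hypothetical mapping from a file suffix to its (start, end) validity dates
    dic_geoastro = {"2019-06": (date(2019, 6, 1), date(2019, 6, 30)),
                    "2019-12": (date(2019, 12, 1), date(2019, 12, 31))}
    date_dt = date(2019, 11, 20)
    # Distance of the target date to each interval, using start and end together
    near = {min(abs(date_dt - start), abs(date_dt - end)): name
            for name, (start, end) in dic_geoastro.items()}
    print(near[min(near)])   # -> '2019-12', the closest validity window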
Example #3
    def _read_offset_table(self, offset_table_name=None, folder_offset_table=None):
        """Reading the Offset Table
        If readable, the table is read and set in the offset_table attribute.

        Input
        -----
        offset_table_name: str
            Name of the offset table
            Default is None
        folder_offset_table: str
            Name of the folder to find the offset table
            Default is None
        """
        self.offset_table_name = offset_table_name
        if self.offset_table_name is None:
            upipe.print_warning("No Offset table name given", pipe=self)
            self.offset_table = Table()
            return

        # Using the given folder name, alignment one by default
        if folder_offset_table is None:
            self.folder_offset_table = self.paths.alignment
        else:
            self.folder_offset_table = folder_offset_table

        full_offset_table_name = joinpath(self.folder_offset_table,
                                          self.offset_table_name)
        if not os.path.isfile(full_offset_table_name):
            upipe.print_error("Offset table [{0}] not found".format(
                full_offset_table_name), pipe=self)
            self.offset_table = Table()
            return

        # Opening the offset table
        self.offset_table = Table.read(full_offset_table_name)
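
A short sketch of the path-building and read step, assuming astropy and hypothetical folder/file names; the empty-Table fallback mirrors the method's behaviour when the file is missing:

    import os
    from os.path import join as joinpath
    from astropy.table import Table

    folder, name = "Alignment", "offsets.fits"     # hypothetical names
    full_name = joinpath(folder, name)
    # Read the FITS table if it exists, otherwise fall back to an empty Table
    offset_table = Table.read(full_name) if os.path.isfile(full_name) else Table()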
Example #4
    def _get_table_expo(self, expotype, stage="master"):
        """Return the table attached to the given expotype and stage,
        or an empty Table if no such attribute exists.
        """
        try:
            return getattr(self._dic_tables[stage],
                           self._get_attr_expo(expotype))
        except AttributeError:
            upipe.print_error(
                "No table attribute for expotype {0} and stage {1}".format(
                    expotype, stage))
            return Table()
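
The getattr/AttributeError fallback can be shown with a hypothetical holder object standing in for one stage of self._dic_tables:

    from astropy.table import Table

    class StageTables:                              # hypothetical stand-in for one stage
        masterbias = Table({'mjd': [58800.1]})

    dic_tables = {"master": StageTables()}
    # An unknown attribute name raises AttributeError, so fall back to an empty Table
    try:
        tab = getattr(dic_tables["master"], "masterflat")
    except AttributeError:
        tab = Table()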
Example #5
    def extract_combined_narrow_wcs(self, name_cube=None, **kwargs):
        """Create the reference WCS from the full mosaic with
        only 2 lambdas

        Input
        -----
        name_cube: str
            Name of the cube. Can be None, and then the final
            datacube from the combine folder will be used.
        wave1: float - optional
            Wavelength used for the extraction. It should be covered
            (have data) in all spaxels you wish to keep.
        prefix_wcs: str - optional
            Prefix to be added to the name of the input cube.
            By default, will use "refwcs".
        add_targetname: bool [True]
            Add the name of the target to the name of the output
            WCS reference cube. Default is True.

        Creates:
            Combined narrow band WCS cube

        Returns:
            name of the created cube
        """
        # Adding targetname in names or not
        self.add_targetname = kwargs.pop("add_targetname", True)

        if name_cube is None:
            # getting the name of the final datacube (mosaic)
            cube_suffix = prep_recipes_pipe.dic_products_scipost['cube'][0]
            cube_suffix = self._add_targetname(cube_suffix)
            name_cube = joinpath(self.paths.cubes, cube_suffix + ".fits")

        # test if cube exists
        if not os.path.isfile(name_cube):
            upipe.print_error("[combine/extract_combined_narrow_wcs] File {0} "
                              "does not exist. Aborting.".format(name_cube))
            return

        # Opening the cube via MuseCube
        refcube = MuseCube(filename=name_cube)

        # Creating the new cube
        prefix_wcs = kwargs.pop("prefix_wcs", default_prefix_wcs)
        upipe.print_info("Now creating the Reference WCS cube using prefix '{0}'".format(
            prefix_wcs))
        cfolder, cname = refcube.extract_onespectral_cube(prefix=prefix_wcs, **kwargs)

        # Now transforming this into a bona fide 1 extension WCS file
        full_cname = joinpath(cfolder, cname)
        d = pyfits.getdata(full_cname, ext=1)
        h = pyfits.getheader(full_cname, ext=1)
        hdu = pyfits.PrimaryHDU(data=d, header=h)
        hdu.writeto(full_cname, overwrite=True)
        upipe.print_info("...Done")
        return full_cname
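
The final flattening step, keeping only the data and header of extension 1 in a single PrimaryHDU, can be sketched on its own; the file name is hypothetical:

    from astropy.io import fits as pyfits

    full_cname = "refwcs_DATACUBE_FINAL.fits"       # hypothetical cube from the step above
    d = pyfits.getdata(full_cname, ext=1)
    h = pyfits.getheader(full_cname, ext=1)
    # Rewrite the file as a bona fide single-extension FITS carrying the WCS
    pyfits.PrimaryHDU(data=d, header=h).writeto(full_cname, overwrite=True)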
Example #6
    def _select_closest_mjd(self, mjdin, group_table):
        """Select the entry of group_table whose MJD is closest to mjdin.
        Returns the index and the corresponding template start (tpls),
        or (-1, None) if the table is empty.
        """
        if len(group_table['mjd']) < 1:
            # Print an error message and send back -1 for the index
            upipe.print_error(
                "[musepipe/_select_closest_mjd] Group table is empty - Aborting"
            )
            return -1, None
        # Get the closest tpl (smallest squared MJD difference)
        index = np.argmin((mjdin - group_table['mjd'])**2)
        closest_tpl = group_table[index]['tpls']
        return index, closest_tpl
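
The closest-MJD selection itself reduces to a numpy argmin; a sketch with a hypothetical group table:

    import numpy as np
    from astropy.table import Table

    # Hypothetical group table with the 'mjd' and 'tpls' columns the method expects
    group_table = Table({'mjd': [58800.1, 58803.4, 58810.0],
                         'tpls': ['TPL-A', 'TPL-B', 'TPL-C']})
    mjdin = 58804.0
    index = np.argmin((mjdin - group_table['mjd'])**2)
    print(index, group_table[index]['tpls'])   # -> 1 TPL-B, the closest in time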
Example #7
    def _get_fullpath_expo(self, expotype, stage="master"):
        if stage not in self._dic_paths:
            upipe.print_error("[_get_fullpath_expo] stage {} not "
                              "in dic_paths dict".format(stage))
            return None
        return upipe.normpath(
            getattr(self._dic_paths[stage], self._get_attr_expo(expotype)))
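
Assuming upipe.normpath behaves like os.path.normpath, the path clean-up amounts to:

    import os

    # Hypothetical master-file path with redundant separators and a '..' component
    path = "Target/./Pipe_products/../Master/BIAS/"
    print(os.path.normpath(path))   # -> 'Target/Master/BIAS' on POSIX systems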
Example #8
    def run_combine(self, sof_filename='pointings_combine',
                    lambdaminmax=[4000., 10000.],
                    suffix="", **kwargs):
        """MUSE Exp_combine treatment of the reduced pixtables
        Will run the esorex muse_exp_combine routine

        Parameters
        ----------
        sof_filename: string (without the file extension)
            Name of the SOF file which will contain the list of reduced
            pixel tables to combine
        lambdaminmax: list of 2 floats
            Minimum and maximum lambda values to consider for the combine
        suffix: str
            Suffix to be used for the output name
        """
        # Unpack the wavelength range
        lambdamin, lambdamax = lambdaminmax

        # Save options
        save = kwargs.pop("save", "cube,combined")

        # Filters
        filter_list = kwargs.pop("filter_list", self.filter_list)

        # Expotype
        expotype = kwargs.pop("expotype", 'REDUCED')

        # Adding target name as prefix or not
        self.add_targetname = kwargs.pop("add_targetname", True)
        prefix_wcs = kwargs.pop("prefix_wcs", default_prefix_wcs)
        prefix_all = kwargs.pop("prefix_all", "")
        prefix_all = self._add_targetname(prefix_all)

        if "offset_table_name" in kwargs:
            offset_table_name = kwargs.pop("offset_table_name")
            folder_offset_table = kwargs.pop("folder_offset_table", self.folder_offset_table)
            self._check_offset_table(offset_table_name, folder_offset_table)

        # Go to the data folder
        self.goto_folder(self.paths.data, addtolog=True)

        # If list_pointings is None using the initially set up one
        list_pointings = kwargs.pop("list_pointings", self.list_pointings)

        # Abort if only one exposure is available
        # exp_combine needs a minimum of 2
        nexpo_tocombine = sum(len(self.dic_pixtabs_in_pointings[pointing])
                              for pointing in list_pointings)
        if nexpo_tocombine <= 1:
            upipe.print_warning("All considered pointings only "
                                "have one exposure: process aborted",
                                pipe=self)
            return

        # Now creating the SOF file, first resetting it
        self._sofdict.clear()
        # Selecting only exposures to be treated
        # Producing the list of REDUCED PIXTABLES
        self._add_calib_to_sofdict("FILTER_LIST")

        # Adding a WCS if needed
        wcs_auto = kwargs.pop("wcs_auto", False)
        ref_wcs = kwargs.pop("ref_wcs", None)
        if wcs_auto:
            upipe.print_warning("wcs_auto is True, hence overwriting ref_wcs name")
            # getting the name of the final datacube (mosaic)
            cube_suffix = prep_recipes_pipe.dic_products_scipost['cube'][0]
            cube_suffix = self._add_targetname(cube_suffix)
            ref_wcs = "{0}{1}.fits".format(prefix_wcs, cube_suffix)
            upipe.print_warning("ref_wcs used is {0}".format(ref_wcs))

        folder_ref_wcs = kwargs.pop("folder_ref_wcs", upipe.normpath(self.paths.cubes))
        if ref_wcs is not None:
            full_ref_wcs = joinpath(folder_ref_wcs, ref_wcs)
            if not os.path.isfile(full_ref_wcs):
                upipe.print_error("Reference WCS file {0} does not exist".format(
                    full_ref_wcs))
                upipe.print_error("Consider using the create_combined_wcs recipe"
                                  " if you wish to create pointing masks. Else"
                                  " just check that the WCS reference file exists.")
                return

            self._sofdict['OUTPUT_WCS'] = [joinpath(folder_ref_wcs, ref_wcs)]

        # Setting the default option of offset_list
        if self.offset_table_name is not None:
            self._sofdict['OFFSET_LIST'] = [joinpath(self.folder_offset_table,
                                                     self.offset_table_name)]

        pixtable_name = dic_listObject[expotype]
        self._sofdict[pixtable_name] = []
        for pointing in list_pointings:
            self._sofdict[pixtable_name] += self.dic_pixtabs_in_pointings[pointing]

        self.write_sof(sof_filename="{0}_{1}{2}".format(sof_filename,
                                                        self.targetname,
                                                        suffix), new=True)

        # Product names
        dir_products = upipe.normpath(self.paths.cubes)
        name_products, suffix_products, suffix_prefinalnames, prefix_products = \
            prep_recipes_pipe._get_combine_products(filter_list,
                                                    prefix_all=prefix_all)

        # Combine the exposures 
        self.recipe_combine_pointings(self.current_sof, dir_products, name_products,
                                      suffix_products=suffix_products,
                                      suffix_prefinalnames=suffix_prefinalnames,
                                      prefix_products=prefix_products,
                                      save=save, suffix=suffix, filter_list=filter_list,
                                      lambdamin=lambdamin, lambdamax=lambdamax)

        # Go back to original folder
        self.goto_prevfolder(addtolog=True)
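
The early-abort guard on the number of exposures can be sketched with a hypothetical pointing dictionary; muse_exp_combine needs at least two exposures:

    # Hypothetical mapping from pointing number to its list of reduced pixel tables
    dic_pixtabs_in_pointings = {1: ['PIXTABLE_REDUCED_0001.fits'], 2: []}
    list_pointings = [1, 2]
    nexpo_tocombine = sum(len(dic_pixtabs_in_pointings[p]) for p in list_pointings)
    if nexpo_tocombine <= 1:
        print("Only {0} exposure(s) available: muse_exp_combine needs at "
              "least 2, aborting".format(nexpo_tocombine))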