Example No. 1
    def goto_origfolder(self, addtolog=False):
        """Go back to original folder
        """
        upipe.print_info("Going back to the original folder {0}".format(
            self.paths.orig), pipe=self)
        self.goto_folder(self.paths.orig, addtolog=addtolog, verbose=False)
Example No. 2
    def run_combine_all_single_pointings(self,
                                         add_suffix="",
                                         sof_filename='pointings_combine',
                                         **kwargs):
        """Run for all pointings individually, provided in the 
        list of pointings, by just looping over the pointings.

        Input
        -----
        list_pointings: list of int
            By default to None (using the default self.list_pointings).
            Otherwise a list of pointings you wish to conduct
            a combine but for each individual pointing.
        add_suffix: str
            Additional suffix. 'PXX' where XX is the pointing number
            will be automatically added to that add_suffix for 
            each individual pointing.
        sof_filename: str
            Name (suffix only) of the sof file for this combine.
            By default, it is set to 'pointings_combine'.
        lambdaminmax: list of 2 floats [in Angstroems]
            Minimum and maximum lambda values to consider for the combine.
            Default is 4000 and 10000 for the lower and upper limits, resp.
        """
        # If list_pointings is not provided, use the default self.list_pointings
        list_pointings = kwargs.pop("list_pointings", self.list_pointings)

        # Loop over the pointings
        for pointing in list_pointings:
            upipe.print_info("Combining single pointings - Pointing {0:02d}".format(
                             pointing))
            self.run_combine_single_pointing(pointing, add_suffix=add_suffix,
                                             sof_filename=sof_filename,
                                             **kwargs)
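A minimal usage sketch of the loop above, assuming a hypothetical, already-initialised combine object named pipe for which pointings 1 and 2 have been reduced:

# list_pointings is an optional keyword that overrides self.list_pointings
pipe.run_combine_all_single_pointings(add_suffix="v1",
                                      sof_filename='pointings_combine',
                                      list_pointings=[1, 2])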
Example No. 3
    def extract_combined_narrow_wcs(self, name_cube=None, **kwargs):
        """Create the reference WCS from the full mosaic with
        only 2 lambdas

        Input
        -----
        name_cube: str
            Name of the cube. Can be None, and then the final
            datacube from the combine folder will be used.
        wave1: float - optional
            Wavelength used for the extraction. It should be
            present in all spaxels you wish to keep.
        prefix_wcs: str - optional
            Prefix to be added to the name of the input cube.
            By default, will use "refwcs".
        add_targetname: bool [True]
            Add the name of the target to the name of the output
            WCS reference cube. Default is True.

        Creates:
            Combined narrow band WCS cube

        Returns:
            name of the created cube
        """
        # Adding targetname in names or not
        self.add_targetname = kwargs.pop("add_targetname", True)

        if name_cube is None:
            # getting the name of the final datacube (mosaic)
            cube_suffix = prep_recipes_pipe.dic_products_scipost['cube'][0]
            cube_suffix = self._add_targetname(cube_suffix)
            name_cube = joinpath(self.paths.cubes, cube_suffix + ".fits")

        # test if cube exists
        if not os.path.isfile(name_cube):
            upipe.print_error("[combine/extract_combined_narrow_wcs] File {0} "
                              "does not exist. Aborting.".format(name_cube))
            return

        # Opening the cube via MuseCube
        refcube = MuseCube(filename=name_cube)

        # Creating the new cube
        prefix_wcs = kwargs.pop("prefix_wcs", default_prefix_wcs)
        upipe.print_info("Now creating the Reference WCS cube using prefix '{0}'".format(
            prefix_wcs))
        cfolder, cname = refcube.extract_onespectral_cube(prefix=prefix_wcs, **kwargs)

        # Now transforming this into a bona fide 1 extension WCS file
        full_cname = joinpath(cfolder, cname)
        d = pyfits.getdata(full_cname, ext=1)
        h = pyfits.getheader(full_cname, ext=1)
        hdu = pyfits.PrimaryHDU(data=d, header=h)
        hdu.writeto(full_cname, overwrite=True)
        upipe.print_info("...Done")
        return full_cname
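The final step above rewrites the extracted cube as a single-extension FITS file carrying the WCS. A standalone sketch of that pattern, assuming astropy is installed ('input_cube.fits' is a hypothetical file name):

from astropy.io import fits as pyfits

# Take the data and header from extension 1 and rewrite the file
# as a PrimaryHDU-only FITS file keeping the same WCS keywords.
data = pyfits.getdata("input_cube.fits", ext=1)
header = pyfits.getheader("input_cube.fits", ext=1)
pyfits.PrimaryHDU(data=data, header=header).writeto("input_cube.fits", overwrite=True)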
Example No. 4
    def goto_folder(self, newpath, addtolog=False, verbose=True):
        """Changing directory and keeping memory of the old working one
        """
        try:
            prev_folder = os.getcwd()
            newpath = os.path.normpath(newpath)
            os.chdir(newpath)
            upipe.print_info("Going to folder {0}".format(newpath), pipe=self)
            if addtolog:
                upipe.append_file(self.paths.log_filename, "cd {0}\n".format(newpath))
            self.paths._prev_folder = prev_folder
        except OSError:
            if not os.path.isdir(newpath):
                raise
Example No. 5
    def goto_prevfolder(self, addtolog=False):
        """Go back to previous folder

        Parameters
        ----------
        addtolog: bool [False]
            Adding the folder move to the log file
        """
        upipe.print_info("Going back to the previous folder {0}".format(
            self.paths._prev_folder),
                         pipe=self)
        self.goto_folder(self.paths._prev_folder,
                         addtolog=addtolog,
                         verbose=False)
Example No. 6
    def run_combine_all_single_pointings_withmasks(self, combine=True, masks=True, 
            mosaic_wcs=True, perpointing_combine=True, **kwargs):
        """Run all combine recipes including WCS and masks

        combine: bool [True]
            Default is True. Will run the combine for all pointings.

        masks: bool [True]
            Will run the combined WCS and the individual pointing ones
            (and masks).

        mosaic_wcs (bool): [True]. Reference WCS for the full mosaic
            
        perpointing_combine: bool [True]
            Will run individual pointings using the WCS.
        """
        lambdaminmax = kwargs.pop("lambdaminmax", lambdaminmax_for_mosaic)
        if combine:
            upipe.print_info("Running the mosaic combine")
            offset_table_name = kwargs.get("offset_table_name", None)
            folder_offset_table = kwargs.get("folder_offset_table",
                                             self.folder_offset_table)
            if offset_table_name is not None:
                self._check_offset_table(offset_table_name, folder_offset_table)
            self.run_combine(lambdaminmax=lambdaminmax,
                             offset_table_name=offset_table_name,
                             folder_offset_table=folder_offset_table)

        if masks:
            # Creating the full mosaic WCS first with a narrow lambda range
            upipe.print_info("Start creating the narrow-lambda WCS and Masks")
            _ = self.create_combined_wcs()
            # Then creating the mask WCS for each pointing
            upipe.print_info("Start creating the individual Pointings Masks")
            self.create_all_pointings_mask_wcs(lambdaminmax_mosaic=lambdaminmax, 
                                               **kwargs)

        if mosaic_wcs:
            # Creating a reference WCS for the Full Mosaic with the right 
            # Spectral coverage for a full mosaic
            upipe.print_info("Start creating the full-lambda WCS")
            self._combined_wcs_name = self.create_combined_wcs(
                prefix_wcs=default_prefix_wcs_mosaic,
                lambdaminmax_wcs=lambdaminmax_for_mosaic)

        if perpointing_combine:
            upipe.print_info("Running the Individual Pointing combine")
            # Then merging each single pointing using the masks
            self.run_combine_all_single_pointings(**kwargs)
Example No. 7
    def read_astropy_table(self, expotype=None, stage="master"):
        """Read an existing Masterfile data table to start the pipeline
        """
        # Read the astropy table
        name_table = self._get_fitstablename_expo(expotype, stage)
        if not os.path.isfile(name_table):
            upipe.print_warning(
                "Astropy table {0} does not exist - setting up an "
                "empty one".format(name_table),
                pipe=self)
            return Table([[], [], []], names=['tpls', 'mjd', 'tplnexp'])
        else:
            upipe.print_info(
                "Reading Astropy fits Table {0}".format(name_table), pipe=self)
            return Table.read(name_table, format="fits")
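The read-or-create-empty fallback above can be reproduced on its own; a minimal sketch, assuming astropy is installed ('masterfiles.fits' is a hypothetical path):

import os
from astropy.table import Table

name_table = "masterfiles.fits"  # hypothetical path
if os.path.isfile(name_table):
    table = Table.read(name_table, format="fits")
else:
    # Same empty layout as the fallback above
    table = Table([[], [], []], names=['tpls', 'mjd', 'tplnexp'])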
Example No. 8
def get_list_targets(period_path=""):
    """Getting a list of existing periods
    for a given path

    Input
    -----
    period_path: str

    Return
    ------
    list_targets: list of str
    """
    # Done by scanning the target path
    list_targets = [name for name in os.listdir(period_path)
                    if os.path.isdir(os.path.join(period_path, name))]

    list_targets.sort()
    upipe.print_info("Potential Targets -- list: {0}".format(str(list_targets)))
    return list_targets
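For illustration, assuming a hypothetical period folder 'P101' that contains one sub-folder per target:

# Lists the sub-folder names under P101, sorted alphabetically,
# e.g. ['NGC1087', 'NGC1365'] if those two target folders exist.
list_targets = get_list_targets(period_path="P101")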
Example No. 9
    def create_all_pointings_mask_wcs(self, filter_list="white", **kwargs):
        """Create all pointing masks one by one
        as well as the wcs for each individual pointings. Using the grid
        from the global WCS of the mosaic but restricting it to the 
        range of non-NaN.

        Input
        -----
        filter_list = list of str
            List of filter names to be used. 
        """
        # If list_pointings is not provided, use the default self.list_pointings
        list_pointings = kwargs.pop("list_pointings", self.list_pointings)

        # Loop over the pointings
        for pointing in list_pointings:
            upipe.print_info("Making WCS Mask for pointing {0:02d}".format(pointing))
            _ = self.create_pointing_mask_wcs(
                pointing=pointing, filter_list=filter_list, **kwargs)
Example No. 10
def get_list_pointings(target_path=""):
    """Getting a list of existing pointings
    for a given path

    Input
    -----
    target_path: str

    Return
    ------
    list_pointings: list of int
    """
    # Done by scanning the target path
    list_folders = glob.glob(target_path + "/P??")
    list_pointings = []
    for folder in list_folders:
        list_pointings.append(int(folder[-2:]))

    list_pointings.sort()
    upipe.print_info("Pointings list: {0}".format(str(list_pointings)))
    return list_pointings
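For illustration, assuming a hypothetical target folder 'NGC1087' containing the pointing sub-folders P01, P02 and P05:

# The last two characters of each P?? folder name are parsed as an integer,
# so this returns the sorted list [1, 2, 5].
list_pointings = get_list_pointings(target_path="NGC1087")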
Example No. 11
    def goto_folder(self, newpath, addtolog=False, **kwargs):
        """Changing directory and keeping memory of the old working one

        Parameters
        ----------
        addtolog: bool [False]
            Adding the folder move to the log file
        """
        verbose = kwargs.pop("verbose", self.verbose)
        try:
            prev_folder = os.getcwd()
            newpath = os.path.normpath(newpath)
            os.chdir(newpath)
            upipe.print_info("Going to folder {0}".format(newpath), pipe=self)
            if addtolog:
                upipe.append_file(self.paths.log_filename,
                                  "cd {0}\n".format(newpath))
            self.paths._prev_folder = prev_folder
        except OSError:
            if not os.path.isdir(newpath):
                raise
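The chdir-and-remember pattern above can also be written as a context manager that restores the previous folder automatically; a standalone sketch, not part of pymusepipe:

import os
from contextlib import contextmanager

@contextmanager
def working_directory(newpath):
    """Temporarily change the working directory, then restore the old one."""
    prev_folder = os.getcwd()
    os.chdir(os.path.normpath(newpath))
    try:
        yield prev_folder
    finally:
        os.chdir(prev_folder)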
Example No. 12
def build_dic_exposures(target_path=""):
    """

    Parameters
    ----------
    target_path

    Returns
    -------
    dic_expo: dict
        Dictionary of exposures in each pointing

    """
    list_pointings = get_list_pointings(target_path)
    dic_expos = {}
    for pointing in list_pointings:
        name_pointing = "P{:02d}".format(pointing)
        upipe.print_info("For pointing {0}".format(name_pointing))
        dic_p = get_list_exposures(joinpath(target_path, name_pointing))
        dic_expos[pointing] = [(tpl, dic_p[tpl]) for tpl in dic_p]

    return dic_expos
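A sketch of the returned structure, for a hypothetical target with two pointings and one template (TPL) each:

dic_expos = build_dic_exposures(target_path="NGC1087")  # hypothetical path
# The dictionary is indexed by pointing number; each value is a list of
# (tpl, [exposure numbers]) pairs, for example:
# {1: [('2017-11-01T02:03:04', [1, 2, 3])],
#  2: [('2017-11-02T02:03:04', [1, 2])]}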
Example No. 13
def get_list_exposures(pointing_path=""):
    """Getting a list of exposures from a given path

    Input
    -----
    pointing_path: str

    Return
    ------
    list_expos: list of int
    """
    # Done by scanning the target path
    list_files = glob.glob(pointing_path + "/Object/DATACUBE_FINAL*_????.fits")
    list_expos = []
    for name in list_files:
        [(tpl, lint)] = re.findall(r'\_(\S{19})\_(\d{4}).fits', name)
        if len(lint) > 0:
            list_expos.append((tpl, int(lint)))

    # Making it unique and sort
    list_expos = np.unique(list_expos, axis=0)
    # Sorting by tpl and expo number
    sorted_list = sorted(list_expos, key=lambda e: (e[0], e[1]))

    # Building the final list
    dic_expos = {}
    for l in sorted_list:
        tpl = l[0]
        if tpl in dic_expos:
            dic_expos[tpl].append(l[1])
        else:
            dic_expos[tpl] = [l[1]]

    # Finding the full list of tpl
    upipe.print_info("Exposures list:")
    for tpl in dic_expos:
        upipe.print_info("TPL= {0} : Exposures= {1}".format(tpl, dic_expos[tpl]))

    return dic_expos
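To show how the filename parsing above works, a small sketch with a hypothetical cube name:

import re

name = "Object/DATACUBE_FINAL_2017-11-01T02:03:04_0002.fits"  # hypothetical
[(tpl, lint)] = re.findall(r'\_(\S{19})\_(\d{4}).fits', name)
# tpl  -> '2017-11-01T02:03:04'  (the 19-character template timestamp)
# lint -> '0002'                 (the exposure number, still a string)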
Example No. 14
def get_list_periods(root_path=""):
    """Getting a list of existing periods
    for a given path

    Input
    -----
    root_path: str

    Return
    ------
    list_periods: list of int
    """
    # Done by scanning the target path
    list_folders = glob.glob(root_path + "/P???")
    list_periods = []
    for folder in list_folders:
        lint = re.findall(r'(\d{3})', folder)
        if len(lint) > 0:
            list_periods.append(int(lint[-1]))

    list_periods.sort()
    upipe.print_info("Periods list: {0}".format(str(list_periods)))
    return list_periods
Example No. 15
    def init_raw_table(self, reset=False, **kwargs):
        """ Create a fits table with all the information from
        the Raw files. Also create an astropy table with the same info

        Parameters
        ----------
        reset: bool [False]
            Resetting the raw astropy table if True
        """
        upipe.print_info("Creating the astropy fits raw data table", pipe=self)

        if reset or not hasattr(self, "Tables"):
            self._reset_tables()

        # Testing if raw table exists
        name_table = self._get_fitstablename_expo('RAWFILES', "raw")

        # ---- File exists - we READ it ------------------- #
        overwrite = kwargs.pop("overwrite", self._overwrite_astropy_table)
        if os.path.isfile(name_table):
            if overwrite:
                upipe.print_warning("The raw-files table will be overwritten",
                                    pipe=self)
            else:
                upipe.print_warning("The raw files table already exists",
                                    pipe=self)
                upipe.print_warning(
                    "If you wish to overwrite it, "
                    "please set the 'overwrite_astropy_table' option to True",
                    pipe=self)
                upipe.print_warning(
                    "In the meantime, the existing table will be read and used",
                    pipe=self)
                self.Tables.Rawfiles = self.read_astropy_table(
                    'RAWFILES', "raw")

        # ---- Overwrite is on - we (re)create the table --- #
        if overwrite:
            # Check the raw folder
            self.goto_folder(self.paths.rawfiles)
            # Get the list of files from the Raw data folder
            files = os.listdir(".")

            smalldic = {"FILENAME": ['filename', '', str, '100A']}
            fulldic = listexpo_files.copy()
            fulldic.update(smalldic)

            # Init the lists
            MUSE_infodic = {}
            for key in fulldic:
                MUSE_infodic[key] = []

            # Looping over the files
            for f in files:
                # Only consider files containing 'MUSE' and ending with a raw-file suffix
                if ('MUSE' in f):
                    if any([f.endswith(suffix) for suffix in suffix_rawfiles]):
                        header = pyfits.getheader(f, 0)
                        # Short circuit in case 'OBJECT' is not found in header
                        if 'OBJECT' not in header:
                            continue
                        new_infodic = {}
                        good_file = True
                        object_file = None
                        for k in listexpo_files:
                            [namecol, keyword, func, form] = listexpo_files[k]
                            if keyword in header:
                                new_infodic[k] = func(header[keyword])
                            elif k == 'TYPE':
                                # Find the key which is right
                                astrogeo_keys = [
                                    tk for tk, tv in dic_astrogeo.items()
                                    if tv == header['OBJECT']
                                ]
                                # Nothing found?
                                if len(astrogeo_keys) == 0:
                                    good_file = False
                                # If found, print info and save value
                                else:
                                    upipe.print_info(
                                        "Found one {0} file {1}".format(
                                            astrogeo_keys[0], f))
                                    new_infodic[k] = astrogeo_keys[0]
                                    object_file = astrogeo_keys[0]
                            else:
                                good_file = False
                        # Transferring the information now if complete
                        if object_file is not None:
                            new_infodic['OBJECT'] = object_file
                        if good_file:
                            MUSE_infodic['FILENAME'].append(f)
                            for k in new_infodic:
                                MUSE_infodic[k].append(new_infodic[k])

                    elif any([suffix in f for suffix in suffix_rawfiles]):
                        upipe.print_warning(
                            "File {0} will be ignored "
                            "from the Raw files "
                            "(it may be a download duplicate - "
                            " please check)".format(f),
                            pipe=self)

            # Transforming into numpy arrays
            for k in fulldic:
                MUSE_infodic[k] = np.array(MUSE_infodic[k])

            # Getting a sorted array with indices
            idxsort = np.argsort(MUSE_infodic['FILENAME'])

            # Creating the astropy table
            self.Tables.Rawfiles = Table([MUSE_infodic['FILENAME'][idxsort]],
                                         names=['filename'],
                                         meta={'name': 'raw file table'})

            # Creating the columns
            for k in fulldic:
                [namecol, keyword, func, form] = fulldic[k]
                self.Tables.Rawfiles[namecol] = MUSE_infodic[k][idxsort]

            # Writing up the table
            self.Tables.Rawfiles.write(name_table,
                                       format="fits",
                                       overwrite=overwrite)

            if len(self.Tables.Rawfiles) == 0:
                upipe.print_warning(
                    "Raw Files Table is empty: please check your 'Raw' folder")

            # Going back to the original folder
            self.goto_prevfolder()

        # Sorting the types ====================================
        self.sort_raw_tables()
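The core of the raw-file scan above is reading each file's primary header and keeping only the files whose keywords are all present. A stripped-down sketch of that step, assuming astropy is installed, with an illustrative keyword list and raw-file suffixes standing in for listexpo_files and suffix_rawfiles:

import os
from astropy.io import fits as pyfits

keywords = ['OBJECT', 'MJD-OBS', 'DATE-OBS']  # illustrative subset
rows = []
for f in os.listdir("."):
    if 'MUSE' in f and f.endswith((".fits", ".fits.fz")):
        header = pyfits.getheader(f, 0)
        # Keep the file only if every keyword is present in the header
        if all(k in header for k in keywords):
            rows.append([f] + [header[k] for k in keywords])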
Example No. 16
    def __init__(self, targetname=None, list_pointings="all",
                 dic_exposures_in_pointings=None,
                 suffix_fixed_pixtables="tmask",
                 folder_config="",
                 rc_filename=None, cal_filename=None,
                 combined_folder_name="Combined", suffix="",
                 offset_table_name=None,
                 log_filename="MusePipeCombine.log",
                 verbose=True, debug=False, **kwargs):
        """Initialisation of class muse_expo

        Input
        -----
        targetname: str
            Name of the target (e.g., 'NGC1208'). Default is None.

        rc_filename: str
            filename to initialise folders
        cal_filename: str
            filename to initial FIXED calibration MUSE files
        verbose: bool 
            Give more information as output (default is True)
        debug: bool
            Allows to get more messages when needed
            Default is False
        vsystemic: float 
            Default is 0. Systemic velocity of the galaxy [in km/s]
        suffix_fixed_pixtables: str
            Suffix for fixed PixTables. Default is 'tmask'.
        use_fixed_pixtables: bool
            Default is False. If True, will use suffix_fixed_pixtables to filter out
            Pixtables which have been fixed.

        Other possible entries
        ----------------------
        warnings: str ['ignore']
            If set to 'ignore', will ignore the Astropy Warnings.

        """
        # Verbose option
        self.verbose = verbose
        self._debug = debug
        if self._debug:
            upipe.print_warning("In DEBUG Mode [more printing]")

        # Warnings for astropy
        self.warnings = kwargs.pop("warnings", 'ignore')
        if self.warnings == 'ignore':
            warnings.simplefilter('ignore', category=AstropyWarning)

        # Setting the default attributes #####################
        self.targetname = targetname
        self.__phangs = kwargs.pop("PHANGS", False)
        if self.__phangs:
            self.filter_list = kwargs.pop("filter_list", default_PHANGS_filter_list)
        else:
            self.filter_list = kwargs.pop("filter_list", default_filter_list)

        self.combined_folder_name = combined_folder_name
        self.vsystemic = float(kwargs.pop("vsystemic", 0.))

        # Including or not the fixed Pixtables in place of the original ones
        self.use_fixed_pixtables = kwargs.pop("use_fixed_pixtables", False)
        self.suffix_fixed_pixtables = suffix_fixed_pixtables

        # Setting other default attributes
        if log_filename is None:
            log_filename = "log_{timestamp}.txt".format(timestamp=upipe.create_time_name())
            upipe.print_info("The Log file will be {0}".format(log_filename))
        self.log_filename = log_filename
        self.suffix = suffix
        folder_offset_table = kwargs.pop("folder_offset_table", None)

        # End of parameter settings #########################

        # Init of the subclasses
        PipeRecipes.__init__(self, **kwargs)
        SofPipe.__init__(self)

        # ---------------------------------------------------------
        # Setting up the folders and names for the data reduction
        # Can be initialised by either an rc_file, 
        # or a default rc_file or hardcoded defaults.
        self.pipe_params = InitMuseParameters(folder_config=folder_config,
                                              rc_filename=rc_filename,
                                              cal_filename=cal_filename,
                                              verbose=verbose)

        # Setting up the relative path for the data, using Galaxy Name + Pointing
        self.pipe_params.data = "{0}/{1}/".format(self.targetname,
                                                  self.combined_folder_name)

        self.pipe_params.init_default_param(dic_combined_folders)
        self._dic_combined_folders = dic_combined_folders

        # Now the list of pointings
        if isinstance(list_pointings, str):
            if list_pointings.lower() == "all":
                self.list_pointings = get_list_pointings(
                    joinpath(self.pipe_params.root, self.targetname))
        else:
            self.list_pointings = list_pointings

        # Setting all the useful paths
        self.set_fullpath_names()
        self.paths.log_filename = joinpath(self.paths.log, log_filename)

        # and Recording the folder where we start
        self.paths.orig = os.getcwd()

        # END Set up params =======================================

        # =========================================================== 
        # ---------------------------------------------------------
        # Create the Combined folder
        upipe.safely_create_folder(self.paths.data, verbose=verbose)

        # Go to the Combined Folder
        self.goto_folder(self.paths.data)

        # Now create full path folder 
        for folder in self._dic_combined_folders:
            upipe.safely_create_folder(self._dic_combined_folders[folder], verbose=verbose)

        # Checking input pointings and pixtables
        self._check_pointings(dic_exposures_in_pointings)

        # Checking input offset table and corresponding pixtables
        self._check_offset_table(offset_table_name, folder_offset_table)
        # END CHECK UP ============================================

        # Making the output folders in a safe mode
        if self.verbose:
            upipe.print_info("Creating directory structure")

        # Going back to initial working directory
        self.goto_origfolder()
Example No. 17
    def _check_offset_table(self, offset_table_name=None, folder_offset_table=None):
        """Checking if DATE-OBS and MJD-OBS are in the OFFSET Table

        Input
        -----
        offset_table_name: str
            Name of the offset table
            Default is None
        folder_offset_table: str
            Name of the folder to find the offset table
            Default is None
        """
        if offset_table_name is None:
            return

        self._read_offset_table(offset_table_name=offset_table_name,
                                folder_offset_table=folder_offset_table)

        # getting the MJD and DATE from the OFFSET table
        if not set(self.offset_table.columns.keys()) \
            & {mjd_names['table'], date_names['table']}:
            upipe.print_warning("Could not find some keywords "
                                "in offset table")
            return

        self.table_mjdobs = self.offset_table[mjd_names['table']]
        self.table_dateobs = self.offset_table[date_names['table']]

        # Checking existence of each pixel_table in the offset table
        nexcluded_pixtab = 0
        nincluded_pixtab = 0
        for pointing in self.list_pointings:
            pixtab_to_exclude = []
            for pixtab_name in self.dic_pixtabs_in_pointings[pointing]:
                pixtab_header = pyfits.getheader(pixtab_name)
                mjd_obs = pixtab_header['MJD-OBS']
                date_obs = pixtab_header['DATE-OBS']
                # First check MJD
                index = np.argwhere(self.table_mjdobs == mjd_obs)
                # Then check DATE
                if (index.size == 0) or (self.table_dateobs[index] != date_obs):
                    upipe.print_warning("PIXELTABLE {0} not found in OFFSET table: "
                                        "please check MJD-OBS and DATE-OBS".format(pixtab_name))
                    pixtab_to_exclude.append(pixtab_name)
                nincluded_pixtab += 1
                # Exclude the ones which have not been found
            nexcluded_pixtab += len(pixtab_to_exclude)
            for pixtab in pixtab_to_exclude:
                self.dic_pixtabs_in_pointings[pointing].remove(pixtab)
                if self.verbose:
                    upipe.print_warning("PIXTABLE [not found in OffsetTable]: "
                                        "{0}".format(pixtab))

        # printing result
        upipe.print_info("Offset Table checked: #{0} PixTables included".format(
            nincluded_pixtab))
        if nexcluded_pixtab == 0:
            upipe.print_info("All PixTables were found in Offset Table")
        else:
            upipe.print_warning("#{0} PixTables not found in Offset Table".format(
                nexcluded_pixtab))
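The per-exposure test above boils down to matching each pixel table's MJD-OBS and DATE-OBS against the offset table columns. A minimal sketch of that comparison, assuming astropy and numpy, with hypothetical file and column names:

import numpy as np
from astropy.io import fits as pyfits
from astropy.table import Table

offset_table = Table.read("offset_table.fits")              # hypothetical file
header = pyfits.getheader("PIXTABLE_REDUCED_0001.fits")     # hypothetical file

# Find the rows whose MJD matches, then also require a matching DATE
matches = np.flatnonzero(offset_table['MJD_OBS'] == header['MJD-OBS'])
found = any(offset_table['DATE_OBS'][i] == header['DATE-OBS'] for i in matches)
if not found:
    print("Pixel table not found in offset table: check MJD-OBS / DATE-OBS")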
Example No. 18
    def __init__(self,
                 targetname=None,
                 pointing=0,
                 folder_config="Config/",
                 rc_filename=None,
                 cal_filename=None,
                 log_filename="MusePipe.log",
                 verbose=True,
                 musemode="WFM-NOAO-N",
                 checkmode=True,
                 strong_checkmode=False,
                 **kwargs):
        """Initialise the file parameters to be used during the run Create the
        python structure which allows the pipeline to run either individual
        recipes or global ones

        Args:
            targetname (str): Name of the target (e.g., 'NGC1208').
            pointing (int): Number of the pointing to consider
            folder_config (str): Folder name for the configuration files
            rc_filename (str): Name of the input configuration file with
                the root folders
            cal_filename (str):
            log_filename (str):
            verbose (bool):
            musemode (str):
            checkmode (bool):
            strong_checkmode (bool):
            **kwargs:

        cal_filename: str
            Name of the input configuration file with calibration file names
        log_filename: str ['MusePipe.log']
            Name of the log file where all pymusepipe output will be recorded
        reset_log: bool [False]
            If True, log file will be reset to an empty file before starting
        verbose: bool [True]
            Give more information as output
        musemode: str ['WFM-NOAO-N'] 
            String to define the mode to be considered
        checkmode: bool [True]
            Check the mode when reducing
        strong_checkmode: bool [False]
            Enforce the checkmode for all if True, 
            or exclude DARK/BIAS from check if False
        vsystemic: float [0.0]
            Systemic velocity of the galaxy [in km/s]

        Other possible entries
        ----------------------
        overwrite_astropy_table: bool [True]
            Overwrite the astropy table even when it exists.
        warnings: str ['ignore']
            If set to 'ignore', will ignore the Astropy Warnings.
        time_astrometry: bool [False]
            Use the time dependent geo_table and astrometry_wcs files
            following on the date of the input exposures (MJD)
        """
        # Verbose option
        self.verbose = verbose
        self._debug = kwargs.pop("debug", False)
        if self._debug:
            upipe.print_warning("In DEBUG Mode [more printing]")

        self._suffix_prealign = kwargs.pop("suffix_prealign", suffix_prealign)
        self._suffix_checkalign = kwargs.pop("suffix_checkalign",
                                             suffix_checkalign)

        # Warnings for astropy
        self.warnings = kwargs.pop("warnings", 'ignore')
        if self.warnings == 'ignore':
            warnings.simplefilter('ignore', category=AstropyWarning)

        # Overwriting option for the astropy table
        self._overwrite_astropy_table = kwargs.pop("overwrite_astropy_table",
                                                   True)
        # Updating the astropy table
        self._update_astropy_table = kwargs.pop("update_astropy_table", False)

        # Use time dependent geo_table
        self._time_astrometry = kwargs.pop("time_astrometry", False)

        # Set alignment saving option
        self._save_alignment_images = kwargs.pop("save_alignment_images", True)

        #        super(MusePipe, self).__init__(**kwargs)
        # Filter for alignment
        self.filter_for_alignment = kwargs.pop("filter_for_alignment",
                                               "Cousins_R")
        self.filter_list = kwargs.pop("filter_list", "white")

        # Setting the default attributes #####################
        self.targetname = targetname
        self.pointing = pointing
        self.vsystemic = float(kwargs.pop("vsystemic", 0.))

        # Setting other default attributes
        if log_filename is None:
            log_filename = "log_{timestamp}.txt".format(
                timestamp=upipe.create_time_name())
            upipe.print_info(
                "The Log file will be {log}".format(log=log_filename),
                pipe=self)
        self.log_filename = log_filename

        # Further reduction options =====================================
        # Mode of the observations
        self.musemode = musemode
        # Checking if mode is correct
        self.checkmode = checkmode
        # Checking if mode is correct also for BIAS & DARK
        self.strong_checkmode = strong_checkmode
        # End of parameter settings #########################

        # Extra parameters for the initialisation and starting of recipes
        first_recipe = kwargs.pop("first_recipe", 1)
        last_recipe = kwargs.pop("last_recipe", None)
        init_raw_table = kwargs.pop("init_raw_table", True)

        # Init of the subclasses
        PipePrep.__init__(self,
                          first_recipe=first_recipe,
                          last_recipe=last_recipe)
        PipeRecipes.__init__(self, **kwargs)

        # =========================================================== #
        # Setting up the folders and names for the data reduction
        # Can be initialised by either an rc_file,
        # or a default rc_file or hardcoded defaults.
        self.pipe_params = InitMuseParameters(folder_config=folder_config,
                                              rc_filename=rc_filename,
                                              cal_filename=cal_filename,
                                              verbose=verbose)

        # Setting up the relative path for the data, using Galaxy Name + Pointing
        self.pipe_params.data = "{0}/P{1:02d}/".format(self.targetname,
                                                       self.pointing)

        # Create full path folder
        self.set_fullpath_names()
        self.paths.log_filename = joinpath(self.paths.log, log_filename)

        # Go to the data directory
        # and Recording the folder where we start
        self.paths.orig = os.getcwd()

        # Making the output folders in a safe mode
        upipe.print_info("Creating directory structure", pipe=self)
        self.goto_folder(self.paths.data)

        # ==============================================
        # Creating the extra pipeline folder structure
        for folder in self.pipe_params._dic_input_folders:
            upipe.safely_create_folder(
                self.pipe_params._dic_input_folders[folder], verbose=verbose)

        # ==============================================
        # Creating the folder structure itself if needed
        for folder in self.pipe_params._dic_folders:
            upipe.safely_create_folder(self.pipe_params._dic_folders[folder],
                                       verbose=verbose)

        # ==============================================
        # Init the Master exposure flag dictionary
        self.Master = {}
        for mastertype in dic_listMaster:
            upipe.safely_create_folder(self._get_path_expo(
                mastertype, "master"),
                                       verbose=verbose)
            self.Master[mastertype] = False

        # Init the Object folder
        for objecttype in dic_listObject:
            upipe.safely_create_folder(self._get_path_expo(
                objecttype, "processed"),
                                       verbose=verbose)

        self._dic_listMasterObject = dic_listMasterObject

        # ==============================================
        # Creating the folders in the TARGET root folder
        # e.g, for the alignment images
        for name in self.pipe_params._dic_folders_target:
            upipe.safely_create_folder(getattr(self.paths, name),
                                       verbose=verbose)

        # ==============================================
        # Going back to initial working directory
        self.goto_origfolder()

        # ===========================================================
        # Transform input dictionary of geo/astro files for later
        # This is useful for the creation of the sof files
        self._init_geoastro_dates()

        if init_raw_table:
            self.init_raw_table()
            self._raw_table_initialised = True
        else:
            self._raw_table_initialised = False
        self.read_all_astro_tables()
Example No. 19
    def goto_prevfolder(self, addtolog=False):
        """Go back to previous folder
        """
        upipe.print_info("Going back to the previous folder {0}".format(
            self.paths._prev_folder), pipe=self)
        self.goto_folder(self.paths._prev_folder, addtolog=addtolog, verbose=False)
Example No. 20
    def create_pointing_mask_wcs(self, pointing, 
            lambdaminmax_mosaic=lambdaminmax_for_mosaic,
            filter_list="white", **kwargs):
        """Create the mask of a given pointing
        And also a WCS file which can then be used to compute individual pointings
        with a fixed WCS.

        Input
        -----
        pointing: int
            Number of the pointing
        filter_list = list of str
            List of filter names to be used.

        Creates:
            Pointing mask WCS cube

        Returns:
            Name of the created WCS cube
        """

        # Adding target name as prefix or not
        self.add_targetname = kwargs.pop("add_targetname", True)
        prefix_mask = kwargs.pop("prefix_mask", default_prefix_mask)
        prefix_wcs = kwargs.pop("prefix_wcs", default_prefix_wcs)

        # Running combine with the ref WCS with only 2 spectral pixels
        # Limit the maximum lambda to the wcs ones
        self.run_combine_single_pointing(pointing=pointing,
                                         filter_list=filter_list,
                                         sof_filename="pointing_mask",
                                         add_targetname=self.add_targetname,
                                         prefix_all=prefix_mask,
                                         lambdaminmax=lambdaminmax_for_wcs,
                                         wcs_auto=True, **kwargs)

        # Now creating the mask with 0's and 1's
        dir_mask = upipe.normpath(self.paths.cubes)

        # Adding targetname for the final names
        prefix_mask = self._add_targetname(prefix_mask)
        prefix_wcs = self._add_targetname(prefix_wcs)

        name_mask = "{0}DATACUBE_FINAL_P{1:02d}.fits".format(
            prefix_mask, int(pointing))
        finalname_mask = "{0}P{1:02d}.fits".format(prefix_mask, int(pointing))
        finalname_wcs = "{0}P{1:02d}.fits".format(prefix_wcs, int(pointing))

        # First create a subcube without all the Nan
        mask_cube = MuseCube(filename=joinpath(dir_mask, name_mask))

        # Creating the new cube
        upipe.print_info("Now creating the Reference WCS cube "
                         "for pointing {0}".format(np.int(pointing)))
        cfolder, cname = mask_cube.create_reference_cube(
                lambdamin=lambdaminmax_mosaic[0],
                lambdamax=lambdaminmax_mosaic[1], 
                filter_for_nan=True, prefix=prefix_wcs, 
                outcube_name=finalname_wcs, **kwargs)

        # Now transforming this into a bona fide 1 extension WCS file
        full_cname = joinpath(cfolder, cname)
        d = pyfits.getdata(full_cname, ext=1)
        h = pyfits.getheader(full_cname, ext=1)
        hdu = pyfits.PrimaryHDU(data=d, header=h)
        hdu.writeto(full_cname, overwrite=True)
        upipe.print_info("...Done")
        return full_cname