def start(self):
        # Loop over every target that was found and perform the actions
        # that were specified in the command line arguments
        for self.target in self.targets:
            ps.module("Booting BlaauwPipe")
            ps.done(f"Current target: {self.target}")

            # Construct a working dir where all *new/altered* data goes
            working_dir_name = os.path.relpath(self.target, cst.tele_path)
            self.working_dir = os.path.join(cst.base_path, working_dir_name)

            # Extra check if the working dir already exists, else create it
            if os.path.isdir(self.working_dir):
                # TODO: this means this data probably has been handled already...
                # BUT: not sure what exact operations have been carried out
                # Maybe we can put some sort of log in each handled dir, containing
                # a list of all actions that were already performed? Seems better
                # than just 'checking' manually what has or hasn't been done yet.
                #print("Folder existed")
                pass
            else:
                os.makedirs(self.working_dir)

            # Initialize log file
            logfile_path = os.path.join(self.working_dir, cst.logfile)
            logging.basicConfig(
                filename=logfile_path,
                level=logging.DEBUG,
                force=True,
                format=
                '%(asctime)s %(levelname)s %(module)s - %(funcName)s: %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S.%03d')
            logging.info("Blaauwpipe: " + cst.cr)
            ps.done(
                f"Logfile created at {BlaauwPipe.strip_filepath(logfile_path)}"
            )

            # Create observation object for this target
            self.create_observation()
            self.create_pipelineproduct()

            ps.module("BlaauwPipe booted succesfully!")
            ps.newline()

            # Perform core functions
            self.get_plugins()
            self.run_plugins()

        if not self.targets:
            ps.module("Booting BlaauwPipe")
            ps.failed("No observation directories found for specified target!",
                      log=False)
            ps.module("Booting BlaauwPipe failed")

        else:
            print("BlaauwPipe done!")

        print("")
    def reduce_imgs(self, obs, plp):
        """ Wrapper function that loops over every light file and calls
            reduce_img() to do the actual reduction process on a per file
            basis.
        """
        # Initialise or create the saving dir for reduced content
        if not os.path.isdir(plp.red_dir): os.mkdir(plp.red_dir)

        ps.done(
            f"Savepath created at {BlaauwPipe.strip_filepath(plp.red_dir)}")

        # Sort the light files chronologically on their observation date
        lightFiles = sorted(
            plp.lightFiles,
            key=lambda file: datetime.strptime(fits.getval(file, 'DATE-OBS'),
                                               "%Y-%m-%dT%H:%M:%S.%f"))

        # TODO: compute time-based weights (each frame's creation time scaled
        # between 0 at the start and 1 at the end of the night) so that better
        # master frames can be built by interpolating multiple master biases.

        # Number of files that could not be optimally reduced; incremented
        # by reduce_img() on failure.
        self.failed_reds = 0

        # Loop over every light file in the target. enumerate() gives the
        # index in O(1) instead of the former O(n) list.index() per file.
        for file_idx, light_file in enumerate(lightFiles):
            ps.progressBar(file_idx,
                           len(lightFiles),
                           f"Reducing light file: (unknown)",
                           log=False)
            # NOTE(review): 365 is presumably a maximum age in days for
            # usable correction frames — confirm against reduce_img().
            self.reduce_img(obs, plp, light_file, 365)
        ps.updateDone(f"Reduction process finished", progressbar=True)

        if len(lightFiles) - self.failed_reds > 0:
            ps.done(
                f"Successfully reduced {len(lightFiles) - self.failed_reds} light files!"
            )
        if self.failed_reds > 0:
            ps.warning(
                f"{self.failed_reds} light files were not optimally reduced!")
 def create_observation(self):
     """ Function that creates the observation object for the 
         passed target. Also performs some small checkups to 
         be sure that we have some proper data.
     """
     target = self.target
     args = self.args
     ps.running(f"Looking for files in {self.target}")
     self.obs = Observation(target)
     self.check_obs()
     ps.done("Observation Object initialized")
    def check_obs(self):
        """ Function that checks some properties of the generated
            Observation object. Nothing fancy, just some extra checks
            to prevent unforeseen circumstances...

            Raises:
                ers.MissingFramesError: if no correction frames at all
                    (neither bias, dark nor flat) were found.
        """
        obs = self.obs

        # Update the user on the found content
        ps.updateDone(f"Found {len(obs.files)} fits files")
        ps.done(f"  {len(obs.lightFiles)} light frames")

        # Now, check which correction frame types we have. Each entry is
        # (file list, noun used in the warning, noun used in the report).
        correction_kinds = (
            (obs.biasFiles, "bias files", "bias frames"),
            (obs.darkFiles, "dark files", "dark frames"),
            (obs.flatFiles, "flat fields", "flat fields"),
        )

        # Count how many correction frame types are actually present
        frame_check = 0
        for frame_files, missing_noun, found_noun in correction_kinds:
            if frame_files:
                ps.done(f"  {len(frame_files)} {found_noun}")
                frame_check += 1
            else:
                ps.warning(f"No {missing_noun} were found")

        # Not likely to happen, but raise an error if
        # literally no correction frames were found
        if frame_check == 0:
            raise ers.MissingFramesError("No suitable correction frames found")
    def on_run(self, obs, plp):
        """ Copy all the files in the passed observation object
            to a new directory, just like a backup. Copies are made
            read-write for the owner, read-only for everyone else, and
            get a TRAW header keyword pointing at the original file.
        """

        # Construct the save path
        if not os.path.isdir(plp.raw_dir): os.mkdir(plp.raw_dir)

        ps.done(
            f"Savepath created at {BlaauwPipe.strip_filepath(plp.raw_dir)}")

        # Make a copy of each file. enumerate() gives the index in O(1)
        # instead of the former O(n) list.index() per file.
        for file_idx, ori_file in enumerate(obs.files):
            filename = os.path.basename(ori_file)
            filepath = os.path.join(plp.raw_dir, filename)

            # Update user
            ps.progressBar(file_idx, len(obs.files),
                           f"Copying file to backup: (unknown)")

            # Copy files (copy2 also preserves file metadata)
            copy2(ori_file, filepath)

            # Use chmod to ensure write permission (- rw- r-- r--)
            os.chmod(filepath,
                     stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR)

            # Add header keyword TRAW
            header = fits.getheader(filepath)
            header = BlaauwPipe.header_add_traw(header, ori_file)
            BlaauwPipe.save_fits(filepath, header=header)

        ps.updateDone(f"Copying files done", progressbar=True)
        ps.done(f"Changed file permissions to rw-r--r--")
        ps.done(f"Added TRAW keyword to headers")
        ps.done(f"Successfully copied and prepared {len(obs.files)} files!")
    def create_corrections(self, obs, plp):
        """ Function that generates and saves all possible master correction
            frames, for each cluster, binning and filter. Keep in mind that a
            cluster is a group of files that belong together, when consecutive
            frames are taken less than 60min apart from each other.
        """

        # Initialise or create the saving dir for the correction frames
        if not os.path.isdir(plp.cor_dir): os.mkdir(plp.cor_dir)

        ps.done(
            f"Savepath created at {BlaauwPipe.strip_filepath(plp.cor_dir)}")

        # Get the unique binnings/filters for this observation
        binnings = obs.get_binnings(plp.lightFiles)
        fltrs = obs.get_filters(plp.flatFiles)

        # Loop over every possible binning. enumerate() gives each cluster's
        # index in O(1) instead of the former O(n) list.index() per cluster.
        for binning in binnings:
            # Handle the master bias
            for cluster_idx, b_cluster in enumerate(
                    plp.get_bias_clusters(binning)):
                self.create_mbias(obs, plp, b_cluster, cluster_idx)

            # Handle the master dark
            for cluster_idx, d_cluster in enumerate(
                    plp.get_dark_clusters(binning)):
                self.create_mdark(obs, plp, d_cluster, cluster_idx)

            # Handle the master flats
            # First, loop over every filter
            for fltr in fltrs:
                for cluster_idx, f_cluster in enumerate(
                        plp.get_flat_clusters(binning, fltr)):
                    self.create_mflat(obs, plp, f_cluster, fltr, cluster_idx)

        ps.done(f"Added source files keywords to headers")
        ps.done(f"Successfully created master correction frames!")
    def main(self, obs, plp):
        """ Upload a few reduced frames to Astrometry and append the
            returned WCS solution to each solved file's FITS header,
            saving the result under a new filename.
        """
        ps.running("Initializing client...")
        client = Client(api_key=cst.api_key)
        ps.updateDone("Logged into Astrometry")

        # set view field width in this range (15-30 arcmin)
        # WARNING: this can be very different for your application.
        client.settings.set_scale_range(15, 30)
        #client.settings.use_sextractor = True

        # Get the raw & corresponding reduced files
        fits_files = plp.red_files[:3]
        total_files = len(fits_files)

        # give the iterable of filenames to the function, which returns a
        # generator, generating pairs containing the finished job and filename.
        ps.running("Preparing files for uploading...")
        result_iter = client.upload_files_gen(
            fits_files)  #, filter_func=self.enough_sources)
        ps.updateDone("Light files are ready for Astrometry")

        handled = 0
        solved = 0

        for job, filename in result_iter:
            handled += 1

            # Report failed jobs and move on to the next file
            if not job.success():
                ps.progressBar(
                    handled, total_files,
                    f"Astrometry failed for {BlaauwPipe.strip_filepath(filename)}"
                )
                ps.newline()
                continue

            # retrieve the wcs file from the successful job
            wcs = job.wcs_file()
            solved += 1
            ps.progressBar(
                handled, total_files,
                f"Received WCS for {BlaauwPipe.strip_filepath(filename)}")
            ps.newline()

            with fits.open(filename) as hdul:
                # append resulting header (with astrometry) to existing header
                hdul[0].header.extend(wcs)

                astrom_file = self.change_filename(filename, plp)
                ps.running(
                    f"Writing to {BlaauwPipe.strip_filepath(astrom_file)}...")
                hdul.writeto(astrom_file)
                ps.updateDone(
                    f"Astrometry results are saved to {BlaauwPipe.strip_filepath(astrom_file)}"
                )

        if solved > 0:
            ps.done(
                f"{solved} files were successfully run through Astrometry!"
            )
        if handled - solved > 0:
            ps.warning(
                f"{handled-solved} files could not be resolved!")