def start(self):
        """Run the pipeline for every target that was found.

        For each target: create a working directory (mirroring the
        telescope path under the base path), initialize a per-target
        logfile, build the observation and pipeline-product objects,
        then load and run all plugins.  If no targets were found at
        all, a failure message is printed instead.
        """
        # Loop over every target that was found and perform the actions
        # that were specified in the command line arguments
        for self.target in self.targets:
            ps.module("Booting BlaauwPipe")
            ps.done(f"Current target: {self.target}")

            # Construct a working dir where all *new/altered* data goes
            working_dir_name = os.path.relpath(self.target, cst.tele_path)
            self.working_dir = os.path.join(cst.base_path, working_dir_name)

            # Create the working dir if it does not exist yet.  An already
            # existing dir means this data has probably been handled before.
            # TODO: put some sort of log in each handled dir, containing a
            # list of all actions that were already performed; seems better
            # than just 'checking' manually what has or hasn't been done yet.
            os.makedirs(self.working_dir, exist_ok=True)

            # Initialize log file.  Note: strftime has no milliseconds
            # directive ('%03d' in datefmt would render the zero-padded day
            # of month, not msecs), so milliseconds are appended through
            # logging's %(msecs)d field instead.
            logfile_path = os.path.join(self.working_dir, cst.logfile)
            logging.basicConfig(
                filename=logfile_path,
                level=logging.DEBUG,
                force=True,
                format=
                '%(asctime)s.%(msecs)03d %(levelname)s %(module)s - %(funcName)s: %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S')
            logging.info("Blaauwpipe: " + cst.cr)
            ps.done(
                f"Logfile created at {BlaauwPipe.strip_filepath(logfile_path)}"
            )

            # Create observation object for this target
            self.create_observation()
            self.create_pipelineproduct()

            ps.module("BlaauwPipe booted successfully!")
            ps.newline()

            # Perform core functions
            self.get_plugins()
            self.run_plugins()

        # No targets found at all: the loop above never ran, report failure
        if not self.targets:
            ps.module("Booting BlaauwPipe")
            ps.failed("No observation directories found for specified target!",
                      log=False)
            ps.module("Booting BlaauwPipe failed")

        else:
            print("BlaauwPipe done!")

        print("")
 def run_plugins(self):
     """Execute every loaded plugin on the current observation and product."""
     for plugin in self.plugins:
         # Plugins shipped with the pipeline live under a 'core' module;
         # everything else is treated as an external plugin.
         origin = 'core' if 'core' in plugin.__module__ else 'external'
         ps.module(f"Running {origin} plugin: {plugin.title}")
         plugin.on_run(self.obs, self.plp)
         ps.module(f"Plugin {plugin.title} executed!")
         ps.newline()
    def reduce_img(self, obs, plp, light_file, max_days_off):
        """Reduce a single light frame with the closest master frames.

        Finds master bias/dark/flat frames taken at most ``max_days_off``
        days from the light frame, applies the standard reduction
        ``(light - bias - dark * exptime) / flat``, writes the result to
        the reduction dir, and cross-links the raw and reduced headers.
        Frames reduced with non-same-night masters (or for which no
        suitable master exists) are appended to the pending log so they
        can be re-reduced later.

        Parameters
        ----------
        obs : observation object; unused here, kept for a uniform
            plugin-method signature.
        plp : pipeline product; provides file info, working dir and
            reduction dir.
        light_file : str
            Path of the raw light frame to reduce.
        max_days_off : int
            Maximum allowed age difference (in days) between the light
            frame and its correction frames.
        """
        # Retrieve basic information
        binning, fltr, exptime, crtn = plp.get_file_info(
            light_file, ["BINNING", "FILTER", "EXPTIME", "DATE-OBS"])
        creation_datetime = datetime.strptime(crtn, "%Y-%m-%dT%H:%M:%S.%f")

        # Retrieve the closest master correction frames
        try:
            master_bias, mbias_path, bias_off = BlaauwPipe.get_closest_master(
                creation_datetime, plp, max_days_off, binning, "master_bias")
            master_dark, mdark_path, dark_off = BlaauwPipe.get_closest_master(
                creation_datetime, plp, max_days_off, binning, "master_dark")
            master_flat, mflat_path, flat_off = BlaauwPipe.get_closest_master(
                creation_datetime,
                plp,
                max_days_off,
                binning,
                "master_flat",
                fltr=fltr)
        except ers.SuitableMasterMissingError as err:
            # No suitable master frames exist (yet); register the file in
            # the pending log so a later run can pick it up again.
            folder_datetime = datetime.strptime(
                plp.working_dir.replace(cst.base_path, '').split(os.sep)[1],
                '%y%m%d')
            new_line = np.array([
                folder_datetime.date(), "Light file", binning, fltr, "?", "?",
                "?", "-", light_file
            ])
            pd.append_pending_log(new_line)
            ps.updateFailed(
                f"Failed to reduce light file ({err}): {light_file}",
                progressbar=True)
            self.failed_reds += 1
            return

        # Open the content of the current light fits; the context manager
        # guarantees the file handle is released again (the original left
        # it open).
        with fits.open(light_file) as hduList:
            hdu_data = hduList[0].data

        # Add source files to the header
        header = fits.getheader(light_file)
        header = BlaauwPipe.header_add_praw(
            header,
            light_file)  # Add the raw version to this pipeline reduced file
        header = BlaauwPipe.header_add_mbias(header,
                                             mbias_path,
                                             days_off=bias_off)
        header = BlaauwPipe.header_add_mdark(header,
                                             mdark_path,
                                             days_off=dark_off)
        header = BlaauwPipe.header_add_mflat(header,
                                             mflat_path,
                                             days_off=flat_off)

        # Reduce the content and save it
        hdu_data_red = (hdu_data - master_bias -
                        master_dark * exptime) / master_flat
        filename_ori = os.path.basename(light_file)
        savepath = os.path.join(plp.red_dir, filename_ori)
        BlaauwPipe.save_fits(savepath, data=hdu_data_red, header=header)

        # Add the current file to the raw version pred
        header = fits.getheader(light_file)
        header = BlaauwPipe.header_add_pred(header, savepath)
        BlaauwPipe.save_fits(light_file, data=hdu_data, header=header)

        # Add to the pending log if any master was not taken on the same
        # night, so a better re-reduction can be attempted later
        max_off = abs(max(bias_off, dark_off, flat_off))
        if max_off > 0:
            folder_datetime = datetime.strptime(
                plp.working_dir.replace(cst.base_path, '').split(os.sep)[1],
                '%y%m%d')
            new_line = np.array([
                folder_datetime.date(), "Light file", binning, fltr,
                bias_off, dark_off, flat_off,
                (folder_datetime + timedelta(days=max_off)).date(), light_file
            ])
            pd.append_pending_log(new_line)
            self.failed_reds += 1
            # Bug fix: this message referenced an undefined name 'days_off'
            # (NameError on every non-zero-offset reduction); report the
            # individual bias/dark/flat offsets instead.
            ps.updateWarning(
                f"Reduced light file with non-zero days-off ({bias_off}/{dark_off}/{flat_off}) saved at {BlaauwPipe.strip_filepath(savepath)}",
                progressbar=True)
        else:
            ps.updateDone(
                f"Reduced light file saved at {BlaauwPipe.strip_filepath(savepath)}",
                progressbar=True)
        ps.newline()

        return
    def main(self, obs, plp):
        """Upload reduced frames to Astrometry.net and save WCS solutions.

        Each successfully solved file gets the resulting WCS header
        appended and is written out under a new filename; progress and
        failures are reported through the status printer.
        """
        ps.running("Initializing client...")
        client = Client(api_key=cst.api_key)
        ps.updateDone("Logged into Astrometry")

        # set view field width in this range (15-30 arcmin)
        # WARNING: this can be very different for your application.
        client.settings.set_scale_range(15, 30)
        #c.settings.use_sextractor = True

        # Get the raw & corresponding reduced files
        # NOTE(review): only the first three reduced files are submitted —
        # looks like a debugging leftover; confirm before removing [:3].
        fits_files = plp.red_files[:3]

        # The upload generator yields (job, filename) pairs as the
        # Astrometry jobs finish.
        ps.running("Preparing files for uploading...")
        result_iter = client.upload_files_gen(
            fits_files)  #, filter_func=self.enough_sources)
        ps.updateDone("Light files are ready for Astrometry")

        file_counter, success_counter = 0, 0

        for job, filename in result_iter:
            file_counter += 1

            if not job.success():
                ps.progressBar(
                    file_counter, len(fits_files),
                    f"Astrometry failed for {BlaauwPipe.strip_filepath(filename)}"
                )
                ps.newline()
                continue

            # retrieve the wcs file from the successful job
            wcs = job.wcs_file()
            success_counter += 1
            ps.progressBar(
                file_counter, len(fits_files),
                f"Received WCS for {BlaauwPipe.strip_filepath(filename)}")
            ps.newline()

            with fits.open(filename) as hdu_list:
                # append resulting header (with astrometry) to existing header
                hdu_list[0].header.extend(wcs)

                astrom_file = self.change_filename(filename, plp)
                ps.running(
                    f"Writing to {BlaauwPipe.strip_filepath(astrom_file)}...")
                hdu_list.writeto(astrom_file)
                ps.updateDone(
                    f"Astrometry results are saved to {BlaauwPipe.strip_filepath(astrom_file)}"
                )

        # Summarize the run
        if success_counter > 0:
            ps.done(
                f"{success_counter} files were successfully run through Astrometry!"
            )
        failed_count = file_counter - success_counter
        if failed_count > 0:
            ps.warning(
                f"{failed_count} files could not be resolved!")
            as a new fits file at save_path
        """
        if os.path.exists(save_path) and data is None:
            data = fits.getdata(save_path)
        if os.path.exists(save_path) and header is None:
            header = fits.getheader(save_path)
        hduNew = fits.PrimaryHDU(data, header=header)
        hduNew.writeto(save_path, overwrite=overwrite)

    @staticmethod
    def get_kw(filepath, keyword):
        """Return the value of *keyword* from the primary FITS header of *filepath*."""
        return fits.getheader(filepath, 0)[keyword]


def main():
    """Construct a BlaauwPipe instance and run the full pipeline."""
    BlaauwPipe().start()


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Late import: the status printer is only needed on this exit path.
        import core.printstatus as ps
        ps.warning("KeyboardInterrupt! Exiting...")
        ps.newline()
        try:
            sys.exit(1)
        except SystemExit:
            # sys.exit raises SystemExit, which something upstream might
            # swallow — fall back to an immediate hard exit.
            os._exit(1)