Example #1
def test_copy_files():
    """Test that files are copied successfully"""

    # Create an example file to be copied
    data_dir = os.path.dirname(__file__)
    file_to_copy = 'file.txt'
    original_file = os.path.join(data_dir, file_to_copy)
    Path(original_file).touch()
    assert os.path.exists(
        original_file), 'Failed to create original test file.'

    # Make a copy one level up
    new_location = os.path.abspath(os.path.join(data_dir, '../'))
    copied_file = os.path.join(new_location, file_to_copy)

    # Copy the file
    success, failure = copy_files([original_file], new_location)
    assert success == [copied_file]
    assert os.path.isfile(copied_file)

    # Clean up: remove the original and the copy
    os.remove(original_file)
    os.remove(copied_file)
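
The test above pins down the contract of `copy_files`: it takes a list of source paths and a destination directory, and returns a tuple of the destination paths that were copied successfully and the source paths that were not. The sketch below is a minimal implementation of that contract for reference only, not the project's actual helper; the choice of `shutil.copy2` and the exceptions caught are assumptions.

import os
import shutil


def copy_files(files, out_dir):
    """Copy each file in ``files`` into ``out_dir``.

    Returns (copied, not_copied): the destination paths of the files
    that were copied successfully, and the source paths that failed.
    Sketch only; the real helper may differ.
    """
    copied = []
    not_copied = []
    for input_file in files:
        destination = os.path.join(out_dir, os.path.basename(input_file))
        try:
            shutil.copy2(input_file, destination)
            copied.append(destination)
        except (OSError, shutil.Error):
            not_copied.append(input_file)
    return copied, not_copied
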
Example #2
    def map_uncal_and_rate_file_lists(self, uncal_files, rate_files, rate_files_to_copy, obs_type):
        """Copy uncal and rate files from the filesystem to the working
        directory. Any requested files that are not in the filesystem
        are noted and skipped. Return the file lists with skipped files
        removed.

        Parameters
        ----------
        uncal_files : list
            List of raw files to be copied

        rate_files : list
            List of rate (slope) images to be copied. This list should
            correspond 1-to-1 with ``uncal_files``. Any rate files that
            were not found in the MAST query should be set to None.

        rate_files_to_copy : list
            Similar to ``rate_files`` but with the None entries omitted.

        obs_type : str
            Observation type (``dark`` or ``flat``). Used only for
            logging.

        Returns
        -------
        uncal_files : list
            List of the input raw files with any that failed to copy
            removed

        rate_files : list
            List of the input rate files with entries removed (if the
            corresponding uncal file also failed to copy) or set to the
            string 'None' (if only the rate file failed to copy)
        """
        # Copy files from filesystem
        uncal_copied_files, uncal_not_copied = copy_files(uncal_files, self.data_dir)
        rate_copied_files, rate_not_copied = copy_files(rate_files_to_copy, self.data_dir)

        # Replace any rate files that failed to copy with the 'None'
        # placeholder string so that they can be regenerated later
        if len(rate_not_copied) > 0:
            for badfile in rate_not_copied:
                rate_files[rate_files.index(badfile)] = 'None'

        # Any uncal files that failed to copy must be removed
        # entirely from the uncal and rate lists
        if len(uncal_not_copied) > 0:
            for badfile in uncal_not_copied:
                bad_index = uncal_files.index(badfile)
                del uncal_files[bad_index]
                del rate_files[bad_index]

        logging.info('\tNew {} observations: '.format(obs_type))
        logging.info('\tData dir: {}'.format(self.data_dir))
        logging.info('\tCopied to data dir: {}'.format(uncal_copied_files))
        logging.info('\tNot copied (failed, or missing from filesystem): {}'.format(uncal_not_copied))

        # After all this, the lists should be the same length
        # and have a 1-to-1 correspondence
        if len(uncal_files) != len(rate_files):
            raise ValueError('Lists of {} uncal and rate files have different lengths.'.format(obs_type))

        return uncal_files, rate_files
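
For reference, here is a small worked example of the bookkeeping this method performs. The file names are invented for illustration, and the copy step itself is elided; the comments show the expected outcome.

# Hypothetical inputs (file names invented for illustration)
uncal_files = ['jw001_uncal.fits', 'jw002_uncal.fits', 'jw003_uncal.fits']
rate_files = ['jw001_rate.fits', 'jw002_rate.fits', 'jw003_rate.fits']
rate_files_to_copy = list(rate_files)

# Suppose jw002_uncal.fits and jw003_rate.fits fail to copy.
# jw003_rate.fits is replaced by the 'None' placeholder so the rate image
# can be regenerated later, and the jw002 pair is dropped entirely:
#   uncal_files -> ['jw001_uncal.fits', 'jw003_uncal.fits']
#   rate_files  -> ['jw001_rate.fits', 'None']
# The two lists keep the same length and their 1-to-1 pairing.
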
Example #3
    def run(self):
        """The main method.  See module docstrings for further
        details.
        """

        logging.info('Begin logging for dark_monitor')

        apertures_to_skip = ['NRCALL_FULL', 'NRCAS_FULL', 'NRCBS_FULL']

        # Get the output directory
        self.output_dir = os.path.join(get_config()['outputs'], 'dark_monitor')

        # Read in config file that defines the thresholds for the number
        # of dark files that must be present in order for the monitor to run
        limits = ascii.read(THRESHOLDS_FILE)

        # Use the current time as the end time for MAST query
        self.query_end = Time.now().mjd

        # Loop over all instruments
        for instrument in JWST_INSTRUMENT_NAMES:
            self.instrument = instrument

            # Identify which database tables to use
            self.identify_tables()

            # Get a list of all possible apertures from pysiaf
            possible_apertures = list(Siaf(instrument).apernames)
            possible_apertures = [
                ap for ap in possible_apertures if ap not in apertures_to_skip
            ]

            for aperture in possible_apertures:
                logging.info('')
                logging.info('Working on aperture {} in {}'.format(
                    aperture, instrument))

                # Find the appropriate threshold for the number of new files needed
                match = aperture == limits['Aperture']
                file_count_threshold = limits['Threshold'][match][0]

                # Locate the record of the most recent MAST search
                self.aperture = aperture
                self.query_start = self.most_recent_search()
                logging.info('\tQuery times: {} {}'.format(
                    self.query_start, self.query_end))

                # Query MAST using the aperture and the time of the
                # most recent previous search as the starting time
                new_entries = mast_query_darks(instrument, aperture,
                                               self.query_start,
                                               self.query_end)

                logging.info('\tAperture: {}, new entries: {}'.format(
                    self.aperture, len(new_entries)))

                # Check to see if there are enough new files to meet the
                # monitor's signal-to-noise requirements
                if len(new_entries) >= file_count_threshold:
                    logging.info(
                        '\tSufficient new dark files found for {}, {} to run the dark monitor.'
                        .format(self.instrument, self.aperture))

                    # Get full paths to the files
                    new_filenames = []
                    for file_entry in new_entries:
                        try:
                            new_filenames.append(
                                filesystem_path(file_entry['filename']))
                        except FileNotFoundError:
                            logging.warning(
                                '\t\tUnable to locate {} in filesystem. Not including in processing.'
                                .format(file_entry['filename']))

                    # Set up directories for the copied data
                    ensure_dir_exists(os.path.join(self.output_dir, 'data'))
                    self.data_dir = os.path.join(
                        self.output_dir,
                        'data/{}_{}'.format(self.instrument.lower(),
                                            self.aperture.lower()))
                    ensure_dir_exists(self.data_dir)

                    # Copy files from filesystem
                    dark_files, not_copied = copy_files(
                        new_filenames, self.data_dir)

                    logging.info('\tNew_filenames: {}'.format(new_filenames))
                    logging.info('\tData dir: {}'.format(self.data_dir))
                    logging.info(
                        '\tCopied to working dir: {}'.format(dark_files))
                    logging.info('\tNot copied: {}'.format(not_copied))

                    # Run the dark monitor
                    self.process(dark_files)
                    monitor_run = True

                else:
                    logging.info((
                        '\tDark monitor skipped. {} new dark files for {}, {}. {} new files are '
                        'required to run dark current monitor.').format(
                            len(new_entries), instrument, aperture,
                            file_count_threshold))
                    monitor_run = False

                # Update the query history
                new_entry = {
                    'instrument': instrument,
                    'aperture': aperture,
                    'start_time_mjd': self.query_start,
                    'end_time_mjd': self.query_end,
                    'files_found': len(new_entries),
                    'run_monitor': monitor_run,
                    'entry_date': datetime.datetime.now()
                }
                self.query_table.__table__.insert().execute(new_entry)
                logging.info('\tUpdated the query history table')

        logging.info('Dark Monitor completed successfully.')
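
The loop above depends on `filesystem_path` raising `FileNotFoundError` for any MAST result that is not present on disk. The sketch below shows a helper with that behavior; the root location and the program-ID directory layout are assumptions made for illustration, not the project's actual filesystem convention.

import glob
import os


def filesystem_path(filename, root='/path/to/filesystem'):
    """Return the full path to ``filename`` under ``root``, or raise
    FileNotFoundError if it cannot be located. Sketch only; the
    directory layout assumed here is illustrative.
    """
    # Assume files are grouped by program ID, e.g. <root>/jw01234/<filename>
    program_dir = filename[:7]
    full_path = os.path.join(root, program_dir, filename)
    if os.path.isfile(full_path):
        return full_path

    # Fall back to a recursive search in case the layout differs
    matches = glob.glob(os.path.join(root, '**', filename), recursive=True)
    if matches:
        return matches[0]

    raise FileNotFoundError('{} not found under {}'.format(filename, root))
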
Example #4
    def run(self):
        """The main method.  See module docstrings for further
        details.
        """

        logging.info('Begin logging for dark_monitor')

        apertures_to_skip = ['NRCALL_FULL', 'NRCAS_FULL', 'NRCBS_FULL']

        # Get the output directory
        self.output_dir = os.path.join(get_config()['outputs'], 'dark_monitor')

        # Read in config file that defines the thresholds for the number
        # of dark files that must be present in order for the monitor to run
        limits = ascii.read(THRESHOLDS_FILE)

        # Use the current time as the end time for MAST query
        self.query_end = Time.now().mjd

        # Loop over all instruments
        for instrument in JWST_INSTRUMENT_NAMES:
            self.instrument = instrument

            # Identify which database tables to use
            self.identify_tables()

            # Get a list of all possible apertures from pysiaf
            possible_apertures = list(Siaf(instrument).apernames)
            possible_apertures = [
                ap for ap in possible_apertures if ap not in apertures_to_skip
            ]

            # Get a list of all possible readout patterns associated with the aperture
            possible_readpatts = RAPID_READPATTERNS[instrument]

            for aperture in possible_apertures:
                logging.info('')
                logging.info('Working on aperture {} in {}'.format(
                    aperture, instrument))

                # Find appropriate threshold for the number of new files needed
                match = aperture == limits['Aperture']

                # If the aperture is not listed in the threshold file, we need
                # a default
                if not np.any(match):
                    file_count_threshold = 30
                    logging.warning((
                        '\tAperture {} is not present in the threshold file. Continuing '
                        'with the default threshold of 30 files.'.format(
                            aperture)))
                else:
                    file_count_threshold = limits['Threshold'][match][0]
                self.aperture = aperture

                # We need a separate search for each readout pattern
                for readpatt in possible_readpatts:
                    self.readpatt = readpatt
                    logging.info('\tWorking on readout pattern: {}'.format(
                        self.readpatt))

                    # Locate the record of the most recent MAST search
                    self.query_start = self.most_recent_search()
                    logging.info('\tQuery times: {} {}'.format(
                        self.query_start, self.query_end))

                    # Query MAST using the aperture and the time of the
                    # most recent previous search as the starting time
                    new_entries = mast_query_darks(instrument,
                                                   aperture,
                                                   self.query_start,
                                                   self.query_end,
                                                   readpatt=self.readpatt)
                    logging.info(
                        '\tAperture: {}, Readpattern: {}, new entries: {}'.
                        format(self.aperture, self.readpatt, len(new_entries)))

                    # Check to see if there are enough new files to meet the
                    # monitor's signal-to-noise requirements
                    if len(new_entries) >= file_count_threshold:
                        logging.info(
                            '\tMAST query has returned sufficient new dark files for {}, {}, {} to run the dark monitor.'
                            .format(self.instrument, self.aperture,
                                    self.readpatt))

                        # Get full paths to the files
                        new_filenames = []
                        for file_entry in new_entries:
                            try:
                                new_filenames.append(
                                    filesystem_path(file_entry['filename']))
                            except FileNotFoundError:
                                logging.warning(
                                    '\t\tUnable to locate {} in filesystem. Not including in processing.'
                                    .format(file_entry['filename']))

                        # In some (unusual) cases, there are files in MAST with the correct aperture name
                        # but incorrect array sizes. Make sure that the new files all have the expected
                        # aperture size
                        temp_filenames = []
                        bad_size_filenames = []
                        expected_ap = Siaf(instrument)[aperture]
                        expected_xsize = expected_ap.XSciSize
                        expected_ysize = expected_ap.YSciSize
                        for new_file in new_filenames:
                            with fits.open(new_file) as hdulist:
                                xsize = hdulist[0].header['SUBSIZE1']
                                ysize = hdulist[0].header['SUBSIZE2']
                            if xsize == expected_xsize and ysize == expected_ysize:
                                temp_filenames.append(new_file)
                            else:
                                bad_size_filenames.append(new_file)
                        if len(temp_filenames) != len(new_filenames):
                            logging.info(
                                '\tSome files returned by MAST have unexpected aperture sizes. These files will be ignored: '
                            )
                            for badfile in bad_size_filenames:
                                logging.info('\t\t{}'.format(badfile))
                        new_filenames = deepcopy(temp_filenames)

                        # If it turns out that the monitor doesn't find enough
                        # of the files returned by the MAST query to meet the threshold,
                        # then the monitor will not be run
                        if len(new_filenames) < file_count_threshold:
                            logging.info((
                                "\tFilesystem search for the files identified by MAST has returned {} files. "
                                "This is less than the required minimum number of files ({}) necessary to run "
                                "the monitor. Quitting.").format(
                                    len(new_filenames), file_count_threshold))
                            monitor_run = False
                        else:
                            logging.info((
                                "\tFilesystem search for the files identified by MAST has returned {} files."
                            ).format(len(new_filenames)))
                            monitor_run = True

                        if monitor_run:
                            # Set up directories for the copied data
                            ensure_dir_exists(
                                os.path.join(self.output_dir, 'data'))
                            self.data_dir = os.path.join(
                                self.output_dir,
                                'data/{}_{}'.format(self.instrument.lower(),
                                                    self.aperture.lower()))
                            ensure_dir_exists(self.data_dir)

                            # Copy files from filesystem
                            dark_files, not_copied = copy_files(
                                new_filenames, self.data_dir)

                            logging.info(
                                '\tNew_filenames: {}'.format(new_filenames))
                            logging.info('\tData dir: {}'.format(
                                self.data_dir))
                            logging.info('\tCopied to working dir: {}'.format(
                                dark_files))
                            logging.info('\tNot copied: {}'.format(not_copied))

                            # Run the dark monitor
                            self.process(dark_files)

                    else:
                        logging.info((
                            '\tDark monitor skipped. MAST query has returned {} new dark files for '
                            '{}, {}, {}. {} new files are required to run dark current monitor.'
                        ).format(len(new_entries), instrument, aperture,
                                 self.readpatt, file_count_threshold))
                        monitor_run = False

                    # Update the query history
                    new_entry = {
                        'instrument': instrument,
                        'aperture': aperture,
                        'readpattern': self.readpatt,
                        'start_time_mjd': self.query_start,
                        'end_time_mjd': self.query_end,
                        'files_found': len(new_entries),
                        'run_monitor': monitor_run,
                        'entry_date': datetime.datetime.now()
                    }
                    self.query_table.__table__.insert().execute(new_entry)
                    logging.info('\tUpdated the query history table')

        logging.info('Dark Monitor completed successfully.')
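
The aperture-size check in the method above (comparing the SUBSIZE1/SUBSIZE2 header keywords against the pysiaf XSciSize/YSciSize values) is self-contained enough to factor out. The helper below restates that check as a stand-alone function; its name and signature are inventions for illustration and do not appear in the original code.

from astropy.io import fits
from pysiaf import Siaf


def filter_by_aperture_size(filenames, instrument, aperture):
    """Split ``filenames`` into files whose array size matches the pysiaf
    definition of ``aperture`` and files that do not. Illustrative
    refactoring of the size check above; not part of the original code.
    """
    expected_ap = Siaf(instrument)[aperture]
    expected_xsize = expected_ap.XSciSize
    expected_ysize = expected_ap.YSciSize

    good_files, bad_size_files = [], []
    for filename in filenames:
        with fits.open(filename) as hdulist:
            xsize = hdulist[0].header['SUBSIZE1']
            ysize = hdulist[0].header['SUBSIZE2']
        if xsize == expected_xsize and ysize == expected_ysize:
            good_files.append(filename)
        else:
            bad_size_files.append(filename)
    return good_files, bad_size_files
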