def add_observer(top_of_tree, observers):
    """
    Add observer name to each of the FITS files in a tree of directories

    Parameters
    ----------
    top_of_tree : str
        Path to top of the directory tree containing the images to be
        modified
    observers : dict
        Dictionary with observation date of the form YYYY-MM-DD as keys and
        name(s) of observer(s) as a single string as the value.

    .. warning::
        This function will overwrite the FITS files in the tree.
    """
    for root, dirs, files in os.walk(top_of_tree):
        date = obs_date(root)
        if not date:
            continue
        logging.info('Processing directory %s with observers: %s',
                     root, observers[date])
        ic = ImageFileCollection(root, keywords=['imagetyp'])
        observer_keyword = FITSKeyword(name='observer', value=observers[date])
        # ``overwrite`` replaces the deprecated ``clobber`` spelling and
        # matches the usage everywhere else in this file.
        for hdr, fname in ic.headers(overwrite=True, return_fname=True):
            # Use .get so a header that has OBSERVER but no PURGED keyword
            # does not raise KeyError (the original indexed hdr['purged']
            # unconditionally once 'observer' was present).
            if ('observer' in hdr) and hdr.get('purged', False):
                logging.warning(
                    'Skipping file %s in %s because observer '
                    'has already been added', fname, root)
                continue
            observer_keyword.add_to_header(hdr, history=True)
def test_add_ra_dec_from_object_name_edge_cases(caplog):
    # add a 'dec' keyword to every light file so that none need RA/Dec
    ic = ImageFileCollection(_test_dir, keywords=['imagetyp'])
    for h in ic.headers(imagetyp='light', overwrite=True):
        h['dec'] = '+17:42:00'
        h['ra'] = '03:45:06'
        h['object'] = 'm101'
    # does this succeed without errors?
    ph.add_ra_dec_from_object_name(_test_dir)
    # add name that will fail as object of one image
    image_path = path.join(_test_dir, _test_image_name)
    # Use a context manager so the HDUList handle is closed before the
    # directory is re-processed (the original leaked the open file).
    with fits.open(image_path) as f:
        h = f[0].header
        try:
            del h['RA']
            del h['dec']
        except KeyError:
            pass
        h['object'] = 'i am a fake object'
        f.writeto(image_path, overwrite=True)
    ph.add_ra_dec_from_object_name(_test_dir)
    warns = get_patch_header_logs(caplog, level=logging.WARN)
    assert 'Unable to lookup' in warns
def test_run_triage_on_set_with_no_light_files(self):
    # Relabel every light frame as a bias so the data set contains no
    # light files at all; run_triage should still complete cleanly.
    collection = ImageFileCollection(self.test_dir.strpath,
                                     keywords=['imagetyp'])
    for hdr in collection.headers(imagetyp='light', overwrite=True):
        hdr['imagetyp'] = 'BIAS'
    run_triage.main([self.test_dir.strpath])
    # Reaching this point without an exception is the whole test.
    assert 1
def test_add_object_name_logic_when_all_images_have_matching_object(caplog):
    # Turn every light frame into a flat so that no image is eligible
    # for object matching at all.
    collection = ImageFileCollection(_test_dir, keywords=['imagetyp'])
    for header in collection.headers(imagetyp='light', overwrite=True):
        header['imagetyp'] = 'FLAT'
    ph.add_object_info(_test_dir)
    # The "no matches" message is logged at INFO level.
    infos = get_patch_header_logs(caplog, level=logging.INFO)
    alls = get_patch_header_logs(caplog)
    print(alls)
    assert 'NO OBJECTS MATCHED' in infos
def test_unit_is_added():
    # patch in _test_dir, overwriting existing files
    ph.patch_headers(_test_dir, overwrite=True, new_file_ext='')
    collection = ImageFileCollection(_test_dir, keywords='*')
    feder = Feder()
    print(_test_dir)
    for header, fname in collection.headers(return_fname=True):
        instrument = feder.instruments[header['instrume']]
        unit = instrument.image_unit
        if unit is None:
            continue
        print(str(unit), fname)
        # If the instrument has a unit, the header should too.
        assert header['BUNIT'] == str(unit)
def test_triage_grabbing_all_keywords_gets_them_all(self):
    # Run triage with -a (all keywords) and confirm every header keyword
    # appears as a column of the resulting table.
    tbl_name = 'tbl.txt'
    run_triage.main(arglist=['-a', '-t', tbl_name, self.test_dir.strpath])
    rt_table = Table.read(self.test_dir.join(tbl_name).strpath,
                          format='ascii.csv')
    lcase_columns = [name.lower() for name in rt_table.colnames]
    print(lcase_columns)
    collection = ImageFileCollection(self.test_dir.strpath, keywords='*')
    for header in collection.headers():
        for keyword in header:
            if not keyword:
                continue
            assert keyword.lower() in lcase_columns
def test_quick_add_keys_records_history(self, keyword_arg, file_arg,
                                        file_column):
    # Verify quick_add_keys_to_file records the added keyword/value pair
    # in each FITS HISTORY, across every combination of CLI argument
    # style (keyword_arg/file_arg) and file-list column name.
    ic = ImageFileCollection(self.test_dir.strpath, keywords=['imagetyp'])
    ic.summary.keep_columns('file')
    file_list = os.path.join(ic.location, 'files.txt')
    keyword_list = os.path.join(ic.location, 'keys.txt')
    full_paths = [
        os.path.join(self.test_dir.strpath, fil)
        for fil in ic.summary['file']
    ]
    print('fill paths: %s' % ' '.join(full_paths))
    # Replace the bare file names with full paths, under whatever column
    # name this parameterization asks for.
    ic.summary['file'][:] = full_paths
    ic.summary.remove_column('file')
    ic.summary.add_column(Column(data=full_paths, name=file_column))
    ic.summary.write(file_list, format='ascii')
    if file_column != 'file':
        # restore the canonical column name so the collection can still
        # be iterated for the assertions at the end of the test
        ic.summary.rename_column(file_column, 'file')
    dumb_keyword = 'munkeez'.upper()
    dumb_value = 'bananaz'
    keywords = Column(data=[dumb_keyword], name='Keyword')
    vals = Column(data=[dumb_value], name='value')
    keyword_table = Table()
    keyword_table.add_columns([keywords, vals])
    keyword_table.write(keyword_list, format='ascii')
    # Map each CLI option to the argument values it needs; the empty key
    # stands for bare positional file paths.
    args_for = {}
    args_for['--key-file'] = [keyword_list]
    args_for['--key-value'] = [dumb_keyword, dumb_value]
    args_for['--file-list'] = [file_list]
    args_for[''] = full_paths
    argslist = [keyword_arg]
    argslist.extend(args_for[keyword_arg])
    if file_arg:
        argslist.append(file_arg)
        argslist.extend(args_for[file_arg])
    if file_column.lower() != 'file' and file_arg:
        # A file list whose column is not named 'file' must be rejected.
        with pytest.raises(ValueError):
            quick_add_keys_to_file.main(argslist)
        return
    else:
        quick_add_keys_to_file.main(argslist)
    # add_keys(file_list=file_list, key_file=keyword_list)
    for header in ic.headers():
        assert (header[dumb_keyword] == dumb_value)
        history_string = ' '.join(header['history'])
        assert (dumb_keyword in history_string)
        assert (dumb_value in history_string)
def test_patch_headers_stops_if_instrument_or_software_not_found(
        badkey, caplog):
    # patch_headers must raise rather than silently continue when the
    # instrument or software keyword is unrecognized.
    ic = ImageFileCollection(_test_dir, keywords=['imagetyp'])
    # need a header that contains IMAGETYP so that it will be processed
    a_fits_file = ''
    for h, f in ic.headers(imagetyp='*', return_fname=True):
        a_fits_file = f
        break
    # Use a context manager so the HDUList is closed after writing (the
    # original left the file handle open).
    with fits.open(path.join(_test_dir, a_fits_file)) as a_fits_hdu:
        hdr = a_fits_hdu[0].header
        badname = 'Nonsense'
        hdr[badkey] = badname
        a_fits_hdu.writeto(path.join(_test_dir, a_fits_file),
                           overwrite=True)
    with pytest.raises(KeyError):
        ph.patch_headers(_test_dir)
def main(fits_directory, jpeg_directory, base_url, night,
         thumbnail_directory='thumbnail', org_by=None):
    """
    Render the HTML viewer page for one night of images.

    Parameters
    ----------
    fits_directory : str
        Directory containing the FITS files to list.
    jpeg_directory : str
        Directory of JPEG versions of the images.
        (NOTE(review): not referenced in this body -- confirm intent.)
    base_url : str
        URL prefix used for the image links in the page.
    night : str
        Label for the night, passed through to the template.
    thumbnail_directory : str, optional
        Subdirectory of `base_url` holding thumbnails.
    org_by : list, optional
        List of FITS keywords by which the files should be organized.

    Returns
    -------
    str
        Rendered HTML produced from ``viewer_page.html``.
    """
    ic = ImageFileCollection(fits_directory, keywords='*')
    groups = ['BIAS', 'DARK', 'FLAT', 'LIGHT']
    image_groups = OrderedDict()
    for group in groups:
        images = []
        for header, fname in ic.headers(imagetyp=group, return_fname=True):
            f = os.path.basename(fname)
            # Swap the FITS extension for .jpg. The original chained
            # str.replace calls, but 'name.fits'.replace('.fit', '.jpg')
            # yields 'name.jpgs', so split the extension off instead.
            base, ext = os.path.splitext(f)
            if ext.lower() in ('.fit', '.fts', '.fits'):
                jpeg_name = base + '.jpg'
            else:
                jpeg_name = f
            images.append({
                'jpeg_name': jpeg_name,
                'title': construct_image_title(f, header),
                'original_name': f
            })
        image_groups[group] = images
    loader = jinja2.FileSystemLoader('.')
    e = jinja2.Environment(loader=loader, autoescape=True)
    template = e.get_template('viewer_page.html')
    foo = template.render(
        night=night,
        base_url=base_url,
        image_groups=image_groups,
        base_url_thumb=os.path.join(base_url, thumbnail_directory))
    return foo
def test_sort_handles_cannot_form_tree(self, set_test_files):
    # The object keyword is stripped from all headers, so no directory
    # tree can be formed for the light files. They should all land in
    # the "unsorted" directory and no error should be raised.
    images = ImageFileCollection(self.test_dir.strpath,
                                 keywords=['imagetyp', 'object'])
    n_light = 0
    for header in images.headers(overwrite=True, imagetyp='LIGHT'):
        n_light += 1
        try:
            del header['object']
        except KeyError:
            pass
    dest = self.test_dir.mkdtemp()
    sort_files.main(['-d', dest.strpath, self.test_dir.strpath])
    unsorted_path = os.path.join(dest.strpath, 'LIGHT',
                                 sort_files.UNSORTED_DIR)
    assert len(os.listdir(unsorted_path)) == n_light
def test_purge_bad_keywords_logic_for_conditionals(caplog):
    # Exercise the warning/history branches of purge_bad_keywords.
    ic = ImageFileCollection(_test_dir, keywords=['imagetyp'])
    headers = [h for h in ic.headers()]
    a_header = headers[0]
    # should be no warnings the first time...
    ph.purge_bad_keywords(a_header)
    purge_warnings = get_patch_header_logs(caplog)
    assert not purge_warnings
    # re-purging should generate warnings...
    ph.purge_bad_keywords(a_header)
    purge_warnings = get_patch_header_logs(caplog)
    assert 'force' in purge_warnings
    assert 'removing' in purge_warnings
    # grab a clean header
    # want to get to a header with more than one bad keyword so that a
    # history is generated...
    # NOTE(review): if no remaining header has more than one bad keyword
    # the loop falls through using the LAST header anyway -- confirm the
    # test data always provides a match.
    for a_header in headers[1:]:
        software = ph.get_software_name(a_header)
        if len(software.bad_keywords) <= 1:
            continue
        else:
            break
    # delete one of the purge keywords for this software, which should
    # ensure that there is no history added to the header that contains
    # the name of this keyword
    key_to_delete = software.bad_keywords[0]
    print(software.bad_keywords)
    try:
        del a_header[key_to_delete]
    except KeyError:
        pass
    print(a_header)
    ph.purge_bad_keywords(a_header, history=True)
    print(a_header)
    assert all(key_to_delete.lower() not in h.lower()
               for h in a_header['HISTORY'])
def load_files(target_dir, config, files, logger):
    """
    Build an ImageFileCollection for ``files`` in ``target_dir`` and log
    a summary of the bias/flat/object counts and the filters in use.

    Parameters
    ----------
    target_dir : str
        Directory containing the files.
    config : object
        Pipeline configuration. (NOTE(review): not referenced in this
        body -- presumably used by callers or the commented-out
        breakdown; confirm before removing.)
    files : list of str
        File names to include in the collection.
    logger : logging.Logger
        Logger that receives the summary line.

    Returns
    -------
    ccdproc.ImageFileCollection
        The collection built from ``files``.
    """
    from ccdproc import ImageFileCollection
    keywords = [
        'object', 'imagetyp', 'date-obs', 'telra', 'teldec', 'airmass',
        'filter', 'oairtemp', 'relhum', 'subbias', 'flatcor'
    ]
    ic = ImageFileCollection(location=target_dir, filenames=files,
                             keywords=keywords)
    nbias = len(ic.files_filtered(imagetyp='bias'))
    nflat = len(ic.files_filtered(imagetyp='flat'))
    ndata = len(ic.files_filtered(imagetyp='object'))
    # Distinct filters used by flat and object frames, in sorted order.
    # (Set comprehension replaces the original append-if-absent loop;
    # the sorted result is identical.)
    filters = sorted({h['FILTER'] for h in ic.headers()
                      if h['IMAGETYP'] in ('object', 'flat')})
    logger.info('{} files: {} bias, {} flats, {} object ({} filters)'.format(
        len(ic.files), nbias, nflat, ndata, len(filters)))
    return ic
def test_purge_handles_all_software():
    # After purging, every header must carry the 'purged' marker,
    # regardless of which software wrote the file.
    collection = ImageFileCollection(_test_dir, keywords=['imagetyp'])
    for header in collection.headers():
        ph.purge_bad_keywords(header)
        assert 'purged' in header
def add_object_info(directory=None,
                    object_list=None, object_list_dir=None,
                    match_radius=20.0, new_file_ext=None,
                    save_location=None,
                    overwrite=False, detailed_history=True):
    """
    Add object information to FITS files that contain pointing
    information given a list of objects.

    Parameters
    ----------
    directory : str
        Directory containing the FITS files to be fixed. Default is the
        current directory, ``.``.
    object_list : str, optional
        Name of file containing list of objects. Default is set by
        :func:`read_object_list` which also explains the format of this
        file.
    object_list_dir : str, optional
        Directory in which the `object_list` is contained. Default is
        `directory`.
    match_radius : float, optional
        Maximum distance, in arcmin, between the RA/Dec of the image and
        a particular object for the image to be considered an image of
        that object.
    new_file_ext : str, optional
        Name added to the FITS files with updated header information. It
        is added to the base name of the input file, between the old
        file name and the `.fit` or `.fits` extension. Default is 'new'.
    save_location : str, optional
        Directory to which the patched files should be written, if not
        `dir`.
    overwrite : bool, optional
        Set to `True` to replace the original files.
    detailed_history : bool, optional
        NOTE(review): not referenced in this body -- confirm whether it
        is still needed by callers.
    """
    directory = directory or '.'
    if new_file_ext is None:
        new_file_ext = 'new'
    images = ImageFileCollection(directory,
                                 keywords=['imagetyp', 'ra',
                                           'dec', 'object'])
    im_table = images.summary
    object_dir = directory if object_list_dir is None else object_list_dir
    logger.debug('About to read object list')
    try:
        object_names, ra_dec = read_object_list(object_dir,
                                                input_list=object_list)
    except IOError:
        # Best-effort: a directory without an object list is skipped,
        # not an error.
        warn_msg = 'No object list in directory {0}, skipping.'
        logger.warn(warn_msg.format(directory))
        return
    except name_resolve.NameResolveError:
        logger.error('Unable to add objects--name resolve error')
        return
    object_names = np.array(object_names)
    # I want rows which...
    #
    # ...have no OBJECT...
    needs_object = im_table['object'].mask
    # ...and have coordinates.
    needs_object &= ~(im_table['ra'].mask | im_table['dec'].mask)
    logger.debug('Looking for objects for %s images', needs_object.sum())
    # Qualifying rows need a search for a match.
    # the search returns a match for every row provided, but some matches
    # may be farther away than desired, so...
    #
    # ...`and` the previous index mask with those that matched, and
    # ...construct list of object names for those images.
    default_angle_units = (u.hour, u.degree)
    img_pos = SkyCoord(im_table['ra'][needs_object],
                       im_table['dec'][needs_object],
                       unit=default_angle_units,
                       frame='fk5')
    match_idx, d2d, d3d = img_pos.match_to_catalog_sky(ra_dec)
    good_match = (d2d.arcmin <= match_radius)
    # Expand the per-candidate match flags back to a mask over ALL rows.
    found_object = np.array(needs_object)
    found_object[needs_object] = good_match
    matched_object_name = object_names[match_idx][good_match]
    no_match_found = needs_object & ~found_object
    if no_match_found.any():
        for fname in np.array(images.files)[no_match_found]:
            warn_msg = "No object found for image {0}".format(fname)
            logger.warn(warn_msg)
    if not found_object.any():
        logger.info('NO OBJECTS MATCHED TO IMAGES IN: {0}'.format(directory))
        return
    # Masking the file column restricts the header iteration below to
    # exactly the images that received a match, so matched_object_name
    # lines up with the enumeration index.
    im_table['file'].mask = ~found_object
    for idx, (header, fname) in enumerate(
            images.headers(save_with_name=new_file_ext,
                           overwrite=overwrite,
                           save_location=save_location,
                           return_fname=True)):
        logger.info('START ATTEMPTING TO ADD OBJECT to: {0}'.format(fname))
        object_name = matched_object_name[idx]
        logger.debug('Found matching object named %s', object_name)
        obj_keyword = FITSKeyword('object', value=object_name)
        obj_keyword.add_to_header(header, history=True)
        logger.info(obj_keyword.history_comment())
        logger.info('END ATTEMPTING TO ADD OBJECT to: {0}'.format(fname))
def patch_headers(dir=None,
                  new_file_ext=None,
                  save_location=None,
                  overwrite=False,
                  purge_bad=True,
                  add_time=True,
                  add_apparent_pos=True,
                  add_overscan=True,
                  fix_imagetype=True,
                  add_unit=True):
    """
    Add minimal information to Feder FITS headers.

    Parameters
    ----------
    dir : str, optional
        Directory containing the files to be patched. Default is the
        current directory, ``.``
    new_file_ext : str, optional
        Name added to the FITS files with updated header information. It
        is added to the base name of the input file, between the old
        file name and the `.fit` or `.fits` extension. Default is 'new'.
    save_location : str, optional
        Directory to which the patched files should be written, if not
        `dir`.
    overwrite : bool, optional
        Set to `True` to replace the original files.
    purge_bad : bool, optional
        Remove "bad" keywords form header before any other processing.
        See :func:`purge_bad_keywords` for details.
    add_time : bool, optional
        If ``True``, add time information (e.g. JD, LST); see
        :func:`add_time_info` for details.
    add_apparent_pos : bool, optional
        If ``True``, add apparent position (e.g. alt/az) to headers. See
        :func:`add_object_pos_airmass` for details.
    add_overscan : bool, optional
        If ``True``, add overscan keywords to the headers. See
        :func:`add_overscan_header` for details.
    fix_imagetype : bool, optional
        If ``True``, change image types to IRAF-style. See
        :func:`change_imagetype_to_IRAF` for details.
    add_unit : bool, optional
        If ``True``, add image unit to FITS header.
    """
    dir = dir or '.'
    if new_file_ext is None:
        new_file_ext = 'new'
    images = ImageFileCollection(location=dir, keywords=['imagetyp'])
    for header, fname in images.headers(save_with_name=new_file_ext,
                                        save_location=save_location,
                                        overwrite=overwrite,
                                        do_not_scale_image_data=True,
                                        return_fname=True):
        run_time = datetime.now()
        logger.info('START PATCHING FILE: {0}'.format(fname))
        header.add_history(history(patch_headers, mode='begin',
                                   time=run_time))
        header.add_history('patch_headers.py modified this file on %s'
                           % run_time)
        # Kept outside the try/except to ensure an error is raised if
        # the software or instrument isn't recognized.
        get_software_name(header)  # is there some software?
        header['instrume']  # is there an instrument?
        feder.instruments[header['instrume']]  # Is this one we know?
        try:
            header['imagetyp']  # is there an image type?
            if purge_bad:
                purge_bad_keywords(header, history=True, file_name=fname)
            if fix_imagetype:
                change_imagetype_to_IRAF(header, history=True)
            if add_time:
                add_time_info(header, history=True)
            if add_overscan:
                add_overscan_header(header, history=True)
            if add_unit:
                add_image_unit(header, history=True)
            # add_apparent_pos_airmass can raise a ValueError, do it last.
            if add_apparent_pos and (header['imagetyp'] == 'LIGHT'):
                add_object_pos_airmass(header, history=True)
        except (KeyError, ValueError) as e:
            # Note the trailing space before 'Stopped': the original
            # implicit concatenation ran the banner straight into the
            # message ("*********Stopped...").
            warning_msg = ('********* FILE NOT PATCHED ********* '
                           'Stopped patching header of {0} because of '
                           '{1}: {2}'.format(fname, type(e).__name__, e))
            # logger.warn is a deprecated alias of logger.warning.
            logger.warning(warning_msg)
            header.add_history(warning_msg)
            continue
        finally:
            # Always close out the history block, patched or not.
            header.add_history(
                history(patch_headers, mode='end', time=run_time))
            logger.info('END PATCHING FILE: {0}'.format(fname))
def delete_keys(directory='.'):
    """
    Remove the IMAGEH and IMAGEW keywords from every FITS file in
    ``directory`` whose header contains both.

    Parameters
    ----------
    directory : str, optional
        Directory containing the FITS files to modify. Defaults to the
        current directory, so existing no-argument callers behave as
        before.

    .. warning::
        The files are modified in place (``overwrite=True``).
    """
    ic = ImageFileCollection(directory, keywords='*')
    # Filtering on imageh='*'/imagew='*' selects only headers that have
    # both keywords, so the deletions below cannot raise KeyError.
    for hdr in ic.headers(imageh='*', imagew='*', overwrite=True):
        del hdr['imageh'], hdr['imagew']