def main(argv=None):
    """Build (or append to) an HDF5 file of VIRUS calibration frames.

    Parses the command line, collects the reduced files for the requested
    date/observation under ``rootdir``, and appends one ``Cals`` table row
    per file via ``append_fibers_to_table``.

    Parameters
    ----------
    argv : list of str, optional
        Argument vector; ``None`` falls through to ``sys.argv``.
    """
    # Call initial parser from init_utils
    parser = ap.ArgumentParser(description="""Create HDF5 file.""",
                               add_help=True)

    parser.add_argument("-d", "--date",
                        help='''Date, e.g., 20170321, YYYYMMDD''',
                        type=str, default=None)

    parser.add_argument("-o", "--observation",
                        help='''Observation number, "00000007" or "7"''',
                        type=str, default=None)

    parser.add_argument("-r", "--rootdir",
                        help='''Root Directory for Reductions''',
                        type=str, default='/work/03946/hetdex/maverick')

    parser.add_argument('-of', '--outfilename', type=str,
                        help='''Relative or absolute path for output HDF5
                        file.''', default=None)

    parser.add_argument('-a', '--append',
                        help='''Appending to existing file.''',
                        action="count", default=0)

    args = parser.parse_args(argv)
    args.log = setup_logging()

    # BUGFIX: op.exists(None) raises TypeError when -of is omitted; fail
    # with a clear message instead.
    if args.outfilename is None:
        args.log.error('Please provide an output filename with "-of"')
        return None

    # Get the daterange over which reduced files will be collected
    files = get_files(args)

    # Creates a new file if the "--append" option is not set or the file
    # does not already exist.
    if op.exists(args.outfilename) and args.append:
        fileh = tb.open_file(args.outfilename, 'a')
        # BUGFIX: when appending, reuse the existing 'Cals' table —
        # previously ``imagetable`` was only created in the 'w' branch,
        # leaving it unbound (NameError) in append mode.
        imagetable = fileh.root.Cals
    else:
        fileh = tb.open_file(args.outfilename, 'w')
        imagetable = fileh.create_table(fileh.root, 'Cals', VIRUSImage,
                                        'Cal Info')

    for fn in files:
        args.log.info('Working on %s' % fn)
        im = imagetable.row
        success = append_fibers_to_table(im, fn, args)
        # Only persist rows for files that were ingested successfully.
        if success:
            imagetable.flush()
    fileh.close()
def main(argv=None):
    """Inspect an HDF5 shot file: list table columns or show spectra in DS9.

    With ``--show``, prints the column names/types/shapes of the Shot,
    Fibers, and Images tables under ``Info``. Otherwise loads the Fibers
    spectra and pushes them to a DS9 window.

    Parameters
    ----------
    argv : list of str, optional
        Argument vector; ``None`` falls through to ``sys.argv``.
    """
    # Call initial parser from init_utils
    parser = ap.ArgumentParser(description="""Create HDF5 file.""",
                               add_help=True)

    parser.add_argument("hdf5_file", type=str,
                        help='''Name of h5/hdf5 file''')

    parser.add_argument("-e", "--extension",
                        help='''Extension or extensions''',
                        type=str, default=None)

    parser.add_argument("-q", "--query",
                        help='''Query to be applied''',
                        type=str, default=None)

    parser.add_argument('-s', '--show',
                        help='''Show tables/extensions within file''',
                        action="count", default=0)

    args = parser.parse_args(argv)
    args.log = setup_logging()

    try:
        h5file = open_file(args.hdf5_file, mode='r')
    # BUGFIX: narrowed from a bare ``except:`` which would also swallow
    # SystemExit/KeyboardInterrupt.
    except Exception:
        if not op.exists(args.hdf5_file):
            args.log.error('%s does not exist' % args.hdf5_file)
            return None
        else:
            args.log.error('File exists but could not open %s'
                           % args.hdf5_file)
            return None

    if args.show:
        table_names = ['Shot', 'Fibers', 'Images']
        for kind in table_names:
            print('%s column names:' % kind)
            b = getattr(h5file.root.Info, kind)
            for name in b.colnames:
                base = getattr(b.cols, name)
                shape = str(base.shape)
                print('\t%s: %s %s' % (name, base.type, shape))
        # BUGFIX: close the file on this early-return path (was leaked).
        h5file.close()
        return None

    if args.extension is None:
        args.log.error('No extension provided to display in ds9')
        # BUGFIX: close the file on this early-return path (was leaked).
        h5file.close()
        return None

    table = h5file.root.Info.Fibers
    spec = np.array(table.cols.spectrum[:])
    ds9 = pyds9.DS9()
    ds9.set_np2arr(spec)
    h5file.close()
def __init__(self, wave=None):
    ''' Initialize Extract class

    Parameters
    ----------
    wave: numpy 1d array
        wavelength of calfib extension for hdf5 files, does not need to be
        set unless needed by development team
    '''
    # BUGFIX: create the logger before calling any helper methods so that
    # get_wave()/get_ADR()/set_dither_pattern() can safely use self.log
    # during initialization (previously self.log did not exist yet).
    self.log = setup_logging('Extract')
    # Use the caller-supplied wavelength grid when given; otherwise fall
    # back to the class default.
    if wave is not None:
        self.wave = wave
    else:
        self.wave = self.get_wave()
    self.get_ADR()
    self.set_dither_pattern()
# Mask additionally spectra whose average weight is less than 5% if np.nanmedian(weigh) < 0.05: mask = True return mask DIRNAME = get_script_path() T = Table.read(op.join(DIRNAME, 'filters/ps1g.dat'), format='ascii') filtg = np.interp(def_wave, T['col1'], T['col2'], left=0.0, right=0.0) filtg /= filtg.sum() T = Table.read(op.join(DIRNAME, 'calibration', 'normalization.txt'), format='ascii.fixed_width_two_line') flux_normalization = np.array(T['normalization']) log = setup_logging('extractions') parser = ap.ArgumentParser(add_help=True) parser.add_argument("folder", help='''Folder with h5 files''', type=str) parser.add_argument("extraction_file", help='''list of RA and Decs to extract''', type=str) parser.add_argument("outputname", help='''Name of fits file output''', type=str)
# NOTE(review): chunk begins mid-way through the argparse setup of a
# VIRUS-W reduction script; the parser construction is outside this view.
parser.add_argument('outfolder', type=str,
                    help='''name of the output file''')
parser.add_argument("-lr", "--lowres", help='''low resolution''',
                    action="count", default=0)
# NOTE(review): hard-coded developer debug paths; ``argv`` is immediately
# overwritten with None below, so parse_args reads the real command line.
folder = '/Users/gregz/cure/Remedy/virusw/VIRUS-W_Jan21'
outfolder = '/Users/gregz/cure/Remedy/virusw/reductions'
argv = [folder, outfolder, "-lr"]
argv = None
args = parser.parse_args(args=argv)
folder = args.folder
outfolder = args.outfolder
log = setup_logging('virusw_reductions')
# Detector characteristics (units presumably e-/ADU and e- — confirm).
gain = 0.62
rdnoise = 2.55
# =============================================================================
# LowRes Mode
# =============================================================================
if args.lowres:
    # Arc-lamp line wavelengths used as the low-resolution reference set.
    lines = [4046.563, 4077.831, 4348.063, 4358.327, 4916.068, 5037.75,
             5330.778, 5400.562, 5719.23, 5820.16, 5852.488, 5881.895,
             5944.834, 5975.534]
    # Reference fiber index for the x-position solution below.
    fiberref = 130
    # Measured x (pixel) positions of the lines above for the reference
    # fiber. NOTE(review): the chunk is truncated mid-array here; the
    # closing bracket lies outside this view.
    xref = np.array([462.4524, 514.266, 990.160, 1009.781, 2042.973,
                     2275.308, 2843.932, 2980.563, 3607.36, 3806.076,
                     3870.173, 3928.036, 4050.715, 4109.934
def main(argv=None):
    """Extract fiber spectra near a sky position into rsp tmpXXX.dat files.

    Finds all fibers of a shot within ``--rad`` arcsec of (``--ra``,
    ``--dec``), writes one ``tmpNNN.dat`` spectrum file per fiber (numbered
    from 101), plus ``shot.info`` (seeing/azimuth) and ``fib_coords.dat``
    (fiber coordinates and file names).

    Parameters
    ----------
    argv : list of str, optional
        Argument vector; ``None`` falls through to ``sys.argv``.
    """
    # Call initial parser from init_utils
    parser = ap.ArgumentParser(description="""Create rsp tmpXXX.datfiles.""",
                               add_help=True)

    parser.add_argument("-s", "--datevobs",
                        help='''ShotID, e.g., 20170321v009, YYYYMMDDvOBS''',
                        type=str, default=None)

    parser.add_argument("-ra", "--ra",
                        help='''ra, e.g., right ascension in degrees''',
                        type=float, default=None)

    # BUGFIX: help text was a copy-paste of the --ra description.
    parser.add_argument("-dec", "--dec",
                        help='''dec, e.g., declination in degrees''',
                        type=float, default=None)

    parser.add_argument("-rad", "--rad",
                        help='''radius, e.g., aperture radius in arcsec''',
                        type=float, default=3.0)

    parser.add_argument("-w", "--wave",
                        help='''wavelength in AA''',
                        type=float, default=None)

    parser.add_argument("-dw", "--dwave",
                        help='''delta wavelength in AA''',
                        type=float, default=50.0)

    args = parser.parse_args(argv)
    args.log = setup_logging()

    # initiate Fibers and Survey Classes
    print(args)
    fibers = Fibers(args.datevobs)
    survey = Survey('hdr1')

    # Shot-level metadata (seeing FWHM and structure azimuth) for this shot.
    fwhm = survey.fwhm_moffat[survey.datevobs == args.datevobs]
    structaz = survey.structaz[survey.datevobs == args.datevobs]
    ascii.write([fwhm, structaz], 'shot.info',
                names=['fwhm_moffat', 'structaz'], overwrite=True)

    obj_coords = SkyCoord(args.ra * u.deg, args.dec * u.deg, frame='icrs')
    # radius is given in arcsec; /3600 converts to degrees for the query.
    idx = fibers.query_region_idx(obj_coords, radius=(args.rad / 3600.))

    output = Table()
    output['ra'] = fibers.coords.ra[idx] * u.deg
    output['dec'] = fibers.coords.dec[idx] * u.deg
    filenames = []

    # One tmpNNN.dat file per matched fiber, numbered from 101.
    for fileidx, i in enumerate(idx, start=101):
        filename = 'tmp' + str(fileidx) + '.dat'
        filenames.append(filename)
        save_rsp_spectrum(fibers, i, file=filename)

    output['filename'] = np.array(filenames)
    ascii.write(output, 'fib_coords.dat', overwrite=True)
# NOTE(review): chunk begins inside a fiber-to-fiber/sky function whose
# ``def`` line (and the loops binding j, k, ns, W) are outside this view.
y[j] = evalf(x[j], n, avg=y[j])
for nsi in ns:
    # Smooth the selected rows with a short first-order Savitzky-Golay
    # filter (window 11, polyorder 1).
    y[nsi] = savgol_filter(y[nsi], 11, 1)
ftf[:, k] = y / n
for i in np.arange(len(scispectra)):
    # Interpolate each fiber's fiber-to-fiber curve onto the output
    # wavelength grid and divide it out of the science spectrum.
    I = interp1d(W, ftf[i, :], kind='quadratic',
                 fill_value='extrapolate')
    scispectra[i] /= I(def_wave)
log.info('Getting average sky')
# Median across fibers gives the sky estimate; return sky-subtracted
# spectra together with the sky itself.
sky = np.nanmedian(scispectra, axis=0)
return scispectra - sky, sky


# GET DIRECTORY NAME FOR PATH BUILDING
DIRNAME = get_script_path()
instrument = 'virus'
log = setup_logging()
# Fall back to the packaged default calibration file when none was given.
if args.hdf5file is None:
    args.hdf5file = op.join(DIRNAME, 'cals', 'default_cals.h5')

# OPEN HDF5 FILE
h5file = open_file(args.hdf5file, mode='r')
h5table = h5file.root.Cals

# Collect indices for ifuslot
ifuslots = h5table.cols.ifuslot[:]
ifuloop = np.arange(len(ifuslots))

# Reducing IFUSLOT
log.info('Reducing all ifus in CAL HDF5')
pos, twispectra, scispectra, errspectra, fn = reduce_ifuslot(ifuloop,
                                                             h5table)
# 95th-percentile twilight spectrum across fibers as the bright reference.
average_twi = np.percentile(twispectra, 95, axis=0)
# NOTE(review): chunk begins mid-way through an ``add_argument`` call; the
# parser construction and the positional argument's name are outside this
# view.
                    type=str, help='''name of the output file''')
parser.add_argument("-f", "--folder", help='''Output folder''', type=str,
                    default='output')
parser.add_argument("-i", "--ifuslot", help='''IFUSLOT''', type=str,
                    default='047')
args = parser.parse_args(args=None)
args.log = setup_logging(logname='build_master_bias')
# Expand args with the date range to process.
args = set_daterange(args)
# Frame kinds to collect: twilight flats and comparison (arc) frames.
kinds = ['twi', 'cmp']
mkpath(args.folder)
dirname = get_script_path()
filename_dict = {}
tarname_dict = {}
for kind in kinds:
    args.log.info('Getting file names for %s' % kind)
    # NOTE(review): 'drk' never occurs in ``kinds`` above, so this branch
    # is dead unless the list is edited; darks get a 7-day widened range.
    if kind == 'drk':
        daterange = expand_date_range(args.daterange, 7)
    else:
        daterange = list(args.daterange)
    filename_dict[kind] = get_filenames(args, daterange, kind)
    tarname_dict[kind] = get_tarfiles(filename_dict[kind])
# NOTE(review): chunk begins inside the ``try`` body of a header-parsing
# function whose ``def`` and ``try:`` lines are outside this view.
trajcra = float(b.split(' ')[5]) * 15.  # presumably hours -> degrees; confirm
trajcdec = float(b.split(' ')[6])
mjd = float(b.split(' ')[4])
except:
    # Fallback defaults when the header line cannot be parsed.
    # NOTE(review): bare except also catches KeyboardInterrupt — narrow it
    # when this function is next touched.
    exptime = 180.
    trajcra = 180.
    trajcdec = 50.
    mjd = -999.
return [trajcra, trajcdec, mjd, exptime]


# Module-level setup for the catalog script.
outname = sys.argv[1]
# Common HETDEX wavelength grid: 3470-5540 AA in 1036 steps.
def_wave = np.linspace(3470, 5540, 1036)
log = setup_logging('catalog')
loc = EarthLocation.of_site('McDonald Observatory')
basedir = '/work/00115/gebhardt/maverick/gettar'
folder = '/work/03946/hetdex/virus_parallels/data'
DIRNAME = get_script_path()
# Pan-STARRS g-band filter curve on the working grid, unit-sum normalized.
T = Table.read(op.join(DIRNAME, 'filters/ps1g.dat'), format='ascii')
filtg = np.interp(def_wave, T['col1'], T['col2'], left=0.0, right=0.0)
filtg /= filtg.sum()
h5names = sorted(glob.glob(op.join(folder, '*.h5')))
# Running counters for the accumulation loop that follows this chunk.
totN = 0
Nshots = 0
cnt = 0
C = 0
# NOTE(review): chunk begins mid-condition inside a gridding function whose
# ``def``, loop, and the start of this ``if`` are outside this view.
        (indy < N) or (indy >= (len(yg) - N))):
    # Skip points whose N-pixel stamp would fall off the grid edge.
    continue
# Bounds of the (2N+1)-pixel square stamp centered on (indx, indy).
ly = indy - N
hy = indy + N + 1
lx = indx - N
hx = indx + N + 1
# Distance of every stamp pixel from the point p; Gaussian weights
# normalized to unit sum.
d = np.sqrt((xgrid[ly:hy,lx:hx]-p[0])**2 + (ygrid[ly:hy,lx:hx]-p[1])**2)
G = np.exp(-0.5 * d**2 / sigma**2)
G[:] /= G.sum()
# Accumulate the weighted value and the weight map for later normalization.
image[ly:hy,lx:hx] += yi*G
weight[ly:hy,lx:hx] += G
# Zero out poorly-covered pixels (weight below 80% of the maximum).
image[weight < 0.8 * np.max(weight)] = 0.
return image


# Module-level setup for the image-from-h5 script.
args = parser.parse_args(args=None)
args.log = setup_logging('make_image_from_h5')
def_wave = np.linspace(3470., 5540., 1036)
# Extraction and background windows come in as "center, width" strings.
wave_extract = [float(i.replace(' ', ''))
                for i in args.wave_extract.split(',')]
back_wave = [float(i.replace(' ', '')) for i in args.back_wave.split(',')]
# Background selection excludes the +/-2.5-sigma extraction window.
bsel = (def_wave >= back_wave[0]) * (def_wave <= back_wave[1])
wsel = np.abs(def_wave - wave_extract[0]) < (2.5 * wave_extract[1])
bsel = bsel * (~wsel)
# Gaussian line model normalized over the extraction window.
Gmodel = np.exp(-0.5 * (def_wave - wave_extract[0])**2 /
                wave_extract[1]**2)
Gmodel[:] = Gmodel / Gmodel[wsel].sum()
t = tables.open_file(args.h5file)
# Per-fiber coordinates and the single shot-level pointing.
ra = t.root.Info.cols.ra[:]
dec = t.root.Info.cols.dec[:]
RA = t.root.Survey.cols.ra[0]
Dec = t.root.Survey.cols.dec[0]