def update_hdr_config(self):
    """Refresh all HDR-release-dependent state from the survey widget.

    Reads the currently selected survey name from ``self.survey_widget``
    and rebuilds the config, fiber index, bad-amplifier table, and survey
    class for that release.
    """
    survey_name = self.survey_widget.value.lower()
    self.survey = survey_name
    self.hetdex_api_config = HDRconfig(survey=survey_name)
    self.FibIndex = FiberIndex(survey_name)
    # Table of known-bad amplifiers for this release.
    self.ampflag_table = Table.read(self.hetdex_api_config.badamp)
    # Update survey class and shot list.
    self.survey_class = Survey(survey_name)
# Flag the shot if it is in the curated bad-shot list; the flux-limit mask is
# zeroed everywhere downstream when the flag is set.
# NOTE(review): `badshot` is re-bound here from a collection (membership test)
# to a boolean flag — presumably the bad-shot list was loaded earlier in the
# script; confirm against the preceding code.
if shotid in badshot:
    print("Shot is in badshot list. Making mask zero everywhere")
    badshot = True
else:
    badshot = False

# also check if shot is in bad throughput list
# and set mask to zero everywhere
badtpshots = np.loadtxt(config.lowtpshots, dtype=int)
if shotid in badtpshots:
    badshot = True
    print("Shot has bad throughput. Setting flux limit mask to 0")

# Bad-amplifier table and fiber index used for per-fiber/per-amp flagging.
# NOTE(review): `FibIndex` is opened here but not closed in this chunk —
# verify it is closed later in the script.
bad_amps = Table.read(config.badamp)
FibIndex = FiberIndex()

# Locate the sensitivity-cube HDF5 file for this datevshot; abort the script
# if no flux-limit product exists for it.
try:
    hdf_filename = return_sensitivity_hdf_path(datevshot, release=LATEST_HDR_NAME)
except NoFluxLimsAvailable:
    sys.exit("No flux limit file found for " + datevshot)

# Open the sensitivity cubes with no aperture correction applied (aper_corr=1.0)
# under the "hdr2pt1" flux-limit model.
flimhdf = SensitivityCubeHDF5Container(
    filename=hdf_filename, aper_corr=1.0, flim_model="hdr2pt1"
)

# Output mask file, named "<datevshot>_mask.h5", opened for writing.
hdf_outfilename = datevshot + "_mask.h5"
fileh = tb.open_file(hdf_outfilename, "w")
def get_source_spectra_mp(source_dict, shotid, manager, args):
    """Extract aperture spectra for every matched source in one shot.

    Intended to run as a multiprocessing worker: results are written into
    the shared ``source_dict`` (a ``Manager().dict()``), keyed by source ID
    and then by ``shotid``.

    Parameters
    ----------
    source_dict : multiprocessing managed dict
        Shared output mapping ``ID -> {shotid: [spec, spec_err, weights,
        fiber_weights, fiber_info, flags]}``.
    shotid : int
        Shot identifier to process.
    manager : multiprocessing.Manager
        Used to create the per-source nested managed dicts.
    args : namespace
        Carries survey selection, matched source indices, coordinates,
        extraction radius, sky options, logger, and survey class.

    Returns
    -------
    source_dict : the same managed dict, for convenience.
    """
    E = Extract()
    FibIndex = FiberIndex(args.survey)

    # hdr1 shots are slow to open fiber-by-fiber, so only preload all fibers
    # when there are enough sources to amortize the cost.
    if args.survey == "hdr1":
        source_num_switch = 20
    else:
        source_num_switch = 0

    if len(args.matched_sources[shotid]) > 0:
        args.log.info("Working on shot: %s" % shotid)

        # Seeing FWHM for this shot; the column name differs by release.
        if args.survey == "hdr1":
            fwhm = args.survey_class.fwhm_moffat[args.survey_class.shotid == shotid][0]
        else:
            fwhm = args.survey_class.fwhm_virus[args.survey_class.shotid == shotid][0]

        moffat = E.moffat_psf(fwhm, 10.5, 0.25)

        if len(args.matched_sources[shotid]) > source_num_switch:
            E.load_shot(shotid, fibers=True, survey=args.survey)
        else:
            E.load_shot(shotid, fibers=False, survey=args.survey)

        for ind in args.matched_sources[shotid]:
            # args.coords may be an array of coords (index by ind) or a single
            # coord (not indexable along sources) — fall back on TypeError.
            try:
                info_result = E.get_fiberinfo_for_coord(
                    args.coords[ind],
                    radius=args.rad,
                    ffsky=args.ffsky,
                    return_fiber_info=True,
                )
            except TypeError:
                info_result = E.get_fiberinfo_for_coord(
                    args.coords,
                    radius=args.rad,
                    ffsky=args.ffsky,
                    return_fiber_info=True,
                )

            if info_result is not None:
                if np.size(args.ID) > 1:
                    args.log.info("Extracting %s" % args.ID[ind])
                else:
                    args.log.info("Extracting %s" % args.ID)

                ifux, ifuy, xc, yc, ra, dec, data, error, mask, fiberid, \
                    multiframe = info_result

                weights = E.build_weights(xc, yc, ifux, ifuy, moffat)

                # added by EMC 20210609: normalize PSF weights per wavebin so
                # the extraction uses relative weights...
                norm = np.sum(weights, axis=0)
                weights = weights / norm[np.newaxis, :]

                result = E.get_spectrum(
                    data, error, mask, weights, remove_low_weights=False
                )
                spectrum_aper, spectrum_aper_error = result

                # ...then apply the aperture correction and restore the
                # un-normalized weights for reporting.
                spectrum_aper /= norm
                spectrum_aper_error /= norm
                weights *= norm[np.newaxis, :]

                # add in the total weight of each fiber (as the sum of its
                # weight per wavebin)
                if args.fiberweights:
                    try:
                        fiber_weights = np.array(
                            [x for x in zip(ra, dec, np.sum(weights * mask, axis=1))]
                        )
                    except Exception:
                        # best-effort: missing/mismatched per-fiber arrays
                        fiber_weights = []
                else:
                    fiber_weights = []

                # get fiber info no matter what so we can flag
                try:
                    fiber_info = np.array(
                        [
                            x
                            for x in zip(
                                fiberid,
                                multiframe,
                                ra,
                                dec,
                                np.sum(weights * mask, axis=1),
                            )
                        ]
                    )
                except Exception:
                    args.log.warning("Could not get fiber info, no flagging created")
                    fiber_info = []

                if len(fiber_info) > 0:
                    # Same single-vs-array coord fallback as above.
                    try:
                        flags = FibIndex.get_fiber_flags(
                            coord=args.coords[ind], shotid=shotid
                        )
                    except Exception:
                        flags = FibIndex.get_fiber_flags(
                            coord=args.coords, shotid=shotid
                        )
                else:
                    flags = None

                # Store the result under the appropriate source ID, creating
                # the per-source managed dict on first sight of that ID.
                source_id = args.ID[ind] if np.size(args.ID) > 1 else args.ID
                shot_result = [
                    spectrum_aper,
                    spectrum_aper_error,
                    weights.sum(axis=0),
                    fiber_weights,
                    fiber_info,
                    flags,
                ]
                if source_id not in source_dict:
                    source_dict[source_id] = manager.dict()
                source_dict[source_id][shotid] = shot_result

        E.shoth5.close()

    FibIndex.close()
    return source_dict