def mosaicDetectors(self, adinputs=None, **params):
    """
    This primitive does a full mosaic of all the arrays in an AD object.
    An appropriate geometry_conf.py module containing geometric
    information is required.

    Parameters
    ----------
    suffix: str
        suffix to be added to output files.
    sci_only: bool
        mosaic only SCI image data. Default is False.
    order: int (1-5)
        order of spline interpolation
    """
    log = self.log
    log.debug(gt.log_message("primitive", self.myself(), "starting"))
    timestamp_key = self.timestamp_keys[self.myself()]

    suffix = params['suffix']
    order = params['order']
    attributes = ['data'] if params['sci_only'] else None
    geotable = import_module('.geometry_conf', self.inst_lookups)

    adoutputs = []
    for ad in adinputs:
        if ad.phu.get(timestamp_key):
            log.warning("No changes will be made to {}, since it has "
                        "already been processed by mosaicDetectors".
                        format(ad.filename))
            adoutputs.append(ad)
            continue

        if len(ad) == 1:
            log.warning("{} has only one extension, so there's nothing "
                        "to mosaic".format(ad.filename))
            adoutputs.append(ad)
            continue

        # If there's an overscan section, we must trim it before mosaicking
        try:
            overscan_kw = ad._keyword_for('overscan_section')
        except AttributeError:  # doesn't exist for this AD, so carry on
            pass
        else:
            if overscan_kw in ad.hdr:
                ad = gt.trim_to_data_section(ad, self.keyword_comments)

        adg = transform.create_mosaic_transform(ad, geotable)
        ad_out = adg.transform(attributes=attributes, order=order,
                               process_objcat=False)

        ad_out.orig_filename = ad.filename
        gt.mark_history(ad_out, primname=self.myself(), keyword=timestamp_key)
        ad_out.update_filename(suffix=suffix, strip=True)
        adoutputs.append(ad_out)

    return adoutputs
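# ----------------------------------------------------------------------
# Hedged usage sketch, not part of the primitive above: one plausible way
# to drive mosaicDetectors() through a primitives class. The import path,
# the file name, and the parameter values are illustrative assumptions.
def _example_mosaic_usage():
    import astrodata
    import gemini_instruments  # noqa: F401  registers Gemini AstroData classes
    from geminidr.gmos.primitives_gmos_image import GMOSImage

    ad = astrodata.open('example.fits')   # placeholder file name
    p = GMOSImage([ad])
    # Parameter names follow the docstring above; values are assumptions.
    return p.mosaicDetectors(sci_only=False, order=3)
# ----------------------------------------------------------------------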
def test_inverse_transform_gmos(astrofaker, binning):
    # Creates GMOS images with stars at predefined points
    ad = astrofaker.create('GMOS-N')
    ad.init_default_extensions(binning=binning, overscan=False)
    for ext in ad:
        ext.add(np.random.randn(*ext.shape))
        for ystar, xstar in GMOS_STAR_LOCATIONS:
            ext.add_star(amplitude=10000, x=xstar / binning, y=ystar / binning)

    adg = transform.create_mosaic_transform(ad, geotable)
    admos = adg.transform(attributes=None, order=1)
    adout = adg.inverse_transform(admos, order=3)

    p = GMOSImage([adout])
    p.detectSources()
    adout = p.streams['main'][0]

    xbin, ybin = ad.detector_x_bin(), ad.detector_y_bin()
    for ext in adout:
        objcat = ext.OBJCAT
        objcat.sort(['Y_IMAGE'])
        for row, location in zip(objcat, GMOS_STAR_LOCATIONS):
            # OBJCAT is 1-indexed
            assert abs(row['Y_IMAGE'] - location[0] / ybin - 1) < 0.1
            assert abs(row['X_IMAGE'] - location[1] / xbin - 1) < 0.1
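# ----------------------------------------------------------------------
# Hedged sketch of the module-level setup the test above relies on. The
# real GMOS_STAR_LOCATIONS values, geometry table, and fixtures live in the
# surrounding test module / conftest; everything below is an assumed,
# illustrative reconstruction with placeholder star positions.
import numpy as np
import pytest
from importlib import import_module

from gempy.library import transform
from geminidr.gmos.primitives_gmos_image import GMOSImage

# Placeholder (y, x) star positions in unbinned pixels (hypothetical values).
GMOS_STAR_LOCATIONS = ((200, 300), (1000, 1500), (2000, 2800))

# Geometry lookup used to build the mosaic transform (assumed module path).
geotable = import_module('geminidr.gmos.lookups.geometry_conf')


@pytest.fixture
def astrofaker():
    """Provide the astrofaker package, skipping if it is not installed."""
    return pytest.importorskip('astrofaker')


@pytest.fixture(params=[1, 2])
def binning(request):
    """Detector binnings to exercise (assumed parametrization)."""
    return request.param
# ----------------------------------------------------------------------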
def applyQECorrection(self, adinputs=None, **params):
    """
    This primitive applies a wavelength-dependent QE correction to
    a 2D spectral image, based on the wavelength solution of an
    associated processed_arc.

    It is only designed to work on FLATs, and therefore unmosaicked data.

    Parameters
    ----------
    suffix: str
        suffix to be added to output files
    """
    log = self.log
    log.debug(gt.log_message("primitive", self.myself(), "starting"))
    timestamp_key = self.timestamp_keys[self.myself()]
    sfx = params["suffix"]
    arc = params["arc"]

    # Get a suitable arc frame (with distortion map) for every science AD
    if arc is None:
        self.getProcessedArc(adinputs, refresh=False)
        arc_list = self._get_cal(adinputs, 'processed_arc')
    else:
        arc_list = arc

    distort_model = models.Identity(2)

    for ad, arc in zip(*gt.make_lists(adinputs, arc_list, force_ad=True)):
        if ad.phu.get(timestamp_key):
            log.warning("No changes will be made to {}, since it has "
                        "already been processed by applyQECorrection".
                        format(ad.filename))
            continue

        if 'e2v' in ad.detector_name(pretty=True):
            log.warning("{} has the e2v CCDs, so no QE correction "
                        "is necessary".format(ad.filename))
            continue

        # Determines whether to multiply or divide by QE correction
        is_flat = 'FLAT' in ad.tags

        # If the arc's binning doesn't match, we may still be able to
        # fall back to the approximate solution
        xbin, ybin = ad.detector_x_bin(), ad.detector_y_bin()
        if arc is not None and (arc.detector_x_bin() != xbin or
                                arc.detector_y_bin() != ybin):
            log.warning("Science frame {} and arc {} have different "
                        "binnings, so cannot use arc".
                        format(ad.filename, arc.filename))
            arc = None

        # OK, we definitely want to try to do this, get a wavelength solution
        try:
            wavecal = arc[0].WAVECAL
        except (TypeError, AttributeError):
            wave_model = None
        else:
            model_dict = dict(zip(wavecal['name'], wavecal['coefficients']))
            wave_model = astromodels.dict_to_chebyshev(model_dict)
            if not isinstance(wave_model, models.Chebyshev1D):
                log.warning("Problem reading wavelength solution from arc "
                            "{}".format(arc.filename))

        if wave_model is None:
            if 'sq' in self.mode:
                raise OSError("No wavelength solution for {}".
                              format(ad.filename))
            else:
                log.warning("Using approximate wavelength solution for "
                            "{}".format(ad.filename))

        try:
            fitcoord = arc[0].FITCOORD
        except (TypeError, AttributeError):
            # distort_model already has Identity inverse so nothing required
            pass
        else:
            # TODO: This is copied from determineDistortion() and will need
            # to be refactored out. Or we might be able to simply replace it
            # with a gWCS.pixel_to_world() call
            model_dict = dict(zip(fitcoord['inv_name'],
                                  fitcoord['inv_coefficients']))
            m_inverse = astromodels.dict_to_chebyshev(model_dict)
            if not isinstance(m_inverse, models.Chebyshev2D):
                log.warning("Problem reading distortion model from arc "
                            "{}".format(arc.filename))
            else:
                distort_model.inverse = (models.Mapping((0, 1, 1)) |
                                         (m_inverse & models.Identity(1)))

        if distort_model.inverse == distort_model:  # Identity(2)
            if 'sq' in self.mode:
                raise OSError("No distortion model for {}".format(ad.filename))
            else:
                log.warning("Proceeding without a distortion correction for "
                            "{}".format(ad.filename))

        ad_detsec = ad.detector_section()
        adg = transform.create_mosaic_transform(ad, geotable)
        if arc is not None:
            arc_detsec = arc.detector_section()[0]
            shifts = [c1 - c2 for c1, c2 in
                      zip(np.array(ad_detsec).min(axis=0), arc_detsec)]
            xshift, yshift = shifts[0] / xbin, shifts[2] / ybin  # x1, y1
            if xshift or yshift:
                log.stdinfo("Found a shift of ({},{}) pixels between "
                            "{} and the calibration.".
                            format(xshift, yshift, ad.filename))
            add_shapes, add_transforms = [], []
            for (arr, trans) in adg:
                # Try to work out shape of this Block in the unmosaicked
                # arc, and then apply a shift to align it with the
                # science Block before applying the same transform.
                if xshift == 0:
                    add_shapes.append(((arc_detsec.y2 - arc_detsec.y1) // ybin,
                                       arr.shape[1]))
                else:
                    add_shapes.append((arr.shape[0],
                                       (arc_detsec.x2 - arc_detsec.x1) // xbin))
                t = transform.Transform(models.Shift(-xshift) &
                                        models.Shift(-yshift))
                t.append(trans)
                add_transforms.append(t)
            adg.calculate_output_shape(additional_array_shapes=add_shapes,
                                       additional_transforms=add_transforms)
            origin_shift = (models.Shift(-adg.origin[1]) &
                            models.Shift(-adg.origin[0]))
            for t in adg.transforms:
                t.append(origin_shift)

        # Irrespective of arc or not, apply the distortion model (it may
        # be Identity), recalculate output_shape and reset the origin
        for t in adg.transforms:
            t.append(distort_model.copy())
        adg.calculate_output_shape()
        adg.reset_origin()

        # Now we know the shape of the output, we can construct the
        # approximate wavelength solution; ad.dispersion() returns a list!
        if wave_model is None:
            wave_model = (models.Shift(-0.5 * adg.output_shape[1]) |
                          models.Scale(ad.dispersion(asNanometers=True)[0]) |
                          models.Shift(ad.central_wavelength(asNanometers=True)))

        for ccd, (block, trans) in enumerate(adg, start=1):
            if ccd == 2:
                continue
            for ext, corner in zip(block, block.corners):
                ygrid, xgrid = np.indices(ext.shape)
                xgrid += corner[1]  # No need for ygrid
                xnew = trans(xgrid, ygrid)[0]
                # Some unit-based stuff here to prepare for gWCS
                waves = wave_model(xnew) * u.nm
                try:
                    qe_correction = qeModel(ext)(
                        (waves / u.nm).to(u.dimensionless_unscaled).value)
                except TypeError:  # qeModel() returns None
                    msg = "No QE correction found for {}:{}".format(
                        ad.filename, ext.hdr['EXTVER'])
                    if 'sq' in self.mode:
                        raise ValueError(msg)
                    else:
                        log.warning(msg)
                        continue  # leave this extension uncorrected
                log.fullinfo("Mean relative QE of EXTVER {} is {:.5f}".
                             format(ext.hdr['EXTVER'], qe_correction.mean()))
                if not is_flat:
                    qe_correction = 1. / qe_correction
                qe_correction[qe_correction < 0] = 0
                qe_correction[qe_correction > 10] = 0
                ext.multiply(qe_correction)

        # Timestamp and update the filename
        gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
        ad.update_filename(suffix=sfx, strip=True)

    return adinputs
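# ----------------------------------------------------------------------
# Hedged sketch of the fallback wavelength solution constructed above when
# no WAVECAL table is available: a linear pixel-to-wavelength model that
# puts the central wavelength at the middle column and scales by the
# dispersion. The numbers below (3000-pixel output, 0.1 nm/pixel, 700 nm
# centre) are illustrative assumptions, not instrument values.
from astropy.modeling import models as _models


def _approx_wave_model(nx=3000, dispersion_nm=0.1, cenwave_nm=700.0):
    """Mirror the Shift | Scale | Shift construction in applyQECorrection();
    all arguments are placeholder values."""
    return (_models.Shift(-0.5 * nx) |
            _models.Scale(dispersion_nm) |
            _models.Shift(cenwave_nm))


# The centre pixel maps to the central wavelength, and each pixel step
# changes the wavelength by one dispersion unit.
_wm = _approx_wave_model()
assert abs(_wm(1500) - 700.0) < 1e-6
assert abs(_wm(1510) - 701.0) < 1e-6
# ----------------------------------------------------------------------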