def test_calculate_position_with_evil_inputs(upload_data_token,
                                             view_only_token, ztf_camera,
                                             public_group):
    ra, dec = 10.5, -20.8
    obj_id = str(uuid.uuid4())
    status, data = api(
        'POST',
        'sources',
        data={'id': obj_id, 'ra': ra, 'dec': dec,
              'group_ids': [public_group.id]},
        token=upload_data_token,
    )
    assert status == 200
    assert data['data']['id'] == obj_id

    n_phot = 10
    mjd = 58000.0 + np.arange(n_phot)
    flux = np.zeros_like(mjd)
    fluxerr = 1e-6 + np.random.random(n_phot)
    filters = ['ztfg'] * n_phot
    # scatter the positions by ~0.1 arcsec around the nominal coordinates
    ras = ra + np.cos(np.radians(dec)) * np.random.randn(n_phot) / (10 * 3600)
    decs = dec + np.random.randn(n_phot) / (10 * 3600)
    dec_unc = np.zeros_like(mjd)
    med_ra, med_dec = np.median(ras), np.median(decs)

    # valid request with zero-flux sources and astrometry with zero
    # uncertainty
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': obj_id,
            'mjd': list(mjd),
            'instrument_id': ztf_camera.id,
            'flux': list(flux),
            'fluxerr': list(fluxerr),
            'filter': list(filters),
            'ra': list(ras),
            'dec': list(decs),
            'magsys': 'ab',
            'zp': 25.0,
            'dec_unc': list(dec_unc),
            'ra_unc': 0.2,
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    assert len(data['data']['ids']) == n_phot

    removed_kwargs = ["instrument_name", "groups", "magsys", "zp"]
    phot_list = []
    for photometry_id in data['data']['ids']:
        status, data = api('GET', f'photometry/{photometry_id}?format=flux',
                           token=upload_data_token)
        assert status == 200
        assert data['status'] == 'success'
        for key in removed_kwargs:
            data['data'].pop(key)
        phot_list.append(Photometry(**data['data']))

    ra_calc_snr, dec_calc_snr = _calculate_best_position_for_offset_stars(
        phot_list, fallback=(ra, dec), how="snr2", max_offset=0.5,
        sigma_clip=4.0)

    # make sure we get back the median position
    npt.assert_almost_equal(ra_calc_snr, med_ra, decimal=10)
    npt.assert_almost_equal(dec_calc_snr, med_dec, decimal=10)

    ra_calc_err, dec_calc_err = _calculate_best_position_for_offset_stars(
        phot_list, fallback=(ra, dec), how="invvar", max_offset=0.5,
        sigma_clip=4.0)

    # make sure we get back the median position
    npt.assert_almost_equal(ra_calc_err, med_ra, decimal=10)
    npt.assert_almost_equal(dec_calc_err, med_dec, decimal=10)
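# For reference, a minimal sketch of the weighting the test above exercises.
# This is NOT SkyPortal's _calculate_best_position_for_offset_stars, just an
# illustration (all names hypothetical) of why zero fluxes and zero
# uncertainties are "evil": "snr2" weights collapse to an all-zero sum and
# "invvar" weights blow up to infinity, so a robust implementation has to
# detect the degenerate weights and fall back to the median position, which
# is exactly what the assertions above check.
import numpy as np

def sketch_best_position(ras, decs, flux, fluxerr, unc, how="snr2"):
    """Weighted mean position; degenerate weights fall back to the median."""
    if how == "snr2":
        # weight by detection significance squared; zero flux -> zero weight
        w = np.square(np.divide(flux, fluxerr,
                                out=np.zeros_like(flux), where=fluxerr > 0))
    else:  # "invvar": weight by inverse astrometric variance
        with np.errstate(divide='ignore'):
            w = 1.0 / np.asarray(unc) ** 2  # zero unc -> infinite weight
    if not np.all(np.isfinite(w)) or w.sum() == 0:
        return np.median(ras), np.median(decs)
    return np.average(ras, weights=w), np.average(decs, weights=w)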
def parse_mag(self, data, **kwargs):
    """Return a `Photometry` object from a `PhotometryMag` marshmallow
    schema.

    Parameters
    ----------
    data : dict
        The instance of the PhotometryMag schema to convert to Photometry.

    Returns
    -------
    Photometry
        The Photometry object generated from the PhotometryMag dict.
    """
    from skyportal.models import (Instrument, Obj, PHOT_SYS, PHOT_ZP,
                                  PHOT_DETECTION_THRESHOLD, Photometry)
    from sncosmo.photdata import PhotometricData

    # check that mag and magerr are both null or both not null, not a mix
    ok = any([
        all([op(field, None) for field in [data['mag'], data['magerr']]])
        for op in [operator.is_, operator.is_not]
    ])

    if not ok:
        raise ValidationError(f'Error parsing packet "{data}": mag '
                              f'and magerr must both be null, or both be '
                              f'not null.')

    # get the instrument
    instrument = Instrument.query.get(data['instrument_id'])
    if not instrument:
        raise ValidationError(
            f'Invalid instrument ID: {data["instrument_id"]}')

    # get the object
    obj = Obj.query.get(data['obj_id'])  # TODO: implement permissions checking
    if not obj:
        raise ValidationError(f'Invalid object ID: {data["obj_id"]}')

    if data["filter"] not in instrument.filters:
        raise ValidationError(
            f"Instrument {instrument.name} has no filter "
            f"{data['filter']}.")

    # determine if this is a limit or a measurement
    hasmag = data['mag'] is not None

    if hasmag:
        flux = 10 ** (-0.4 * (data['mag'] - PHOT_ZP))
        fluxerr = data['magerr'] / (2.5 / np.log(10)) * flux
    else:
        # interpret the limiting mag as an n-sigma upper limit on the flux
        nsigflux = 10 ** (-0.4 * (data['limiting_mag'] - PHOT_ZP))
        flux = None
        fluxerr = nsigflux / PHOT_DETECTION_THRESHOLD

    # convert flux to microJanskies
    table = Table([{
        'flux': flux,
        'fluxerr': fluxerr,
        'magsys': data['magsys'],
        'zp': PHOT_ZP,
        'filter': data['filter'],
        'mjd': data['mjd'],
    }])

    if flux is None:
        # this needs to be non-null for the conversion step;
        # it will be replaced with null afterwards
        table['flux'] = 0.0

    # conversion happens here
    photdata = PhotometricData(table).normalized(zp=PHOT_ZP, zpsys=PHOT_SYS)

    # replace with null if needed
    final_flux = None if flux is None else photdata.flux[0]

    p = Photometry(
        obj_id=data['obj_id'],
        mjd=data['mjd'],
        flux=final_flux,
        fluxerr=photdata.fluxerr[0],
        instrument_id=data['instrument_id'],
        assignment_id=data['assignment_id'],
        filter=data['filter'],
        ra=data['ra'],
        dec=data['dec'],
        ra_unc=data['ra_unc'],
        dec_unc=data['dec_unc'],
    )

    if 'alert_id' in data and data['alert_id'] is not None:
        p.alert_id = data['alert_id']

    return p
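# A standalone check of the mag -> flux conversion in parse_mag above.
# These are the standard AB-magnitude relations; the zeropoint value is an
# assumption (SkyPortal's PHOT_ZP is 23.9, so internal fluxes come out in
# microjanskys -- verify against your installed version).
import numpy as np

PHOT_ZP = 23.9  # assumed internal zeropoint

mag, magerr = 21.4, 0.1
flux = 10 ** (-0.4 * (mag - PHOT_ZP))       # 10.0 (uJy for an AB zp of 23.9)
fluxerr = magerr * np.log(10) / 2.5 * flux  # first-order error propagation,
# algebraically identical to the schema's magerr / (2.5 / np.log(10)) * flux
print(f"{flux:.3f} +/- {fluxerr:.3f}")      # 10.000 +/- 0.921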
(basedir / 'static/thumbnails').mkdir(parents=True, exist_ok=True)

for source_info in SOURCES:
    comments = source_info.pop('comments')
    s = Source(**source_info, groups=[g])
    s.comments = [
        Comment(text=comment, user=group_admin_user)
        for comment in comments
    ]

    phot_file = os.path.join(
        os.path.dirname(os.path.dirname(__file__)),
        'skyportal', 'tests', 'data', 'phot.csv')
    phot_data = pd.read_csv(phot_file)
    s.photometry = [
        Photometry(instrument=i1, **row)
        for j, row in phot_data.iterrows()
    ]

    spec_file = os.path.join(
        os.path.dirname(os.path.dirname(__file__)),
        'skyportal', 'tests', 'data', 'spec.csv')
    spec_data = pd.read_csv(spec_file)
    s.spectra = [
        Spectrum(instrument_id=int(i),
                 observed_at=datetime.datetime(2014, 10, 24),
                 wavelengths=df.wavelength,
                 fluxes=df.flux,
                 errors=None)
        for i, df in spec_data.groupby('instrument_id')
    ]
def parse_flux(self, data, **kwargs):
    """Return a `Photometry` object from a `PhotometryFlux` marshmallow
    schema.

    Parameters
    ----------
    data : dict
        The instance of the PhotometryFlux schema to convert to Photometry.

    Returns
    -------
    Photometry
        The Photometry object generated from the PhotometryFlux object.
    """
    from skyportal.models import Instrument, Obj, PHOT_SYS, PHOT_ZP, Photometry
    from sncosmo.photdata import PhotometricData

    # get the instrument
    instrument = Instrument.query.get(data['instrument_id'])
    if not instrument:
        raise ValidationError(
            f'Invalid instrument ID: {data["instrument_id"]}')

    # get the object
    obj = Obj.query.get(data['obj_id'])  # TODO: implement permissions checking
    if not obj:
        raise ValidationError(f'Invalid object ID: {data["obj_id"]}')

    if data["filter"] not in instrument.filters:
        raise ValidationError(
            f"Instrument {instrument.name} has no filter "
            f"{data['filter']}.")

    # convert flux to microJanskies
    table = Table([data])
    if data['flux'] is None:
        # this needs to be non-null for the conversion step;
        # it will be replaced with null afterwards
        table['flux'] = 0.0

    # conversion happens here
    photdata = PhotometricData(table).normalized(zp=PHOT_ZP, zpsys=PHOT_SYS)

    # replace with null if needed
    final_flux = None if data['flux'] is None else photdata.flux[0]

    p = Photometry(
        obj_id=data['obj_id'],
        mjd=data['mjd'],
        flux=final_flux,
        fluxerr=photdata.fluxerr[0],
        instrument_id=data['instrument_id'],
        assignment_id=data['assignment_id'],
        filter=data['filter'],
        ra=data['ra'],
        dec=data['dec'],
        ra_unc=data['ra_unc'],
        dec_unc=data['dec_unc'],
    )

    if 'alert_id' in data and data['alert_id'] is not None:
        p.alert_id = data['alert_id']

    return p
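# A standalone illustration of the normalization step both parsers rely on.
# sncosmo rescales the incoming flux from the packet's (zp, magsys) to the
# internal (PHOT_ZP, PHOT_SYS); with the same magsys on both sides this
# reduces to f_new = f_old * 10**(0.4 * (zp_new - zp_old)). Assumes
# PHOT_ZP = 23.9, PHOT_SYS = 'ab', and that 'ztfg' is a registered sncosmo
# bandpass -- check your versions before relying on the numbers.
from astropy.table import Table
from sncosmo.photdata import PhotometricData

table = Table([{'mjd': 58000.0, 'filter': 'ztfg', 'flux': 1000.0,
                'fluxerr': 10.0, 'zp': 25.0, 'magsys': 'ab'}])
photdata = PhotometricData(table).normalized(zp=23.9, zpsys='ab')
print(photdata.flux[0])  # ~363.1 == 1000 * 10**(0.4 * (23.9 - 25.0))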
def save_packets(self):
    for packet in self._open_avro():
        print(f"working on {packet['objectId']}")

        if self.only_pure and not self._is_alert_pure(packet):
            print(f"{self.fname}: not pure. Skipping")
            continue

        s = Source.query.filter(Source.id == packet["objectId"]).first()
        source_is_varstar = False
        new_source = s is None
        if s is not None:
            print("Found an existing source with id = " + packet["objectId"])
            source_is_varstar = s.varstar is True
            if (not self.clobber
                    and s.origin == f"{os.path.basename(self.fname)}"):
                print(f"already added this source with this avro packet "
                      f"{os.path.basename(self.fname)}")
                continue

        # make a dataframe and save the source/phot
        dflc = self._make_dataframe(packet)

        source_info = {
            'id': packet["objectId"],
            'ra': packet["candidate"]["ra"],
            'dec': packet["candidate"]["dec"],
            'ra_dis': packet["candidate"]["ra"],
            'dec_dis': packet["candidate"]["dec"],
            'dist_nearest_source': packet["candidate"].get("distnr"),
            'mag_nearest_source': packet["candidate"].get("magnr"),
            'e_mag_nearest_source': packet["candidate"].get("sigmagnr"),
            'sgmag1': packet["candidate"].get("sgmag1"),
            'srmag1': packet["candidate"].get("srmag1"),
            'simag1': packet["candidate"].get("simag1"),
            'objectidps1': packet["candidate"].get("objectidps1"),
            'sgscore1': packet["candidate"].get("sgscore1"),
            'distpsnr1': packet["candidate"].get("distpsnr1"),
            'score': packet['candidate']['rb']
        }

        if new_source:
            s = Source(**source_info,
                       origin=f"{os.path.basename(self.fname)}",
                       groups=[self.ztfpack.g])

        # check whether we have already added comments from this packet
        comments = (Comment.query
                    .filter(Comment.source_id == packet["objectId"])
                    .filter(Comment.origin ==
                            f"{os.path.basename(self.fname)}"))
        skip = False
        if self.clobber:
            if comments.count() > 0:
                print("removing preexisting comments from this packet")
                comments.delete()
                DBSession().commit()
        elif comments.count() > 0:
            skip = True

        if not skip:
            print(f"packet id: {packet['objectId']}")
            new_comments = [
                Comment(text=comment, source_id=packet["objectId"],
                        user=self.ztfpack.group_admin_user,
                        origin=f"{os.path.basename(self.fname)}")
                for comment in ["Added by ztf_upload_avro",
                                f"filename = {os.path.basename(self.fname)}"]
            ]
            if new_source:
                s.comments = new_comments

        photdata = []
        varstarness = []

        # solar-system object: a positive subtraction within 5 arcsec of a
        # known moving object
        ssdistnr = packet["candidate"].get("ssdistnr")
        is_roid = False
        if packet["candidate"].get("isdiffpos", 'f') in ["1", "t"]:
            if not ((ssdistnr is None) or (ssdistnr < 0) or (ssdistnr > 5)):
                is_roid = True

        for j, row in dflc.iterrows():
            rj = row.to_dict()

            # variable-star heuristic: coincident with a likely PS1 point
            # source, or a negative subtraction with a real detection
            if (((packet["candidate"].get("sgscore1", 1.0) or 1.0) >= 0.5 and
                 (packet["candidate"].get("distpsnr1", 10) or 10) < 1.0) or
                    (rj.get("isdiffpos", 'f') not in ["1", "t"] and
                     not pd.isnull(rj.get('magpsf')))):
                if not is_roid:  # make sure it's not an asteroid
                    varstarness.append(True)
                else:
                    varstarness.append(False)

            phot = {
                "mag": rj.pop('magpsf'),
                "e_mag": rj.pop("sigmapsf"),
                "lim_mag": rj.pop('diffmaglim'),
                "filter": str(rj.pop('fid')),
                "score": rj.pop("rb"),
                "candid": rj.pop("candid"),
                "isdiffpos": rj.pop("isdiffpos") in ["1", "t"],
                'dist_nearest_source': rj.pop("distnr"),
                'mag_nearest_source': rj.pop("magnr"),
                'e_mag_nearest_source': rj.pop("sigmagnr")
            }
            t = Time(rj.pop("jd"), format="jd")
            phot.update({
                "observed_at": t.iso,
                "mjd": t.mjd,
                "time_format": "iso",
                "time_scale": "utc"
            })

            # calculate the variable star mag by combining the reference
            # flux with the (signed) difference flux
            sign = 1.0 if phot["isdiffpos"] else -1.0
            mref = phot["mag_nearest_source"]
            mref_err = phot["e_mag_nearest_source"]
            mdiff = phot["mag"]
            mdiff_err = phot["e_mag"]

            # Three options here:
            #   - diff is detected in the positive (ref source got brighter)
            #   - diff is detected in the negative (ref source got fainter)
            #   - diff is undetected in the neg/pos (ref essentially unchanged)
            try:
                if not pd.isnull(mdiff):
                    total_mag = -2.5 * np.log10(
                        10 ** (-0.4 * mref) + sign * 10 ** (-0.4 * mdiff))
                    tmp_total_mag_errs = (
                        -2.5 * np.log10(
                            10 ** (-0.4 * mref)
                            + sign * 10 ** (-0.4 * (mdiff + mdiff_err)))
                        - total_mag,
                        -2.5 * np.log10(
                            10 ** (-0.4 * mref)
                            + sign * 10 ** (-0.4 * (mdiff - mdiff_err)))
                        - total_mag)
                    # add errors in quadrature -- geometric mean of the
                    # two-sided diff errors, plus the ref err
                    total_mag_err = np.sqrt(
                        -1.0 * tmp_total_mag_errs[0] * tmp_total_mag_errs[1]
                        + mref_err ** 2)
                else:
                    # undetected difference: keep the reference mag and fold
                    # in the limiting mag as a 5-sigma bound on the change
                    mref = packet["candidate"].get("magnr")
                    mref_err = packet["candidate"].get("sigmagnr")
                    diff_err = (-2.5 * np.log10(
                        10 ** (-0.4 * mref)
                        + sign * 10 ** (-0.4 * phot["lim_mag"])) - mref) / 5
                    total_mag = mref
                    total_mag_err = np.sqrt(mref_err ** 2 + diff_err ** 2)
            except Exception:
                # e.g. a net-negative total flux has no magnitude
                total_mag = 99
                total_mag_err = 0

            phot.update({"var_mag": total_mag, "var_e_mag": total_mag_err})

            # keep all the remaining non-null values for this epoch
            altdata = {k: v for k, v in rj.items() if not pd.isnull(v)}
            phot.update({"altdata": altdata})

            photdata.append(copy.copy(phot))

        photometry = (Photometry.query
                      .filter(Photometry.source_id == packet["objectId"])
                      .filter(Photometry.origin ==
                              f"{os.path.basename(self.fname)}"))
        skip = False
        if self.clobber:
            if photometry.count() > 0:
                print("removing preexisting photometry from this packet")
                photometry.delete()
                DBSession().commit()
        elif photometry.count() > 0:
            print("Existing photometry from this packet. "
                  "Skipping addition of more.")
            skip = True

        if not skip:
            phot_list = [
                Photometry(instrument=self.ztfpack.i1,
                           source_id=packet["objectId"],
                           origin=f"{os.path.basename(self.fname)}",
                           **row)
                for row in photdata
            ]
            if new_source:
                s.photometry = phot_list

        source_is_varstar = source_is_varstar or any(varstarness)
        s.varstar = source_is_varstar
        s.is_roid = is_roid
        s.transient = self._is_transient(dflc)
        DBSession().add(s)
        try:
            DBSession().commit()
        except Exception:
            print("error committing DB")

        for ttype, ztftype in [('new', 'Science'), ('ref', 'Template'),
                               ('sub', 'Difference')]:
            fname = f'{packet["candid"]}_{ttype}.png'
            gzname = f'{packet["candid"]}_{ttype}.fits.gz'
            t = Thumbnail(
                type=ttype,
                photometry_id=s.photometry[0].id,
                file_uri=f'static/thumbnails/{packet["objectId"]}/{fname}',
                origin=f"{os.path.basename(self.fname)}",
                public_url=f'/static/thumbnails/{packet["objectId"]}/{fname}')
            tgz = Thumbnail(
                type=ttype + "_gz",
                photometry_id=s.photometry[0].id,
                file_uri=f'static/thumbnails/{packet["objectId"]}/{gzname}',
                origin=f"{os.path.basename(self.fname)}",
                public_url=f'/static/thumbnails/{packet["objectId"]}/{gzname}')
            DBSession().add(t)
            DBSession().add(tgz)

            stamp = packet[f'cutout{ztftype}']['stampData']
            thumb_dir = (self.ztfpack.basedir
                         / f'static/thumbnails/{packet["objectId"]}')
            if ((not os.path.exists(thumb_dir / fname) or
                 not os.path.exists(thumb_dir / gzname))
                    and not self.clobber):
                with gzip.open(io.BytesIO(stamp), 'rb') as f:
                    with open(f"/tmp/{gzname}", "wb") as gz:
                        gz.write(f.read())
                    f.seek(0)
                    with fits.open(io.BytesIO(f.read())) as hdul:
                        hdul[0].data = np.flip(hdul[0].data, axis=0)
                        ffig = aplpy.FITSFigure(hdul[0])
                        ffig.show_grayscale(
                            stretch='arcsinh',
                            invert=True)  # ztftype != 'Difference'
                        ffig.save(f"/tmp/{fname}")
                if not os.path.exists(thumb_dir):
                    os.makedirs(thumb_dir)
                shutil.copy(f"/tmp/{fname}", thumb_dir / fname)
                shutil.copy(f"/tmp/{gzname}", thumb_dir / gzname)

        try:
            s.add_linked_thumbnails()
        except Exception:
            print("Not linking thumbnails...not on the 'net?")

        # grab the photometry for this source and update relevant quantities
        # (position, detection stats, per-filter summaries)
        dat = pd.read_sql(
            DBSession().query(Photometry)
            .filter(Photometry.source_id == packet["objectId"])
            .filter(Photometry.mag < 30).statement,
            DBSession().bind)

        if not s.varstar:
            infos = [(x["altdata"]["ra"], x["altdata"]["dec"], x["mag"],
                      x["e_mag"], x["score"], x["filter"])
                     for i, x in dat.iterrows()]
        else:
            infos = [(x["altdata"]["ra"], x["altdata"]["dec"], x["var_mag"],
                      x["var_e_mag"], x["score"], x["filter"])
                     for i, x in dat.iterrows()]

        ndet = len(dat[~pd.isnull(dat["mag"])])
        s.detect_photometry_count = ndet
        s.last_detected = np.max(dat[~pd.isnull(dat["mag"])]["observed_at"])

        calc_source_data = dict()
        # inverse-error weighted mean position, with the scatter as the error
        new_ra = np.average([x[0] for x in infos],
                            weights=[1. / x[3] for x in infos])
        new_dec = np.average([x[1] for x in infos],
                             weights=[1. / x[3] for x in infos])
        ra_err = np.std([x[0] for x in infos])
        dec_err = np.std([x[1] for x in infos])

        calc_source_data.update(
            {"min_score": np.nanmin([x[4] for x in infos])})
        calc_source_data.update(
            {"max_score": np.nanmax([x[4] for x in infos])})

        filts = list(set([x[-1] for x in infos]))
        for filt in filts:
            ii = [x for x in infos if x[-1] == filt]
            rez = np.average([x[2] for x in ii],
                             weights=[1 / x[3] for x in ii])
            if pd.isnull(rez):
                rez = None
            md = np.nanmax([x[2] for x in ii]) - np.nanmin([x[2] for x in ii])
            max_delta = md if not pd.isnull(md) else None
            calc_source_data.update(
                {filt: {"max_delta": max_delta, "mag_avg": rez}})

        s = Source.query.get(packet["objectId"])
        altdata = dict()
        for k in calc_source_data:
            if not pd.isnull(calc_source_data[k]):
                altdata.update({k: calc_source_data[k]})
        s.altdata = altdata
        s.ra = new_ra
        s.dec = new_dec
        s.ra_err = ra_err
        s.dec_err = dec_err

        c1 = SkyCoord(s.ra_dis * u.deg, s.dec_dis * u.deg, frame='fk5')
        c2 = SkyCoord(new_ra * u.deg, new_dec * u.deg, frame='fk5')
        sep = c1.separation(c2)
        s.offset = sep.arcsecond if not pd.isnull(sep.arcsecond) else 0.0

        # TNS
        tns = self._tns_search(s.ra_dis, s.dec_dis)
        s.tns_info = tns
        if tns["Name"]:
            s.tns_name = tns["Name"]

        # catalog search
        result_table = customSimbad.query_region(
            SkyCoord(f"{s.ra_dis}d {s.dec_dis}d", frame='icrs'),
            radius='0d0m3s')
        if result_table:
            try:
                s.simbad_class = result_table["OTYPE"][0].decode(
                    "utf-8", "ignore")
                rj = (result_table.to_pandas()
                      .dropna(axis='columns').iloc[0].to_json())
                s.simbad_info = rj
            except Exception:
                pass
        if s.simbad_class:
            comments = [
                Comment(text=comment, source_id=packet["objectId"],
                        user=self.ztfpack.group_admin_user,
                        ctype="classification",
                        origin=f"{os.path.basename(self.fname)}")
                for comment in [f"Simbad class = {s.simbad_class}"]
            ]

        result_table = customGaia.query_region(
            SkyCoord(ra=s.ra_dis, dec=s.dec_dis,
                     unit=(u.deg, u.deg), frame='icrs'),
            width="3s",
            catalog=["I/345/gaia2"])
        if result_table:
            try:
                rj = (result_table.pop().to_pandas()
                      .dropna(axis='columns').iloc[0].to_json())
                s.gaia_info = rj
            except Exception:
                pass

        DBSession().commit()
        print("added")
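# Worked example of the reference + difference combination used for the
# variable-star magnitude in save_packets above (illustrative numbers only):
# a positive subtraction adds the difference flux to the reference flux.
import numpy as np

mref, mdiff = 18.0, 19.0  # reference and difference magnitudes
total_mag = -2.5 * np.log10(10 ** (-0.4 * mref) + 10 ** (-0.4 * mdiff))
print(round(total_mag, 3))  # 17.636 -- fluxes add, so the combined source
                            # is brighter than the reference alone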