def kn_candidates(
        objectId, rf_kn_vs_nonkn, rf_snia_vs_nonia, snn_snia_vs_nonia,
        snn_sn_vs_all, drb, classtar, jdstarthist, ndethist, cdsxmatch,
        ra, dec, cjdc, cfidc, cmagpsfc, csigmapsfc, cmagnrc, csigmagnrc,
        cmagzpscic, cisdiffposc) -> pd.Series:
    """ Pandas UDF of kn_candidates_ for Spark

    If the environment variable KNWEBHOOK is defined and match a webhook url,
    the alerts that pass the filter will be sent to the matching Slack channel.

    Parameters
    ----------
    objectId: Spark DataFrame Column
        Column containing the alert IDs
    rf_kn_vs_nonkn, rf_snia_vs_nonia, snn_snia_vs_nonia, snn_sn_vs_all: Spark DataFrame Columns
        Columns containing the scores for: 'Kilonova', 'Early SN Ia',
        'Ia SN vs non-Ia SN', 'SN Ia and Core-Collapse vs non-SN events'
    drb: Spark DataFrame Column
        Column containing the Deep-Learning Real Bogus score
    classtar: Spark DataFrame Column
        Column containing the sextractor score
    jdstarthist: Spark DataFrame Column
        Column containing earliest Julian dates of epoch [days]
    ndethist: Spark DataFrame Column
        Column containing the number of prior detections (theshold of 3 sigma)
    cdsxmatch: Spark DataFrame Column
        Column containing the cross-match values
    ra: Spark DataFrame Column
        Column containing the right Ascension of candidate; J2000 [deg]
    dec: Spark DataFrame Column
        Column containing the declination of candidate; J2000 [deg]
    cjdc, cfidc, cmagpsfc, csigmapsfc, cmagnrc, csigmagnrc, cmagzpscic: Spark DataFrame Columns
        Columns containing history of fid, magpsf, sigmapsf, magnr, sigmagnr,
        magzpsci, isdiffpos as arrays

    Returns
    ----------
    out: pandas.Series of bool
        Return a Pandas DataFrame with the appropriate flag:
        false for bad alert, and true for good alert.
    """
    # Extract last (new) measurement from the concatenated history columns
    jd = cjdc.apply(lambda x: x[-1])
    fid = cfidc.apply(lambda x: x[-1])

    # Science filter: boolean mask of alerts passing the KN selection
    f_kn = kn_candidates_(
        rf_kn_vs_nonkn, rf_snia_vs_nonia, snn_snia_vs_nonia, snn_sn_vs_all,
        drb, classtar, jd, jdstarthist, ndethist, cdsxmatch
    )

    if f_kn.any():
        # Galactic latitude transformation
        b = SkyCoord(
            np.array(ra[f_kn], dtype=float),
            np.array(dec[f_kn], dtype=float),
            unit='deg'
        ).galactic.b.deg

        # Simplify notations: restrict everything to the candidates only
        ra = Angle(np.array(ra.astype(float)[f_kn]) * u.degree).deg
        dec = Angle(np.array(dec.astype(float)[f_kn]) * u.degree).deg
        ra_formatted = Angle(ra * u.degree).to_string(
            precision=2, sep=' ', unit=u.hour)
        dec_formatted = Angle(dec * u.degree).to_string(
            precision=1, sep=' ', alwayssign=True)
        delta_jd_first = np.array(
            jd.astype(float)[f_kn] - jdstarthist.astype(float)[f_kn])
        rf_kn_vs_nonkn = np.array(rf_kn_vs_nonkn.astype(float)[f_kn])
        rf_snia_vs_nonia = np.array(rf_snia_vs_nonia.astype(float)[f_kn])
        snn_snia_vs_nonia = np.array(snn_snia_vs_nonia.astype(float)[f_kn])
        snn_sn_vs_all = np.array(snn_sn_vs_all.astype(float)[f_kn])

        # Redefine jd & fid relative to candidates
        fid = np.array(fid.astype(int)[f_kn])
        jd = np.array(jd)[f_kn]

        dict_filt = {1: 'g', 2: 'r'}
        for i, alertID in enumerate(objectId[f_kn]):
            # Careful - Spark casts None as NaN!
            maskNotNone = ~np.isnan(np.array(cmagpsfc[f_kn].values[i]))

            # Time since last detection (independently of the band)
            jd_hist_allbands = np.array(np.array(cjdc[f_kn])[i])[maskNotNone]
            delta_jd_last = jd_hist_allbands[-1] - jd_hist_allbands[-2]

            # Keep only valid measurements in the alert's own band
            filt = fid[i]
            maskFilter = np.array(cfidc[f_kn].values[i]) == filt
            m = maskNotNone * maskFilter
            if sum(m) < 2:
                # need at least two points in the band to compute a rate
                continue

            # DC mag (history + last measurement): only the last two epochs
            mag_hist, err_hist = np.array([
                dc_mag(k[0], k[1], k[2], k[3], k[4], k[5], k[6])
                for k in zip(
                    cfidc[f_kn].values[i][m][-2:],
                    cmagpsfc[f_kn].values[i][m][-2:],
                    csigmapsfc[f_kn].values[i][m][-2:],
                    cmagnrc[f_kn].values[i][m][-2:],
                    csigmagnrc[f_kn].values[i][m][-2:],
                    cmagzpscic[f_kn].values[i][m][-2:],
                    cisdiffposc[f_kn].values[i][m][-2:],
                )
            ]).T

            # Grab the last measurement and its error estimate
            mag = mag_hist[-1]
            err_mag = err_hist[-1]

            # Compute rate only if more than 1 measurement available
            if len(mag_hist) > 1:
                jd_hist = cjdc[f_kn].values[i][m]

                # rate is between `last` and `last-1` measurements only
                dmag = mag_hist[-1] - mag_hist[-2]
                dt = jd_hist[-1] - jd_hist[-2]
                rate = dmag / dt
                error_rate = np.sqrt(err_hist[-1]**2 + err_hist[-2]**2) / dt

            # information to send
            alert_text = """
            *New kilonova candidate:* <http://134.158.75.151:24000/{}|{}>
            """.format(alertID, alertID)
            knscore_text = "*Kilonova score:* {:.2f}".format(rf_kn_vs_nonkn[i])
            score_text = """
            *Other scores:*\n- Early SN Ia: {:.2f}\n- Ia SN vs non-Ia SN: {:.2f}\n- SN Ia and Core-Collapse vs non-SN: {:.2f}
            """.format(rf_snia_vs_nonia[i], snn_snia_vs_nonia[i], snn_sn_vs_all[i])
            time_text = """
            *Time:*\n- {} UTC\n - Time since last detection: {:.1f} days\n - Time since first detection: {:.1f} days
            """.format(
                Time(jd[i], format='jd').iso, delta_jd_last, delta_jd_first[i])
            measurements_text = """
            *Measurement (band {}):*\n- Apparent magnitude: {:.2f} ± {:.2f} \n- Rate: ({:.2f} ± {:.2f}) mag/day\n
            """.format(dict_filt[fid[i]], mag, err_mag, rate, error_rate)
            radec_text = """
            *RA/Dec:*\n- [hours, deg]: {} {}\n- [deg, deg]: {:.7f} {:+.7f}
            """.format(ra_formatted[i], dec_formatted[i], ra[i], dec[i])
            galactic_position_text = """
            *Galactic latitude:*\n- [deg]: {:.7f}""".format(b[i])
            tns_text = '*TNS:* <https://www.wis-tns.org/search?ra={}&decl={}&radius=5&coords_unit=arcsec|link>'.format(
                ra[i], dec[i])

            # message formatting (Slack Block Kit layout)
            blocks = [
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": alert_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": knscore_text
                        },
                    ]
                },
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": time_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": score_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": radec_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": measurements_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": galactic_position_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": tns_text
                        },
                    ]
                },
            ]

            error_message = """
            {} is not defined as env variable
            if an alert has passed the filter,
            the message has not been sent to Slack
            """

            # Standard channels
            for url_name in ['KNWEBHOOK', 'KNWEBHOOK_FINK']:
                if (url_name in os.environ):
                    requests.post(
                        os.environ[url_name],
                        json={
                            'blocks': blocks,
                            'username': '******'
                        },
                        headers={'Content-Type': 'application/json'},
                    )
                else:
                    log = logging.Logger('Kilonova filter')
                    log.warning(error_message.format(url_name))

            ama_in_env = ('KNWEBHOOK_AMA_CL' in os.environ)

            # Send alerts to amateurs only on Friday
            now = datetime.datetime.utcnow()

            # Monday is 1 and Sunday is 7
            is_friday = (now.isoweekday() == 5)

            # BUGFIX: previously the `else` branch logged
            # `error_message.format(url_name)` with `url_name` left over from
            # the loop above, and fired even when the post was skipped merely
            # because it was not Friday or the alert was too faint. Warn only
            # when the env variable itself is missing, with the right name.
            if (np.abs(b[i]) > 20) and (mag < 20) and is_friday:
                if ama_in_env:
                    requests.post(
                        os.environ['KNWEBHOOK_AMA_CL'],
                        json={
                            'blocks': blocks,
                            'username': '******'
                        },
                        headers={'Content-Type': 'application/json'},
                    )
                else:
                    log = logging.Logger('Kilonova filter')
                    log.warning(error_message.format('KNWEBHOOK_AMA_CL'))

    return f_kn
def early_kn_candidates(
        objectId, drb, classtar, jd, jdstarthist, ndethist, cdsxmatch,
        fid, magpsf, sigmapsf, magnr, sigmagnr, magzpsci, isdiffpos,
        ra, dec, roid, field) -> pd.Series:
    """ Return alerts considered as KN candidates.

    If the environment variable KNWEBHOOK is defined and match a webhook url,
    the alerts that pass the filter will be sent to the matching Slack channel.

    Note the default `data/mangrove_filtered.csv` catalog is loaded.

    Parameters
    ----------
    objectId: Spark DataFrame Column
        Column containing the alert IDs
    drb: Spark DataFrame Column
        Column containing the Deep-Learning Real Bogus score
    classtar: Spark DataFrame Column
        Column containing the sextractor score
    jd: Spark DataFrame Column
        Column containing observation Julian dates at start of exposure [days]
    jdstarthist: Spark DataFrame Column
        Column containing earliest Julian dates corresponding to ndethist
    ndethist: Spark DataFrame Column
        Column containing the number of prior detections (theshold of 3 sigma)
    cdsxmatch: Spark DataFrame Column
        Column containing the cross-match values
    fid: Spark DataFrame Column
        Column containing filter, 1 for green and 2 for red
    magpsf,sigmapsf: Spark DataFrame Columns
        Columns containing magnitude from PSF-fit photometry, and 1-sigma error
    magnr,sigmagnr: Spark DataFrame Columns
        Columns containing magnitude of nearest source in reference image
        PSF-catalog within 30 arcsec and 1-sigma error
    magzpsci: Spark DataFrame Column
        Column containing magnitude zero point for photometry estimates
    isdiffpos: Spark DataFrame Column
        Column containing: t or 1 => candidate is from positive (sci minus ref)
        subtraction; f or 0 => candidate is from negative (ref minus sci)
        subtraction
    ra: Spark DataFrame Column
        Column containing the right Ascension of candidate; J2000 [deg]
    dec: Spark DataFrame Column
        Column containing the declination of candidate; J2000 [deg]
    roid: Spark DataFrame Column
        Column containing the Solar System label
    field: Spark DataFrame Column
        Column containing the ZTF field numbers (int)

    Returns
    ------
    out: pandas.Series of bool
        Return a Pandas DataFrame with the appropriate flag:
        false for bad alert, and true for good alert.
    """
    # galactic plane (computed on the full columns, filtered below)
    gal = SkyCoord(ra.astype(float), dec.astype(float), unit='deg').galactic

    # Science filter + Mangrove host-galaxy cross-match
    out = perform_classification(
        drb, classtar, jd, jdstarthist, ndethist, cdsxmatch, fid, magpsf,
        sigmapsf, magnr, sigmagnr, magzpsci, isdiffpos, ra, dec, roid
    )
    f_kn, pdf_mangrove, host_galaxies, host_alert_separation, \
        abs_mag_candidate, mag, err_mag = out

    if f_kn.any():
        # Simplify notations: restrict everything to the candidates only
        b = gal.b.degree[f_kn]
        ra = Angle(np.array(ra.astype(float)[f_kn]) * u.degree).deg
        dec = Angle(np.array(dec.astype(float)[f_kn]) * u.degree).deg
        ra_formatted = Angle(ra * u.degree).to_string(
            precision=2, sep=' ', unit=u.hour)
        dec_formatted = Angle(dec * u.degree).to_string(
            precision=1, sep=' ', alwayssign=True)
        delta_jd_first = np.array(
            jd.astype(float)[f_kn] - jdstarthist.astype(float)[f_kn])

        # Redefine notations relative to candidates
        fid = np.array(fid)[f_kn]
        jd = np.array(jd)[f_kn]
        mag = mag[f_kn]
        err_mag = err_mag[f_kn]
        field = field[f_kn]

        dict_filt = {1: 'g', 2: 'r'}
        for i, alertID in enumerate(objectId[f_kn]):
            # information to send
            alert_text = """
            *New kilonova candidate:* <http://134.158.75.151:24000/{}|{}>
            """.format(alertID, alertID)
            time_text = """
            *Time:*\n- {} UTC\n - Time since first detection: {:.1f} hours
            """.format(Time(jd[i], format='jd').iso, delta_jd_first[i] * 24)
            measurements_text = """
            *Measurement (band {}):*\n- Apparent magnitude: {:.2f} ± {:.2f}
            """.format(dict_filt[fid[i]], mag[i], err_mag[i])
            # NOTE: [2:-1] strips the b'...' wrapping of byte-string names
            # stored in the Mangrove catalog — TODO confirm against the CSV.
            host_text = """
            *Presumed host galaxy:*\n- HyperLEDA Name: {:s}\n- 2MASS XSC Name: {:s}\n- Luminosity distance: ({:.2f} ± {:.2f}) Mpc\n- RA/Dec: {:.7f} {:+.7f}\n- log10(Stellar mass/Ms): {:.2f}
            """.format(
                pdf_mangrove.loc[host_galaxies[i], 'HyperLEDA_name'][2:-1],
                pdf_mangrove.loc[host_galaxies[i], '2MASS_name'][2:-1],
                pdf_mangrove.loc[host_galaxies[i], 'lum_dist'],
                pdf_mangrove.loc[host_galaxies[i], 'dist_err'],
                pdf_mangrove.loc[host_galaxies[i], 'ra'],
                pdf_mangrove.loc[host_galaxies[i], 'dec'],
                pdf_mangrove.loc[host_galaxies[i], 'stellarmass'],
            )
            crossmatch_text = """
            *Cross-match: *\n- Alert-host distance: {:.2f} kpc\n- Absolute magnitude: {:.2f}
            """.format(
                host_alert_separation[i] * pdf_mangrove.loc[
                    host_galaxies[i], 'ang_dist'] * 1000,
                abs_mag_candidate[i],
            )
            radec_text = """
            *RA/Dec:*\n- [hours, deg]: {} {}\n- [deg, deg]: {:.7f} {:+.7f}
            """.format(ra_formatted[i], dec_formatted[i], ra[i], dec[i])
            galactic_position_text = """
            *Galactic latitude:*\n- [deg]: {:.7f}""".format(b[i])
            tns_text = '*TNS:* <https://www.wis-tns.org/search?ra={}&decl={}&radius=5&coords_unit=arcsec|link>'.format(
                ra[i], dec[i])

            # message formatting (Slack Block Kit layout)
            blocks = [
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": alert_text
                        },
                    ]
                },
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": time_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": host_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": radec_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": crossmatch_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": galactic_position_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": measurements_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": tns_text
                        },
                    ]
                },
            ]

            # Standard channels
            error_message = """
            {} is not defined as env variable
            if an alert has passed the filter,
            the message has not been sent to Slack
            """
            for url_name in ['KNWEBHOOK', 'KNWEBHOOK_FINK']:
                if (url_name in os.environ):
                    requests.post(
                        os.environ[url_name],
                        json={
                            'blocks': blocks,
                            'username': '******'
                        },
                        headers={'Content-Type': 'application/json'},
                    )
                else:
                    log = logging.Logger('Kilonova filter')
                    log.warning(error_message.format(url_name))

            # Grandma amateur channel
            ama_in_env = ('KNWEBHOOK_AMA_GALAXIES' in os.environ)

            # Send alerts to amateurs only on Friday
            now = datetime.datetime.utcnow()

            # Monday is 1 and Sunday is 7
            is_friday = (now.isoweekday() == 5)

            # BUGFIX: warn about the missing env variable only when it is
            # actually missing — previously the warning fired whenever the
            # post was skipped, even just because it was not Friday.
            if (np.abs(b[i]) > 20) and (mag[i] < 20) and is_friday:
                if ama_in_env:
                    requests.post(
                        os.environ['KNWEBHOOK_AMA_GALAXIES'],
                        json={
                            'blocks': blocks,
                            'username': '******'
                        },
                        headers={'Content-Type': 'application/json'},
                    )
                else:
                    log = logging.Logger('Kilonova filter')
                    log.warning(
                        error_message.format('KNWEBHOOK_AMA_GALAXIES'))

            # DWF channel and requirements
            dwf_ztf_fields = [1525, 530, 482, 1476, 388, 1433]
            dwf_in_env = ('KNWEBHOOK_DWF' in os.environ)

            # BUGFIX: same as above — only warn when KNWEBHOOK_DWF is missing,
            # not whenever the field is outside the DWF footprint.
            if int(field.values[i]) in dwf_ztf_fields:
                if dwf_in_env:
                    requests.post(
                        os.environ['KNWEBHOOK_DWF'],
                        json={
                            'blocks': blocks,
                            'username': '******'
                        },
                        headers={'Content-Type': 'application/json'},
                    )
                else:
                    log = logging.Logger('Kilonova filter')
                    log.warning(error_message.format('KNWEBHOOK_DWF'))

    return f_kn
def rate_based_kn_candidates(
        objectId, rfscore, snn_snia_vs_nonia, snn_sn_vs_all, drb, classtar,
        jdstarthist, ndethist, cdsxmatch, ra, dec, ssdistnr, cjdc, cfidc,
        cmagpsfc, csigmapsfc, cmagnrc, csigmagnrc, cmagzpscic,
        cisdiffposc) -> pd.Series:
    """ Return alerts considered as KN candidates.

    The cuts are based on Andreoni et al. 2021 https://arxiv.org/abs/2104.06352

    If the environment variable KNWEBHOOK is defined and match a webhook url,
    the alerts that pass the filter will be sent to the matching Slack channel.

    Parameters
    ----------
    objectId: Spark DataFrame Column
        Column containing the alert IDs
    rfscore, snn_snia_vs_nonia, snn_sn_vs_all: Spark DataFrame Columns
        Columns containing the scores for: 'Early SN Ia',
        'Ia SN vs non-Ia SN', 'SN Ia and Core-Collapse vs non-SN events'
    drb: Spark DataFrame Column
        Column containing the Deep-Learning Real Bogus score
    classtar: Spark DataFrame Column
        Column containing the sextractor score
    jdstarthist: Spark DataFrame Column
        Column containing earliest Julian dates of epoch [days]
    ndethist: Spark DataFrame Column
        Column containing the number of prior detections (theshold of 3 sigma)
    cdsxmatch: Spark DataFrame Column
        Column containing the cross-match values
    ra: Spark DataFrame Column
        Column containing the right Ascension of candidate; J2000 [deg]
    dec: Spark DataFrame Column
        Column containing the declination of candidate; J2000 [deg]
    ssdistnr: Spark DataFrame Column
        distance to nearest known solar system object;
        -999.0 if none [arcsec]
    cjdc, cfidc, cmagpsfc, csigmapsfc, cmagnrc, csigmagnrc, cmagzpscic: Spark DataFrame Columns
        Columns containing history of fid, magpsf, sigmapsf, magnr, sigmagnr,
        magzpsci, isdiffpos as arrays

    Returns
    ----------
    out: pandas.Series of bool
        Return a Pandas DataFrame with the appropriate flag:
        false for bad alert, and true for good alert.
    """
    # Extract last (new) measurement from the concatenated history columns
    jd = cjdc.apply(lambda x: x[-1])
    fid = cfidc.apply(lambda x: x[-1])
    isdiffpos = cisdiffposc.apply(lambda x: x[-1])

    # Quality and novelty cuts (Andreoni et al. 2021)
    high_drb = drb.astype(float) > 0.9
    high_classtar = classtar.astype(float) > 0.4
    new_detection = jd.astype(float) - jdstarthist.astype(float) < 14
    small_detection_history = ndethist.astype(float) < 20
    appeared = isdiffpos.astype(str) == 't'
    # ssdistnr is -999.0 when there is no nearby MPC object
    far_from_mpc = (ssdistnr.astype(float) > 10) | (ssdistnr.astype(float) < 0)

    # galactic plane
    b = SkyCoord(
        ra.astype(float), dec.astype(float), unit='deg').galactic.b.deg
    away_from_galactic_plane = np.abs(b) > 10

    list_simbad_galaxies = [
        "galaxy", "Galaxy", "EmG", "Seyfert", "Seyfert_1", "Seyfert_2",
        "BlueCompG", "StarburstG", "LSB_G", "HII_G", "High_z_G", "GinPair",
        "GinGroup", "BClG", "GinCl", "PartofG",
    ]
    keep_cds = \
        ["Unknown", "Transient", "Fail"] + list_simbad_galaxies

    f_kn = high_drb & high_classtar & new_detection & small_detection_history
    f_kn = f_kn & cdsxmatch.isin(keep_cds) & appeared & far_from_mpc
    f_kn = f_kn & away_from_galactic_plane

    # Compute rate and error rate, get magnitude and its error
    rate = np.zeros(len(fid))
    sigma_rate = np.zeros(len(fid))
    mag = np.zeros(len(fid))
    err_mag = np.zeros(len(fid))
    # positions of the pre-selected candidates in the full-length arrays
    index_mask = np.argwhere(f_kn)
    for i, alertID in enumerate(objectId[f_kn]):
        # Spark casts None as NaN
        maskNotNone = ~np.isnan(np.array(cmagpsfc[f_kn].values[i]))
        maskFilter = np.array(cfidc[f_kn].values[i]) == np.array(fid)[f_kn][i]
        m = maskNotNone * maskFilter
        if sum(m) < 2:
            # need at least two valid points in the band to fit a rate
            continue

        # DC mag (history + last measurement)
        mag_hist, err_hist = np.array([
            dc_mag(k[0], k[1], k[2], k[3], k[4], k[5], k[6])
            for k in zip(
                cfidc[f_kn].values[i][m],
                cmagpsfc[f_kn].values[i][m],
                csigmapsfc[f_kn].values[i][m],
                cmagnrc[f_kn].values[i][m],
                csigmagnrc[f_kn].values[i][m],
                cmagzpscic[f_kn].values[i][m],
                cisdiffposc[f_kn].values[i][m],
            )
        ]).T

        # remove abnormal values
        mask_outliers = mag_hist < 21
        if sum(mask_outliers) < 2:
            continue

        jd_hist = cjdc[f_kn].values[i][m][mask_outliers]
        # need a long-enough baseline (> 0.5 day) for a meaningful fit
        if jd_hist[-1] - jd_hist[0] > 0.5:
            # Compute rate via a weighted linear fit mag(t) = slope * t + off
            popt, pcov = curve_fit(
                lambda x, slope, intercept: slope * x + intercept,
                jd_hist,
                mag_hist[mask_outliers],
                sigma=err_hist[mask_outliers],
            )
            rate[index_mask[i]] = popt[0]
            # BUGFIX: pcov[0, 0] is the *variance* of the fitted slope;
            # the 1-sigma error reported in the Slack message must be its
            # square root (see scipy.optimize.curve_fit docs).
            sigma_rate[index_mask[i]] = np.sqrt(pcov[0, 0])

        # Grab the last measurement and its error estimate
        mag[index_mask[i]] = mag_hist[-1]
        err_mag[index_mask[i]] = err_hist[-1]

    # filter on rate. rate is 0 where f_kn is already false.
    f_kn = pd.Series(np.array(rate) > 0.3)

    # check the nature of close objects in SDSS catalog
    if f_kn.any():
        no_star = []
        for i in range(sum(f_kn)):
            pos = SkyCoord(
                ra=np.array(ra[f_kn])[i] * u.degree,
                dec=np.array(dec[f_kn])[i] * u.degree
            )
            # for a test on "many" objects, you may wait 1s to stay under the
            # query limit.
            table = SDSS.query_region(
                pos, fields=['type'], radius=5 * u.arcsec)
            type_close_objects = []
            if table is not None:
                type_close_objects = table['type']
            # types: 0: UNKNOWN, 1: STAR, 2: GALAXY, 3: QSO, 4: HIZ_QSO,
            # 5: SKY, 6: STAR_LATE, 7: GAL_EM
            to_remove_types = [1, 3, 4, 6]
            no_star.append(
                len(np.intersect1d(type_close_objects, to_remove_types)) == 0)
        # drop candidates with a star-like / QSO-like close object
        f_kn.loc[f_kn] = np.array(no_star, dtype=bool)

    # Simplify notations
    if f_kn.any():
        # coordinates
        b = np.array(b)[f_kn]
        ra = Angle(np.array(ra.astype(float)[f_kn]) * u.degree).deg
        dec = Angle(np.array(dec.astype(float)[f_kn]) * u.degree).deg
        ra_formatted = Angle(ra * u.degree).to_string(
            precision=2, sep=' ', unit=u.hour)
        dec_formatted = Angle(dec * u.degree).to_string(
            precision=1, sep=' ', alwayssign=True)
        delta_jd_first = np.array(
            jd.astype(float)[f_kn] - jdstarthist.astype(float)[f_kn])

        # scores
        rfscore = np.array(rfscore.astype(float)[f_kn])
        snn_snia_vs_nonia = np.array(snn_snia_vs_nonia.astype(float)[f_kn])
        snn_sn_vs_all = np.array(snn_sn_vs_all.astype(float)[f_kn])

        # time
        fid = np.array(fid.astype(int)[f_kn])
        jd = np.array(jd)[f_kn]

        # measurements
        mag = mag[f_kn]
        rate = rate[f_kn]
        err_mag = err_mag[f_kn]
        sigma_rate = sigma_rate[f_kn]

        # message for candidates
        for i, alertID in enumerate(objectId[f_kn]):
            # Time since last detection (independently of the band)
            maskNotNone = ~np.isnan(np.array(cmagpsfc[f_kn].values[i]))
            jd_hist_allbands = np.array(np.array(cjdc[f_kn])[i])[maskNotNone]
            delta_jd_last = jd_hist_allbands[-1] - jd_hist_allbands[-2]

            # information to send
            dict_filt = {1: 'g', 2: 'r'}
            alert_text = """
            *New kilonova candidate:* <http://134.158.75.151:24000/{}|{}>
            """.format(alertID, alertID)
            score_text = """
            *Scores:*\n- Early SN Ia: {:.2f}\n- Ia SN vs non-Ia SN: {:.2f}\n- SN Ia and Core-Collapse vs non-SN: {:.2f}
            """.format(rfscore[i], snn_snia_vs_nonia[i], snn_sn_vs_all[i])
            time_text = """
            *Time:*\n- {} UTC\n - Time since last detection: {:.1f} days\n - Time since first detection: {:.1f} days
            """.format(
                Time(jd[i], format='jd').iso, delta_jd_last, delta_jd_first[i])
            measurements_text = """
            *Measurement (band {}):*\n- Apparent magnitude: {:.2f} ± {:.2f} \n- Rate: ({:.2f} ± {:.2f}) mag/day\n
            """.format(dict_filt[fid[i]], mag[i], err_mag[i], rate[i], sigma_rate[i])
            radec_text = """
            *RA/Dec:*\n- [hours, deg]: {} {}\n- [deg, deg]: {:.7f} {:+.7f}
            """.format(ra_formatted[i], dec_formatted[i], ra[i], dec[i])
            galactic_position_text = """
            *Galactic latitude:*\n- [deg]: {:.7f}""".format(b[i])
            tns_text = '*TNS:* <https://www.wis-tns.org/search?ra={}&decl={}&radius=5&coords_unit=arcsec|link>'.format(
                ra[i], dec[i])

            # message formatting (Slack Block Kit layout)
            blocks = [
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": alert_text
                        },
                    ]
                },
                {
                    "type": "section",
                    "fields": [
                        {
                            "type": "mrkdwn",
                            "text": time_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": score_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": radec_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": measurements_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": galactic_position_text
                        },
                        {
                            "type": "mrkdwn",
                            "text": tns_text
                        },
                    ]
                },
            ]

            error_message = """
            {} is not defined as env variable
            if an alert has passed the filter,
            the message has not been sent to Slack
            """

            # Standard channels
            for url_name in ['KNWEBHOOK', 'KNWEBHOOK_FINK']:
                if (url_name in os.environ):
                    requests.post(
                        os.environ[url_name],
                        json={
                            'blocks': blocks,
                            'username': '******'
                        },
                        headers={'Content-Type': 'application/json'},
                    )
                else:
                    log = logging.Logger('Kilonova filter')
                    log.warning(error_message.format(url_name))

            ama_in_env = ('KNWEBHOOK_AMA_RATE' in os.environ)

            # Send alerts to amateurs only on Friday
            now = datetime.datetime.utcnow()

            # Monday is 1 and Sunday is 7
            is_friday = (now.isoweekday() == 5)

            # BUGFIX: previously the `else` branch logged a stale `url_name`
            # from the loop above and fired even when the post was skipped
            # merely because it was not Friday or the alert was too faint.
            # Warn only when KNWEBHOOK_AMA_RATE itself is missing.
            if (np.abs(b[i]) > 20) and (mag[i] < 20) and is_friday:
                if ama_in_env:
                    requests.post(
                        os.environ['KNWEBHOOK_AMA_RATE'],
                        json={
                            'blocks': blocks,
                            'username': '******'
                        },
                        headers={'Content-Type': 'application/json'},
                    )
                else:
                    log = logging.Logger('Kilonova filter')
                    log.warning(error_message.format('KNWEBHOOK_AMA_RATE'))

    return f_kn