def filter_by_snr(utc_date, observable_asts, obs_config, obs_filter, exptime, snr_cut, sky_delta_mag=0.0, dbg=False):
    obs_tech_dict_list = construct_obs_tech_dict()
    site_code = obs_tech_dict_list[obs_config]['mpc_site_code']
    (site_name, site_long, site_lat, site_hgt) = get_sitepos(site_code)
    moon_ra, moon_dec, moon_diam = moon_ra_dec(utc_date, site_long, site_lat, site_hgt, dbg)
    moon_sep = radians(30.0)

    detected_asts = []
    num_moon_filtered = 0
    for asteroid in observable_asts:
        # emp_line = (ra, dec, mag, total_motion, alt_deg)
        emp_line = list(asteroid.values())[0]
        ra = emp_line[0]
        dec = emp_line[1]
        obj_moon_sep = S.sla_dsep(ra, dec, moon_ra, moon_dec)
        if obj_moon_sep > moon_sep:
            mag = emp_line[2]
            airmass = calculate_airmass(emp_line[4])
            if dbg:
                print("ra, dec, mag, total_motion, alt_deg, airmass", emp_line, airmass)
            snr = compute_snr(obs_tech_dict_list, obs_config, obs_filter, mag, airmass, exptime, sky_delta_mag, dbg)
            if snr >= snr_cut:
                detected_asts.append(asteroid)
        else:
            if dbg:
                print("Too close to the moon (%.1f deg) for %s: %s" % (degrees(obj_moon_sep), list(asteroid.keys())[0], emp_line))
            num_moon_filtered += 1
    return detected_asts, num_moon_filtered
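# A minimal standalone sketch (not part of the pipeline above) of the
# Moon-avoidance cut that filter_by_snr applies. It assumes only that
# pyslalib (imported elsewhere in this module as S) is installed; all
# coordinates are apparent RA/Dec in radians, and the 30 deg threshold
# matches the moon_sep default above.
from math import radians
from pyslalib import slalib


def passes_moon_cut(obj_ra, obj_dec, moon_ra, moon_dec, min_sep_deg=30.0):
    """Return True if the object is more than min_sep_deg from the Moon."""
    return slalib.sla_dsep(obj_ra, obj_dec, moon_ra, moon_dec) > radians(min_sep_deg)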
def compute_moon_sep(date, object_ra, object_dec, site='500'):
    '''Compute the separation between an object at <object_ra>, <object_dec>
    and the Moon at time <date> from the specified [site] (defaults to the
    geocenter, MPC code 500, if not specified). The separation is returned
    in degrees.'''
    site_name, site_long, site_lat, site_hgt = get_sitepos(site)
    moon_ra, moon_dec, diam = moon_ra_dec(date, site_long, site_lat, site_hgt)
    moon_obj_sep = sla_dsep(object_ra, object_dec, moon_ra, moon_dec)
    moon_obj_sep = degrees(moon_obj_sep)

    return moon_obj_sep
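# Hypothetical usage of compute_moon_sep (the date and coordinates below are
# illustrative only): RA/Dec are passed in radians, matching sla_dsep's
# expectations, and the separation comes back in degrees.
#
#   from datetime import datetime
#   from math import radians
#
#   sep = compute_moon_sep(datetime(2024, 1, 15, 12, 0, 0),
#                          radians(150.0), radians(-30.0), site='500')
#   print("Moon-object separation: %.1f deg" % sep)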
def angular_distance_between(app_ra1, app_dec1, app_ra2, app_dec2):
    radians_between = sla.sla_dsep(app_ra1.in_radians(), app_dec1.in_radians(),
                                   app_ra2.in_radians(), app_dec2.in_radians())
    return Angle(radians=radians_between)
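# Hypothetical usage, assuming the surrounding codebase's Angle type accepts
# a radians keyword and exposes in_radians(), as the function above implies:
#
#   sep = angular_distance_between(Angle(radians=1.00), Angle(radians=0.50),
#                                  Angle(radians=1.01), Angle(radians=0.50))
#   print(sep.in_radians())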
def Known_Pulsar_Rating(pfd):
    """
    Calculate the probability that a candidate is a known pulsar or a
    harmonic of a known pulsar.

    Parameters
    ----------
    pfd : class
        An instance of the prepfold.pfd class

    Returns
    -------
    names : list
        A list of ratings names
    ratings : list
        A list of ratings values
    """
    # The rating name
    name1 = "Known_Pulsar_Rating"

    # A fudge factor for error calculations
    factor = 0.3 * pfd.proflen

    # Get the candidate RA and DEC (in radians)
    cand_ra = PU.ra_to_rad(pfd.rastr)
    cand_dec = PU.dec_to_rad(pfd.decstr)

    # Get the period and folding epoch from bestprof, if it exists
    if hasattr(pfd.bestprof, "p0"):
        cand_p = pfd.bestprof.p0
        cand_epoch = pfd.bestprof.epochi + pfd.bestprof.epochf
        # Get the candidate period error from bestprof, if it exists
        if hasattr(pfd.bestprof, "p0err"):
            cand_p_err = factor * pfd.bestprof.p0err
        # Otherwise, try to estimate it
        else:
            cand_p_err = factor * cand_p**2 / (pfd.proflen * pfd.T)
    # Otherwise, use the barycentric or topocentric values
    elif pfd.bary_p1 != 0.0:
        cand_p = pfd.bary_p1
        cand_epoch = pfd.bepoch
        cand_p_err = factor * cand_p**2 / (pfd.proflen * pfd.T)
    elif pfd.topo_p1 != 0.0:
        cand_p = pfd.topo_p1
        cand_epoch = pfd.tepoch
        cand_p_err = factor * cand_p**2 / (pfd.proflen * pfd.T)

    # Get the min and max frequencies for this observation
    f_min = pfd.subfreqs.min()
    f_max = pfd.subfreqs.max()

    # Get the candidate's best DM
    cand_dm = pfd.bestdm
    # Try to estimate the DM error
    cand_dm_err = factor * cand_p * f_max**2 * f_min**2 / \
                  (4.15e3 * pfd.proflen * (f_max**2 - f_min**2))

    # Now loop through the catalog of known pulsars and find any that are
    # close to the candidate
    nearby_psrs = [psr for psr in PSRCAT.psrs
                   if (hasattr(psr, "ra") and hasattr(psr, "dec") and
                       sla_dsep(cand_ra, cand_dec, psr.ra, psr.dec) < 1.3 * BEAM_FWHM)]

    # If there were no pulsars nearby, return a rating of 0.0 and exit
    if len(nearby_psrs) == 0:
        rating1 = 0.0
        return [name1], [rating1]
    # Otherwise, try to estimate the probability that the candidate is a
    # known pulsar or its harmonic. This is done by calculating the
    # difference between the candidate's period and DM and those of any
    # nearby known pulsars, normalized by the error in the candidate values
    # (i.e., difference in "sigmas"). Calculate the probability assuming
    # Gaussian statistics.
    else:
        # Start off assuming zero probability that the candidate is known
        max_prob = 0.0
        for psr in nearby_psrs:
            # If possible, calculate the known pulsar period at the
            # observing epoch
            if hasattr(psr, "pepoch"):
                delta_t = (cand_epoch - psr.pepoch) * 86400.0  # seconds
                if hasattr(psr, "pd") and hasattr(psr, "pdd"):
                    psr_p = psr.p + psr.pd * delta_t + 0.5 * psr.pdd * delta_t**2
                elif hasattr(psr, "pd"):
                    psr_p = psr.p + psr.pd * delta_t
                else:
                    psr_p = psr.p
            else:
                psr_p = psr.p

            p_prob = 0.0  # The probability that the periods are the same
            # Try all harmonic ratios with up to 16 harmonics
            for a in range(1, 17):
                for b in range(1, 17):
                    ratio = float(a) / float(b)
                    # Difference between candidate and known pulsar harmonic
                    delta_p = abs(ratio * cand_p - psr_p)
                    # Calculate the equivalent Gaussian probability
                    tmp_prob = S.special.erfc(delta_p / (ratio**2 * cand_p_err) /
                                              N.sqrt(2))
                    # If this is the highest probability so far, store it
                    if tmp_prob > p_prob:
                        p_prob = tmp_prob

            # Update the total probability
            prob = p_prob
            # Get the difference in DM
            delta_dm = abs(cand_dm - psr.dm)
            # Multiply the total probability by the DM probability
            prob *= S.special.erfc(delta_dm / cand_dm_err / N.sqrt(2))

            # Update max_prob if necessary
            if prob > max_prob:
                max_prob = prob

        # Store the rating
        rating1 = max_prob
        return [name1], [rating1]
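# A self-contained sketch (assuming SciPy and NumPy are available) of the
# harmonic-matching step above: for each ratio a/b up to 16 harmonics, the
# period difference is converted to an equivalent Gaussian probability via
# the complementary error function, and the best match over all ratios is
# kept.
import numpy as np
from scipy.special import erfc


def harmonic_match_prob(cand_p, cand_p_err, psr_p, max_harm=16):
    """Max probability that cand_p is an a/b harmonic of psr_p."""
    best = 0.0
    for a in range(1, max_harm + 1):
        for b in range(1, max_harm + 1):
            ratio = a / b
            delta_p = abs(ratio * cand_p - psr_p)
            best = max(best, erfc(delta_p / (ratio**2 * cand_p_err) / np.sqrt(2)))
    return best

# Example: a candidate at half the known period matches at a/b = 2/1, so
# harmonic_match_prob(0.7515, 1e-4, 1.5030) returns a value close to 1.0.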
def handle(self, *args, **options):
    # Suppress incorrect FITSFixedWarnings
    warnings.simplefilter('ignore', FITSFixedWarning)

    self.stdout.write("==== Light curve building %s ====" % (datetime.now().strftime('%Y-%m-%d %H:%M')))
    try:
        start_super_block = SuperBlock.objects.get(tracking_number=options['supblock'])
    except SuperBlock.DoesNotExist:
        self.stdout.write("Cannot find SuperBlock with Tracking Number %s" % options['supblock'])
        exit(-1)

    start_blocks = Block.objects.filter(superblock=start_super_block.id)
    start_block = start_blocks[0]
    if options['single'] is True:
        super_blocks = [start_super_block, ]
    else:
        super_blocks = SuperBlock.objects.filter(body=start_super_block.body, block_start__gte=start_super_block.block_start - timedelta(days=options['timespan']))
    obs_date = None
    if options['date']:
        if isinstance(options['date'], str):
            try:
                obs_date = datetime.strptime(options['date'], '%Y%m%d')
            except ValueError:
                raise CommandError(usage)
        else:
            obs_date = options['date']

    # Initialize lists
    times = []
    alltimes = []
    mags = []
    mag_errs = []
    zps = []
    zp_errs = []
    mpc_lines = []
    psv_lines = []
    total_frame_count = 0
    mpc_site = []
    fwhm = []
    air_mass = []
    output_file_list = []

    # Build directory path / set permissions
    obj_name = sanitize_object_name(start_super_block.body.current_name())
    datadir = os.path.join(options['datadir'], obj_name)
    out_path = settings.DATA_ROOT
    data_path = ''
    rw_permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH
    if not os.path.exists(datadir) and not settings.USE_S3:
        try:
            os.makedirs(datadir)
            # Set directory permissions correctly for shared directories
            # Sets to (r)ead,(w)rite,e(x)ecute for owner & group, r-x for others
            os.chmod(datadir, stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH)
        except OSError:
            msg = "Error creating output path %s" % datadir
            raise CommandError(msg)
    sb_day = start_super_block.block_start.strftime("%Y%m%d")

    # Turn telescope class into a diameter for theoretical FWHM curve
    tel_classes = start_super_block.get_telclass()
    if len(tel_classes.split(",")) > 1:
        self.stdout.write("Multiple telescope sizes found; theoretical FWHM curve will be wrong")
        tel_class = tel_classes.split(",")[0]
    else:
        tel_class = tel_classes
    try:
        tel_diameter = float(tel_class.replace('m', '.'))
        tel_diameter *= u.m
    except ValueError:
        self.stdout.write("Error determining telescope diameter, assuming 0.4m")
        tel_diameter = 0.4 * u.m

    # Set offsets, converting from arcsec to radians
    ra_offset = radians(options['ra_offset'] / 3600)
    dec_offset = radians(options['dec_offset'] / 3600)
    for super_block in super_blocks:
        # Create, name, and open the ALCDEF file.
        if obs_date:
            alcdef_date = obs_date.strftime("%Y%m%d")
        else:
            alcdef_date = super_block.block_start.strftime("%Y%m%d")
        base_name = '{}_{}_{}_{}_'.format(obj_name, super_block.get_sites().replace(',', ''), alcdef_date, super_block.tracking_number)
        alcdef_filename = base_name + 'ALCDEF.txt'
        output_file_list.append('{},{}'.format(alcdef_filename, os.path.relpath(datadir, out_path)))
        alcdef_txt = ''
        block_list = Block.objects.filter(superblock=super_block.id)
        if obs_date:
            block_list = block_list.filter(when_observed__lt=obs_date + timedelta(days=2)).filter(when_observed__gt=obs_date)
        self.stdout.write("Analyzing SuperBlock# %s for %s" % (super_block.tracking_number, super_block.body.current_name()))
        for block in block_list:
            block_mags = []
            block_mag_errs = []
            block_times = []
            outmag = "NONE"
            self.stdout.write("Analyzing Block# %d" % block.id)

            obs_site = block.site

            # Get all useful frames from each block
            frames_red = Frame.objects.filter(block=block.id, frametype__in=[Frame.BANZAI_RED_FRAMETYPE]).order_by('filter', 'midpoint')
            frames_ql = Frame.objects.filter(block=block.id, frametype__in=[Frame.BANZAI_QL_FRAMETYPE]).order_by('filter', 'midpoint')
            if len(frames_red) >= len(frames_ql):
                frames_all_zp = frames_red
            else:
                frames_all_zp = frames_ql
            frames = frames_all_zp.filter(zeropoint__isnull=False)
            self.stdout.write("Found %d frames (of %d total) for Block# %d with good ZPs" % (frames.count(), frames_all_zp.count(), block.id))
            self.stdout.write("Searching within %.1f arcseconds and +/-%.2f delta magnitudes" % (options['boxwidth'], options['deltamag']))

            total_frame_count += frames.count()
            frame_data = []
            if frames_all_zp.count() != 0:
                elements = model_to_dict(block.body)
                filter_list = []

                for frame in frames_all_zp:
                    # Get the predicted position and magnitude of the target at the time of each frame
                    emp_line = compute_ephem(frame.midpoint, elements, frame.sitecode)
                    ra = S.sla_dranrm(emp_line['ra'] + ra_offset)
                    dec = copysign(S.sla_drange(emp_line['dec'] + dec_offset), emp_line['dec'] + dec_offset)
                    mag_estimate = emp_line['mag']
                    (ra_string, dec_string) = radec2strings(ra, dec, ' ')
                    # Find the list of frame sources within the search region of the predicted coordinates
                    sources = search_box(frame, ra, dec, options['boxwidth'])
                    midpoint_string = frame.midpoint.strftime('%Y-%m-%d %H:%M:%S')
                    self.stdout.write("%s %s %s V=%.1f %s (%d) %s" % (midpoint_string, ra_string, dec_string, mag_estimate, frame.sitecode, len(sources), frame.filename))
                    best_source = None
                    # Find the source most likely to be the target (could use some work)
                    if len(sources) != 0 and frame.zeropoint is not None:
                        if len(sources) == 1:
                            best_source = sources[0]
                        elif len(sources) > 1:
                            # If more than one source, pick the closest within deltamag
                            min_sep = options['boxwidth'] * options['boxwidth']
                            for source in sources:
                                sep = S.sla_dsep(ra, dec, radians(source.obs_ra), radians(source.obs_dec))
                                sep = degrees(sep) * 3600.0
                                src_ra_string, src_dec_string = radec2strings(radians(source.obs_ra), radians(source.obs_dec))
                                if len(block_mags) > 0:
                                    delta_mag = abs(block_mags[-1] - source.obs_mag)
                                else:
                                    delta_mag = abs(mag_estimate - source.obs_mag)
                                self.stdout.write("%s %s %s %s %.1f %.1f-%.1f %.1f" % (
                                    ra_string, dec_string, src_ra_string, src_dec_string, sep, mag_estimate, source.obs_mag, delta_mag))
                                if sep < min_sep and delta_mag <= options['deltamag']:
                                    min_sep = sep
                                    best_source = source
                    # Save the target source and add it to the output files.
                    if best_source and best_source.obs_mag > 0.0 and abs(mag_estimate - best_source.obs_mag) <= 3 * options['deltamag']:
                        block_times.append(frame.midpoint)
                        mpc_line, psv_line = self.make_source_measurement(block.body, frame, best_source, persist=options['persist'])
                        mpc_lines.append(mpc_line)
                        psv_lines.append(psv_line)
                        block_mags.append(best_source.obs_mag)
                        block_mag_errs.append(best_source.err_obs_mag)
                        filter_list.append(frame.ALCDEF_filter_format())
                    # We append these even if we don't have a matching source or zeropoint
                    # so we can plot conditions for all frames
                    zps.append(frame.zeropoint)
                    zp_errs.append(frame.zeropoint_err)
                    frame_data.append({'ra': ra, 'dec': dec, 'mag': mag_estimate,
                                       'bw': options['boxwidth'], 'dm': options['deltamag'],
                                       'best_source': best_source})
                    alltimes.append(frame.midpoint)
                    fwhm.append(frame.fwhm)
                    azimuth, altitude = moon_alt_az(frame.midpoint, ra, dec, *get_sitepos(frame.sitecode)[1:])
                    zenith_distance = radians(90) - altitude
                    air_mass.append(S.sla_airmas(zenith_distance))
                    obs_site = frame.sitecode
                    catalog = frame.photometric_catalog
                    if catalog == 'GAIA-DR2':
                        outmag = 'GG'
                    elif catalog == 'UCAC4':
                        outmag = 'SR'
                    if obs_site not in mpc_site:
                        mpc_site.append(obs_site)

                if len(block_times) > 1:
                    filter_set = list(set(filter_list))
                    for filt in filter_set:
                        mag_set = [m for m, f in zip(block_mags, filter_list) if f == filt]
                        time_set = [t for t, f in zip(block_times, filter_list) if f == filt]
                        error_set = [e for e, f in zip(block_mag_errs, filter_list) if f == filt]
                        alcdef_txt += self.output_alcdef(block, obs_site, time_set, mag_set, error_set, filt, outmag)
                mags += block_mags
                mag_errs += block_mag_errs
                times += block_times

                # Create a gif of the FITS files used for LC extraction
                data_path = make_data_dir(out_path, model_to_dict(frames_all_zp[0]))
                frames_list = [os.path.join(data_path, f.filename) for f in frames_all_zp]
                if not options['nogif']:
                    movie_file = make_gif(frames_list, sort=False, init_fr=100, center=3, out_path=data_path, plot_source=True, target_data=frame_data, show_reticle=True, progress=True)
                    if "WARNING" not in movie_file:
                        # Add write permissions to movie file
                        try:
                            os.chmod(movie_file, rw_permissions)
                        except PermissionError:
                            pass
                        # Create DataProduct
                        save_dataproduct(obj=block, filepath=movie_file, filetype=DataProduct.FRAME_GIF, force=options['overwrite'])
                        output_file_list.append('{},{}'.format(movie_file, os.path.relpath(data_path, out_path)))
                        self.stdout.write("New gif created: {}".format(movie_file))
                    else:
                        self.stdout.write(movie_file)
        save_dataproduct(obj=super_block, filepath=None, filetype=DataProduct.ALCDEF_TXT, filename=alcdef_filename, content=alcdef_txt, force=options['overwrite'])

    self.stdout.write("Found matches in %d of %d frames" % (len(times), total_frame_count))
    if not settings.USE_S3:
        # Write light curve data out in a similar format to Make_lc.csh
        i = 0
        lightcurve_file = open(os.path.join(datadir, base_name + 'lightcurve_data.txt'), 'w')
        mpc_file = open(os.path.join(datadir, base_name + 'mpc_positions.txt'), 'w')
        psv_file = open(os.path.join(datadir, base_name + 'ades_positions.psv'), 'w')
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve_data.txt'), os.path.relpath(datadir, out_path)))
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'mpc_positions.txt'), os.path.relpath(datadir, out_path)))
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'ades_positions.psv'), os.path.relpath(datadir, out_path)))

        # Calculate the integer part of the JD for the first frame and use this as a
        # constant in case of wrapover to the next day
        if len(times) > 0 and len(mags) > 0:
            mjd_offset = int(datetime2mjd_utc(times[0]))
            for time in times:
                time_jd = datetime2mjd_utc(time)
                time_jd_truncated = time_jd - mjd_offset
                if i == 0:
                    lightcurve_file.write('#Object: %s\n' % start_super_block.body.current_name())
                    lightcurve_file.write("#MJD-%.1f Mag. Mag. error\n" % mjd_offset)
                lightcurve_file.write("%7.5f %6.3f %5.3f\n" % (time_jd_truncated, mags[i], mag_errs[i]))
                i += 1
            lightcurve_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'lightcurve_data.txt'), rw_permissions)
            except PermissionError:
                pass

            # Write out MPC1992 80-column file
            for mpc_line in mpc_lines:
                mpc_file.write(mpc_line + '\n')
            mpc_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'mpc_positions.txt'), rw_permissions)
            except PermissionError:
                pass

            # Write out ADES Pipe Separated Value file
            for psv_line in psv_lines:
                psv_file.write(psv_line + '\n')
            psv_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'ades_positions.psv'), rw_permissions)
            except PermissionError:
                pass

            # Create default plot title
            if options['title'] is None:
                sites = ', '.join(mpc_site)
                try:
                    # For single dates and short site lists, put everything on a single line
                    if options['timespan'] < 1 and len(sites) <= 13:
                        plot_title = '%s from %s (%s) on %s' % (start_super_block.body.current_name(), start_block.site.upper(), sites, start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = ''
                    # For LCs covering multiple nights, reformat the title
                    elif options['timespan'] >= 1:
                        plot_title = '%s from %s to %s' % (start_block.body.current_name(),
                                                           (start_super_block.block_end - timedelta(days=options['timespan'])).strftime("%Y-%m-%d"),
                                                           start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = 'Sites: ' + sites
                    # For a single-night LC using many sites, put the sites on a 2nd line
                    else:
                        plot_title = '%s from %s on %s' % (start_super_block.body.current_name(), start_block.site.upper(), start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = 'Sites: ' + sites
                except TypeError:
                    plot_title = 'LC for %s' % (start_super_block.body.current_name())
                    subtitle = ''
            else:
                plot_title = options['title']
                subtitle = ''

            # Make plots
            if not settings.USE_S3:
                self.plot_timeseries(times, alltimes, mags, mag_errs, zps, zp_errs, fwhm, air_mass, title=plot_title, sub_title=subtitle, datadir=datadir, filename=base_name, diameter=tel_diameter)
                output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve_cond.png'), os.path.relpath(datadir, out_path)))
                output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve.png'), os.path.relpath(datadir, out_path)))
                try:
                    os.chmod(os.path.join(datadir, base_name + 'lightcurve_cond.png'), rw_permissions)
                except PermissionError:
                    pass
                try:
                    os.chmod(os.path.join(datadir, base_name + 'lightcurve.png'), rw_permissions)
                except PermissionError:
                    pass
        else:
            self.stdout.write("No sources matched.")

        if data_path:
            with open(os.path.join(data_path, base_name + 'lc_file_list.txt'), 'w') as output_file_file:
                output_file_file.write('# == Files created by Lightcurve Extraction for {} on {} ==\n'.format(obj_name, sb_day))
                for output_file in output_file_list:
                    output_file_file.write(output_file)
                    output_file_file.write('\n')
            self.stdout.write(f"New lc file list created: {os.path.join(data_path, base_name + 'lc_file_list.txt')}")
            try:
                os.chmod(os.path.join(data_path, base_name + 'lc_file_list.txt'), rw_permissions)
            except PermissionError:
                pass
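# Hypothetical invocation of this management command (the command name and
# option names below are assumptions inferred from the handle() body above,
# not confirmed against the repo):
#
#   python manage.py lightcurve_extraction <supblock tracking number> \
#       --date 20240115 --timespan 3 --boxwidth 5.0 --deltamag 0.5 \
#       --ra_offset 0 --dec_offset 0 --datadir /path/to/output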