def getPairedStarsSkyPositions(img_x, img_y, jd, platepar):
    """ Compute RA, Dec of all paired stars on the image given the platepar.

    Arguments:
        img_x: [ndarray] Array of column values of the stars.
        img_y: [ndarray] Array of row values of the stars.
        jd: [float] Julian date for transformation.
        platepar: [Platepar instance] Platepar object.

    Return:
        (ra_array, dec_array): [tuple of ndarrays] Arrays of RA and Dec of stars on the image.
    """

    # All stars share the same observation time and a dummy intensity sum of 1
    n_stars = len(img_x)
    time_list = n_stars*[jd2Date(jd)]
    level_list = n_stars*[1]

    # Map the image coordinates to the sky (extinction correction is not needed here)
    _, ra_array, dec_array, _ = xyToRaDecPP(time_list, img_x, img_y, level_list, platepar, \
        extinction_correction=False)

    return ra_array, dec_array
def xyHt2Geo(platepar, x, y, area_ht, indicate_limit=False, elev_limit=5):
    """ Given pixel coordinates on the image and a height above sea level, compute geo coordinates
        of the point. The apparent elevation is clamped to elev_limit degrees above the horizon.

    Arguments:
        platepar: [Platepar object]
        x: [float] Image X coordinate.
        y: [float] Image Y coordinate.
        area_ht: [float] Height above sea level (meters).

    Keyword arguments:
        indicate_limit: [bool] Indicate that the elevation was below the limit of 5 deg by setting
            the height to -1. False by default.
        elev_limit: [float] Limit of elevation above horizon (deg). 5 degrees by default.

    Return:
        (r, lat, lon, ht): [tuple of floats] range in meters, latitude and longitude in degrees, \
            WGS84 height in meters
    """

    # Map the pixel to equatorial coordinates at the J2000 epoch, so no precession is needed
    _, ra, dec, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], [x], [y], [1], platepar, \
        extinction_correction=False)

    # Convert the equatorial coordinates of the point to horizontal ones
    azim, elev = raDec2AltAz(ra[0], dec[0], J2000_JD.days, platepar.lat, platepar.lon)

    # Clamp the elevation to the given limit above the horizon
    below_limit = elev < elev_limit
    if below_limit:
        elev = elev_limit

    # Project the line of sight to the given height above sea level
    p_r, p_lat, p_lon, p_ht = AEH2LatLonAlt(azim, elev, area_ht, platepar.lat, platepar.lon, \
        platepar.elev)

    # If requested, flag points that hit the elevation limit by setting the height to -1
    if below_limit and indicate_limit:
        p_ht = -1

    return p_r, p_lat, p_lon, p_ht
def collectingArea(platepar, mask=None, side_points=20, ht_min=60, ht_max=130, dht=2, elev_limit=10):
    """ Compute the collecting area for the range of given heights.

    Arguments:
        platepar: [Platepar object]

    Keyword arguments:
        mask: [Mask object] Mask object, None by default.
        side_points: [int] How many points to use to evaluate the FOV on each side of the image.
            Normalized to the longest side.
        ht_min: [float] Minimum height (km).
        ht_max: [float] Maximum height (km).
        dht: [float] Height delta (km).
        elev_limit: [float] Limit of elevation above horizon (deg). 10 degrees by default.

    Return:
        col_areas_ht: [dict] A dictionary where the keys are heights of area evaluation, and values
            are segment dictionaries. Segment dictionaries have keys which are tuples of (x, y)
            coordinates of segment midpoints, and values are segment collection areas corrected for
            sensor effects.
    """

    # If the mask is not given, make a dummy mask with all white pixels
    if mask is None:
        mask = MaskStructure(255 + np.zeros((platepar.Y_res, platepar.X_res), dtype=np.uint8))

    # Compute the number of samples for every image axis (normalized to the longest side)
    longer_side_points = side_points
    shorter_side_points = int(np.ceil(side_points*platepar.Y_res/platepar.X_res))

    # Compute pixel delta for every side
    longer_dpx = int(platepar.X_res//longer_side_points)
    shorter_dpx = int(platepar.Y_res//shorter_side_points)

    # Dictionary of collection areas per height
    col_areas_ht = collections.OrderedDict()

    # Estimate the collection area for a given range of heights
    for ht in np.arange(ht_min, ht_max + dht, dht):

        # Convert the height to meters
        ht = 1000*ht

        print(ht/1000, "km")

        total_area = 0

        # Dictionary of computed sensor-corrected collection areas where X and Y are keys
        col_areas_xy = collections.OrderedDict()

        # Sample the image
        # NOTE: dtype=int is used instead of the np.int alias, which was deprecated in NumPy 1.20
        #   and removed in NumPy 1.24
        for x0 in np.linspace(0, platepar.X_res, longer_side_points, dtype=int, endpoint=False):
            for y0 in np.linspace(0, platepar.Y_res, shorter_side_points, dtype=int, endpoint=False):

                # Compute lower right corners of the segment
                xe = x0 + longer_dpx
                ye = y0 + shorter_dpx

                # Compute geo coordinates of the image corners (if the corner is below the
                #   elevation limit, the *_ht value will be -1)
                _, ul_lat, ul_lon, ul_ht = xyHt2Geo(platepar, x0, y0, ht, indicate_limit=True, \
                    elev_limit=elev_limit)
                _, ll_lat, ll_lon, ll_ht = xyHt2Geo(platepar, x0, ye, ht, indicate_limit=True, \
                    elev_limit=elev_limit)
                _, lr_lat, lr_lon, lr_ht = xyHt2Geo(platepar, xe, ye, ht, indicate_limit=True, \
                    elev_limit=elev_limit)
                _, ur_lat, ur_lon, ur_ht = xyHt2Geo(platepar, xe, y0, ht, indicate_limit=True, \
                    elev_limit=elev_limit)

                # Skip the block if all corners are hitting the lower apparent elevation limit
                if np.all([ul_ht < 0, ll_ht < 0, lr_ht < 0, ur_ht < 0]):
                    continue

                # Make a polygon (clockwise direction)
                lats = [ul_lat, ll_lat, lr_lat, ur_lat]
                lons = [ul_lon, ll_lon, lr_lon, ur_lon]

                # Compute the area of the polygon
                area = areaGeoPolygon(lats, lons, ht)

                ### Apply sensitivity corrections to the area ###

                # Compute the ratio of the unmasked portion of the segment
                #   (~ on uint8 flips 255 <-> 0, so the count is the number of masked pixels)
                mask_segment = mask.img[y0:ye, x0:xe]
                unmasked_ratio = 1 - np.count_nonzero(~mask_segment)/mask_segment.size

                ## Compute the pointing direction and the vignetting and extinction loss for the
                ##   mean location

                x_mean = (x0 + xe)/2
                y_mean = (y0 + ye)/2

                # Use a test pixel sum
                test_px_sum = 400

                # Compute the pointing direction and magnitude corrected for vignetting and
                #   extinction
                _, ra, dec, mag = xyToRaDecPP([jd2Date(J2000_JD.days)], [x_mean], [y_mean], \
                    [test_px_sum], platepar)
                azim, elev = raDec2AltAz(ra[0], dec[0], J2000_JD.days, platepar.lat, platepar.lon)

                # Compute the pixel sum back assuming no corrections
                rev_level = 10**((mag[0] - platepar.mag_lev)/(-2.5))

                # Compute the sensitivity loss due to vignetting and extinction
                sensitivity_ratio = test_px_sum/rev_level

                ##

                # Compute the range correction (w.r.t 100 km) to the mean point
                r, _, _, _ = xyHt2Geo(platepar, x_mean, y_mean, ht, indicate_limit=True, \
                    elev_limit=elev_limit)

                # Correct the area for the masked portion
                area *= unmasked_ratio

                ### ###

                # Store the raw masked segment collection area, sensitivity, and the range
                col_areas_xy[(x_mean, y_mean)] = [area, azim, elev, sensitivity_ratio, r]

                total_area += area

        # Store segments to the height dictionary (save a copy so it doesn't get overwritten)
        col_areas_ht[float(ht)] = dict(col_areas_xy)

        print("SUM:", total_area/1e6, "km^2")

        # Compare to the total area computed from the whole FOV polygon
        side_points_list = fovArea(platepar, mask=mask, area_ht=ht, side_points=side_points, \
            elev_limit=elev_limit)
        lats = []
        lons = []
        for side in side_points_list:
            for entry in side:
                lats.append(entry[0])
                lons.append(entry[1])

        print("DIR:", areaGeoPolygon(lats, lons, ht)/1e6)

    return col_areas_ht
print(pp) # Try forward and reverse mapping, and compare results for i in range(5): # Randomly generate a pick inside the image x_img = np.random.uniform(0, pp.X_res) y_img = np.random.uniform(0, pp.Y_res) # Take current time time_data = [2020, 5, 30, 1, 20, 34, 567] # Map to RA/Dec jd_data, ra_data, dec_data, _ = xyToRaDecPP([time_data], [x_img], \ [y_img], [1], pp, extinction_correction=False) # Map back to X, Y x_data, y_data = raDecToXYPP(ra_data, dec_data, jd_data[0], pp) # Map forward to sky again _, ra_data_rev, dec_data_rev, _ = xyToRaDecPP([time_data], x_data, \ y_data, [1], pp, extinction_correction=False) print() print("-----------------------") print("Init image coordinates:") print("X = {:.3f}".format(x_img)) print("Y = {:.3f}".format(y_img)) print("Sky coordinates:")
def updateRaDecGrid(grid, platepar):
    """ Updates the values of grid to form a right ascension and declination grid.

    The grid is drawn as one long polyline; the `cuts`/`connect` bookkeeping below breaks the line
    wherever one gridline ends and the next begins.

    Arguments:
        grid: [pg.PlotCurveItem] Plot item whose data is replaced via setData().
        platepar: [Platepar object]
    """

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(platepar.JD)], [platepar.X_res / 2],
        [platepar.Y_res / 2], [1], platepar, extinction_correction=False)

    # azim_centre, alt_centre = trueRaDec2ApparentAltAz(RA_c, dec_c, platepar.JD, platepar.lat, platepar.lon)

    # Compute FOV size (diagonal, in degrees)
    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Determine gridline frequency (double the gridlines if the number is < 4eN)
    grid_freq = 10**np.floor(np.log10(fov_radius))
    if 10**(np.log10(fov_radius) - np.floor(np.log10(fov_radius))) < 4:
        grid_freq /= 2

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15

    # Grid plot density (angular step between consecutive points on one gridline)
    plot_dens = grid_freq / 100

    ra_grid_arr = np.arange(0, 360, grid_freq)
    dec_grid_arr = np.arange(-90, 90, grid_freq)

    # Accumulated polyline vertices and the indices where the line should be broken
    x = []
    y = []
    cuts = []

    # Plot the celestial parallel grid (circles)
    for dec_grid in dec_grid_arr:

        ra_grid_plot = np.arange(0, 360, plot_dens)
        dec_grid_plot = np.zeros_like(ra_grid_plot) + dec_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = trueRaDec2ApparentAltAz(
            ra_grid_plot, dec_grid_plot, platepar.JD, platepar.lat, platepar.lon,
            platepar.refraction)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot > 0)
        # & (angularSeparation(alt_centre, azim_centre, alt_grid_plot, az_grid_plot) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial parallel
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD, platepar)

        # Keep only the points that fall on the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (
            y_grid >= 0) & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)

        # Break the polyline after the last point of this gridline
        cuts.append(len(x) - 1)

    # Plot the celestial meridian grid (outward lines)
    for ra_grid in ra_grid_arr:

        dec_grid_plot = np.arange(-90, 90, plot_dens)  # how close to horizon
        ra_grid_plot = np.zeros_like(dec_grid_plot) + ra_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = trueRaDec2ApparentAltAz(
            ra_grid_plot, dec_grid_plot, platepar.JD, platepar.lat, platepar.lon,
            platepar.refraction)

        # Filter out points below the horizon
        filter_arr = (alt_grid_plot > 0)
        #& (angularSeparation(alt_centre, azim_centre, alt_grid_plot, az_grid_plot) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial meridian
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD, platepar)

        # Keep only the points that fall on the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (
            y_grid >= 0) & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)

        # Break the polyline after the last point of this gridline
        cuts.append(len(x) - 1)

    # horizon line
    az_horiz_arr = np.arange(0, 360, plot_dens)
    alt_horiz_arr = np.zeros_like(az_horiz_arr)
    ra_horiz_plot, dec_horiz_plot = apparentAltAz2TrueRADec(
        az_horiz_arr, alt_horiz_arr, platepar.JD, platepar.lat, platepar.lon, platepar.refraction)

    x_horiz, y_horiz = raDecToXYPP(ra_horiz_plot, dec_horiz_plot, platepar.JD, platepar)

    # Keep only the horizon points that fall on the image
    filter_arr = (x_horiz >= 0) & (x_horiz <= platepar.X_res) & (
        y_horiz >= 0) & (y_horiz <= platepar.Y_res)
    x_horiz = x_horiz[filter_arr]
    y_horiz = y_horiz[filter_arr]

    x.extend(x_horiz)
    y.extend(y_horiz)
    cuts.append(len(x) - 1)

    # Maximum pixel distance between consecutive points that are still joined
    r = 15  # adjust this parameter if you see extraneous lines

    # disconnect lines that are distant (unfinished circles had straight lines completing them)
    for i in range(len(x) - 1):
        if (x[i] - x[i + 1])**2 + (y[i] - y[i + 1])**2 > r**2:
            cuts.append(i)

    # convert cuts into a connect array (connect[i] == 0 breaks the segment after point i)
    connect = np.full(len(x), 1)
    if len(connect) > 0:
        for i in cuts:
            connect[i] = 0

    grid.setData(x=x, y=y, connect=connect)
def addEquatorialGrid(plt_handle, platepar, jd):
    """ Given the plot handle containing the image, the function plots an equatorial grid.

    Arguments:
        plt_handle: [pyplot instance]
        platepar: [Platepar object]
        jd: [float] Julian date of the image.

    Return:
        plt_handle: [pyplot instance] Pyplot instance with the added grid.
    """

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res / 2], [platepar.Y_res / 2], [1],
        platepar, extinction_correction=False)
    RA_c = RA_c[0]
    dec_c = dec_c[0]

    # Compute FOV centre alt/az
    azim_centre, alt_centre = raDec2AltAz(RA_c, dec_c, jd, platepar.lat, platepar.lon)

    # Compute the FOV size and its diagonal
    #   (computeFOVSize was previously called twice; call it once and reuse the result)
    fov_h, fov_v = computeFOVSize(platepar)
    fov_radius = np.hypot(fov_h, fov_v)

    # Determine gridline frequency (double the gridlines if the number is < 4eN)
    grid_freq = 10**np.floor(np.log10(fov_radius))
    if 10**(np.log10(fov_radius) - np.floor(np.log10(fov_radius))) < 4:
        grid_freq /= 2

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15

    # Grid plot density (angular step between consecutive points on one gridline)
    plot_dens = grid_freq / 100

    # Compute the range of declinations to consider
    dec_min = platepar.dec_d - fov_radius / 2
    if dec_min < -90:
        dec_min = -90

    dec_max = platepar.dec_d + fov_radius / 2
    if dec_max > 90:
        dec_max = 90

    ra_grid_arr = np.arange(0, 360, grid_freq)
    dec_grid_arr = np.arange(-90, 90, grid_freq)

    # Filter out the dec grid for min/max declination
    dec_grid_arr = dec_grid_arr[(dec_grid_arr >= dec_min) & (dec_grid_arr <= dec_max)]

    # Plot the celestial parallel grid
    for dec_grid in dec_grid_arr:

        ra_grid_plot = np.arange(0, 360, plot_dens)
        dec_grid_plot = np.zeros_like(ra_grid_plot) + dec_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = raDec2AltAz_vect(ra_grid_plot, dec_grid_plot, jd, \
            platepar.lat, platepar.lon)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot > 0) & (np.degrees(angularSeparation(np.radians(alt_centre), \
            np.radians(azim_centre), np.radians(alt_grid_plot),
            np.radians(az_grid_plot))) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Find gaps in continuity and break up plotting individual lines, so a gridline leaving
        #   and re-entering the FOV isn't joined by a straight segment
        gap_indices = np.argwhere(np.abs(ra_grid_plot[1:] - ra_grid_plot[:-1]) > fov_radius)
        if len(gap_indices):

            ra_grid_plot_list = []
            dec_grid_plot_list = []

            # Separate gridlines with large gaps
            prev_gap_indx = 0
            for entry in gap_indices:
                gap_indx = entry[0]

                ra_grid_plot_list.append(ra_grid_plot[prev_gap_indx:gap_indx + 1])
                dec_grid_plot_list.append(dec_grid_plot[prev_gap_indx:gap_indx + 1])

                prev_gap_indx = gap_indx

            # Add the last segment
            # NOTE(review): this slice drops the very last point of the gridline; presumably
            #   cosmetic - confirm whether [prev_gap_indx + 1:] was intended
            ra_grid_plot_list.append(ra_grid_plot[prev_gap_indx + 1:-1])
            dec_grid_plot_list.append(dec_grid_plot[prev_gap_indx + 1:-1])

        else:
            ra_grid_plot_list = [ra_grid_plot]
            dec_grid_plot_list = [dec_grid_plot]

        # Plot all grid segments
        for ra_grid_plot, dec_grid_plot in zip(ra_grid_plot_list, dec_grid_plot_list):

            # Compute image coordinates for every grid celestial parallel
            x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, jd, platepar)

            # Plot the grid
            plt_handle.plot(x_grid, y_grid, color='w', alpha=0.2, zorder=2, linewidth=0.5,
                linestyle='dotted')

    # Plot the celestial meridian grid
    for ra_grid in ra_grid_arr:

        dec_grid_plot = np.arange(-90, 90, plot_dens)
        ra_grid_plot = np.zeros_like(dec_grid_plot) + ra_grid

        # Filter out the dec grid
        filter_arr = (dec_grid_plot >= dec_min) & (dec_grid_plot <= dec_max)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute alt/az
        az_grid_plot, alt_grid_plot = raDec2AltAz_vect(ra_grid_plot, dec_grid_plot, jd, \
            platepar.lat, platepar.lon)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot > 0) & (np.degrees(angularSeparation(np.radians(alt_centre), \
            np.radians(azim_centre), np.radians(alt_grid_plot),
            np.radians(az_grid_plot))) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial meridian
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, jd, platepar)

        # Plot the grid
        plt_handle.plot(x_grid, y_grid, color='w', alpha=0.2, zorder=2, linewidth=0.5,
            linestyle='dotted')

    return plt_handle
def fovArea(platepar, mask=None, area_ht=100000, side_points=10, elev_limit=5):
    """ Given a platepar file and a mask file, compute geo points of the FOV at the given height.

    Arguments:
        platepar: [Platepar object]

    Keyword arguments:
        mask: [Mask object] Mask object, None by default.
        area_ht: [float] Height in meters of the computed area.
        side_points: [int] How many points to use to evaluate the FOV on each side of the image.
            Normalized to the longest side.
        elev_limit: [float] Limit of elevation above horizon (deg). 5 degrees by default.
            Added so callers such as collectingArea, which passes elev_limit, don't raise a
            TypeError; the default preserves the previous hardcoded 5 deg behaviour.

    Return:
        [list] A list of points for every side of the image, and every side is a list of
            (lat, lon, elev) describing the sides of the FOV area. Values are in degrees and
            meters.
    """

    # If the mask is not given, make a dummy mask with all white pixels
    if mask is None:
        mask = MaskStructure(255 + np.zeros((platepar.Y_res, platepar.X_res), dtype=np.uint8))

    # Compute the number of points for the sides (normalized to the longest side)
    longer_side_points = side_points
    shorter_side_points = int(np.ceil(side_points*platepar.Y_res/platepar.X_res))

    # Define operations for each side (number of points, axis of sampling, sampling start,
    #   direction of sampling, reverse sampling direction)
    side_operations = [
        [shorter_side_points, 'y', 0, 1, False],                   # left
        [longer_side_points, 'x', platepar.Y_res - 1, -1, False],  # bottom
        [shorter_side_points, 'y', platepar.X_res - 1, -1, True],  # right
        [longer_side_points, 'x', 0, 1, True],                     # up
    ]

    # Sample the points on image borders
    side_points_list = []
    for n_sample, axis, c0, sampling_direction, reverse_sampling in side_operations:

        # Reverse some ordering to make the sampling counter-clockwise, starting in the top-left
        #   corner
        sampling_offsets = range(n_sample + 1)
        if reverse_sampling:
            sampling_offsets = reversed(sampling_offsets)

        # Sample points on every side
        side_points = []
        for i_sample in sampling_offsets:

            # Compute x, y coordinate of the sampled pixel
            if axis == 'x':
                axis_side = platepar.X_res
                other_axis_side = platepar.Y_res
                x0 = int((i_sample/n_sample)*(axis_side - 1))
                y0 = c0
            else:
                axis_side = platepar.Y_res
                other_axis_side = platepar.X_res
                x0 = c0
                y0 = int((i_sample/n_sample)*(axis_side - 1))

            # Find a pixel position along the axis that is not masked using increments of 10 px
            unmasked_point_found = False
            for mask_offset in np.arange(0, other_axis_side, 10):

                # Compute the current pixel position
                if axis == 'x':
                    x = x0
                    y = y0 + sampling_direction*mask_offset
                else:
                    x = x0 + sampling_direction*mask_offset
                    y = y0

                # If the position is not masked, stop searching for an unmasked point
                if mask.img[y, x] > 0:
                    unmasked_point_found = True
                    break

            # Find azimuth and altitude at the given pixel, if an unmasked pixel was found along
            #   this line
            if unmasked_point_found:

                # Compute RA/Dec in J2000 of the image point, at J2000 epoch time so we don't
                #   have to precess
                _, ra, dec, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], [x], [y], [1], platepar, \
                    extinction_correction=False)

                # Compute alt/az of the point
                azim, alt = raDec2AltAz(ra[0], dec[0], J2000_JD.days, platepar.lat, platepar.lon)

                # Limit the elevation to elev_limit degrees above the horizon
                if alt < elev_limit:
                    alt = elev_limit

                # Compute the geo location of the point along the line of sight
                # NOTE(review): xyHt2Geo unpacks 4 values from AEH2LatLonAlt while 3 are unpacked
                #   here - confirm against the definition of AEH2LatLonAlt
                p_lat, p_lon, p_elev = AEH2LatLonAlt(azim, alt, area_ht, platepar.lat, \
                    platepar.lon, platepar.elev)

                side_points.append([x, y, p_lat, p_lon, p_elev])

        # Add points from every side to the list (store a copy)
        side_points_list.append(list(side_points))

    # Postprocess the point list by removing points which intersect points on the previous side
    side_points_list_filtered = []
    for i, (n_sample, axis, c0, sampling_direction, reverse_sampling) in \
        enumerate(side_operations):

        # Get the current and previous points list (for i = 0, index -1 wraps to the last side)
        side_points = side_points_list[i]
        side_points_prev = side_points_list[i - 1]

        # Remove all points from the list that intersect points on the previous side
        side_points_filtered = []
        for x, y, p_lat, p_lon, p_elev in side_points:

            # Check all points from the previous side
            skip_point = False
            for entry_prev in side_points_prev:

                x_prev, y_prev = entry_prev[:2]

                # The intersection test depends on the sampling axis and direction of the side
                if axis == 'x':
                    if reverse_sampling:
                        if (y_prev < y) and (x_prev < x):
                            skip_point = True
                            break
                    else:
                        if (y_prev > y) and (x_prev > x):
                            skip_point = True
                            break
                else:
                    if reverse_sampling:
                        if (y_prev < y) and (x_prev > x):
                            skip_point = True
                            break
                    else:
                        if (y_prev > y) and (x_prev < x):
                            skip_point = True
                            break

            # If the point should not be skipped, add it to the final list
            if not skip_point:
                side_points_filtered.append([p_lat, p_lon, p_elev])

        side_points_list_filtered.append(side_points_filtered)

    return side_points_list_filtered
def trackStack(dir_path, config, border=5, background_compensation=True, hide_plot=False):
    """ Generate a stack with aligned stars, so the sky appears static. The folder should have a
        platepars_all_recalibrated.json file.

    Arguments:
        dir_path: [str] Path to the directory with image files.
        config: [Config instance]

    Keyword arguments:
        border: [int] Border around the image to exclude (px).
        background_compensation: [bool] Normalize the background by applying a median filter to
            avepixel and use it as a flat field. Slows down the procedure and may sometimes
            introduce artifacts. True by default.
        hide_plot: [bool] Don't show the stack on the screen after saving it. False by default.

    Return:
        [bool] False if the required files were not found, None otherwise.
    """

    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file is not None:
        with open(os.path.join(dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print('Loaded recalibrated platepars JSON file for the calibration report...')

    ### ###

    # If the recalib platepars is not found, stop
    if recalibrated_platepars is None:
        print("The {:s} file was not found!".format(config.platepars_recalibrated_name))
        return False

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)

    # Take the platepar with the middle time as the reference one
    ff_found_list = []
    jd_list = []
    for ff_name_temp in recalibrated_platepars:
        if ff_name_temp in ff_list:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)

            jd_list.append(jd)
            ff_found_list.append(ff_name_temp)

    # At least two FF files are needed for a stack
    if len(jd_list) < 2:
        print("Not more than 1 FF image!")
        return False

    # Take the FF file with the middle JD
    jd_list = np.array(jd_list)
    jd_middle = np.mean(jd_list)
    jd_mean_index = np.argmin(np.abs(jd_list - jd_middle))
    ff_mid = ff_found_list[jd_mean_index]

    # Load the middle platepar as the reference one
    pp_ref = Platepar()
    pp_ref.loadFromDict(recalibrated_platepars[ff_mid], use_flat=config.use_flat)

    # Try loading the mask
    mask_path = None
    if os.path.exists(os.path.join(dir_path, config.mask_file)):
        mask_path = os.path.join(dir_path, config.mask_file)

    # Try loading the default mask
    elif os.path.exists(config.mask_file):
        mask_path = os.path.abspath(config.mask_file)

    # Load the mask if given
    mask = None
    if mask_path is not None:
        mask = loadMask(mask_path)
        print("Loaded mask:", mask_path)

    # If the shape of the mask doesn't fit, init an empty mask
    if mask is not None:
        if (mask.img.shape[0] != pp_ref.Y_res) or (mask.img.shape[1] != pp_ref.X_res):
            print("Mask is of wrong shape!")
            mask = None

    if mask is None:
        mask = MaskStructure(255 + np.zeros((pp_ref.Y_res, pp_ref.X_res), dtype=np.uint8))

    # Compute the middle RA/Dec of the reference platepar
    _, ra_temp, dec_temp, _ = xyToRaDecPP([jd2Date(jd_middle)], [pp_ref.X_res/2], \
        [pp_ref.Y_res/2], [1], pp_ref, extinction_correction=False)
    ra_mid, dec_mid = ra_temp[0], dec_temp[0]

    ### Go through all FF files and find RA/Dec of image corners to find the size of the stack
    ###   image ###

    # List of corners
    x_corns = [0, pp_ref.X_res, 0, pp_ref.X_res]
    y_corns = [0, 0, pp_ref.Y_res, pp_ref.Y_res]

    ra_list = []
    dec_list = []
    for ff_temp in ff_found_list:

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_temp], use_flat=config.use_flat)

        for x_c, y_c in zip(x_corns, y_corns):

            # BUGFIX: map the corners through the per-file platepar (pp_temp) that was just
            #   loaded, not the reference one (pp_ref) - otherwise every image used the same
            #   solution and the per-file recalibration was ignored
            _, ra_temp, dec_temp, _ = xyToRaDecPP(
                [getMiddleTimeFF(ff_temp, config.fps, ret_milliseconds=True)], [x_c], [y_c], [1],
                pp_temp, extinction_correction=False)
            ra_c, dec_c = ra_temp[0], dec_temp[0]

            ra_list.append(ra_c)
            dec_list.append(dec_c)

    # Compute the angular separation from the middle equatorial coordinates of the reference
    #   image to all RA/Dec corner coordinates
    ang_sep_list = []
    for ra_c, dec_c in zip(ra_list, dec_list):
        ang_sep = np.degrees(angularSeparation(np.radians(ra_mid), np.radians(dec_mid), \
            np.radians(ra_c), np.radians(dec_c)))
        ang_sep_list.append(ang_sep)

    # Find the maximum angular separation and compute the image size using the plate scale
    #   The image size will be resampled to 1/2 of the original size to avoid interpolation
    scale = 0.5
    ang_sep_max = np.max(ang_sep_list)
    img_size = int(scale*2*ang_sep_max*pp_ref.F_scale)

    ### ###

    # Create the stack platepar with no distortion and a large image size
    pp_stack = copy.deepcopy(pp_ref)
    pp_stack.resetDistortionParameters()
    pp_stack.X_res = img_size
    pp_stack.Y_res = img_size
    pp_stack.F_scale *= scale
    pp_stack.refraction = False

    # Init the stack images (sum, counter, and deaveraged max)
    avg_stack_sum = np.zeros((img_size, img_size), dtype=float)
    avg_stack_count = np.zeros((img_size, img_size), dtype=int)
    max_deaveraged = np.zeros((img_size, img_size), dtype=np.uint8)

    # Load individual FFs and map them to the stack
    for i, ff_name in enumerate(ff_found_list):

        print("Stacking {:s}, {:.1f}% done".format(ff_name, 100*i/len(ff_found_list)))

        # Read the FF file
        ff = readFF(dir_path, ff_name)

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_name], use_flat=config.use_flat)

        # Make a list of X and Y image coordinates (excluding the border)
        x_coords, y_coords = np.meshgrid(np.arange(border, pp_ref.X_res - border), \
            np.arange(border, pp_ref.Y_res - border))
        x_coords = x_coords.ravel()
        y_coords = y_coords.ravel()

        # Map image pixels to sky
        jd_arr, ra_coords, dec_coords, _ = xyToRaDecPP(
            len(x_coords)*[getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)], x_coords,
            y_coords, len(x_coords)*[1], pp_temp, extinction_correction=False)

        # Map sky coordinates to stack image coordinates
        stack_x, stack_y = raDecToXYPP(ra_coords, dec_coords, jd_middle, pp_stack)

        # Round pixel coordinates
        stack_x = np.round(stack_x, decimals=0).astype(int)
        stack_y = np.round(stack_y, decimals=0).astype(int)

        # Cut the image to limits
        filter_arr = (stack_x > 0) & (stack_x < img_size) & (stack_y > 0) & (stack_y < img_size)
        x_coords = x_coords[filter_arr].astype(int)
        y_coords = y_coords[filter_arr].astype(int)
        stack_x = stack_x[filter_arr]
        stack_y = stack_y[filter_arr]

        # Apply the mask to maxpixel and avepixel
        maxpixel = copy.deepcopy(ff.maxpixel)
        maxpixel[mask.img == 0] = 0
        avepixel = copy.deepcopy(ff.avepixel)
        avepixel[mask.img == 0] = 0

        # Compute deaveraged maxpixel
        max_deavg = maxpixel - avepixel

        # Normalize the background brightness by applying a large-kernel median filter to avepixel
        if background_compensation:

            # Apply a median filter to the avepixel to get an estimate of the background
            #   brightness
            avepixel_median = cv2.medianBlur(ff.avepixel, 301)

            # Make sure to avoid zero division
            avepixel_median[avepixel_median < 1] = 1

            # Normalize the avepixel by dividing out the background brightness
            avepixel = avepixel.astype(float)
            avepixel /= avepixel_median
            avepixel *= 50  # Normalize to a good background value, which is usually 50
            avepixel = np.clip(avepixel, 0, 255)
            avepixel = avepixel.astype(np.uint8)

        # Add the average pixel to the sum
        avg_stack_sum[stack_y, stack_x] += avepixel[y_coords, x_coords]

        # Increment the counter image where the avepixel is not zero
        ones_img = np.ones_like(avepixel)
        ones_img[avepixel == 0] = 0
        avg_stack_count[stack_y, stack_x] += ones_img[y_coords, x_coords]

        # Set pixel values to the stack, only take the max values
        max_deaveraged[stack_y, stack_x] = np.max(np.dstack(
            [max_deaveraged[stack_y, stack_x], max_deavg[y_coords, x_coords]]), axis=2)

    # Compute the blended avepixel background
    stack_img = avg_stack_sum
    stack_img[avg_stack_count > 0] /= avg_stack_count[avg_stack_count > 0]
    stack_img += max_deaveraged
    stack_img = np.clip(stack_img, 0, 255)
    stack_img = stack_img.astype(np.uint8)

    # Crop the image to the non-empty region
    non_empty_columns = np.where(stack_img.max(axis=0) > 0)[0]
    non_empty_rows = np.where(stack_img.max(axis=1) > 0)[0]
    crop_box = (np.min(non_empty_rows), np.max(non_empty_rows), np.min(non_empty_columns), \
        np.max(non_empty_columns))
    stack_img = stack_img[crop_box[0]:crop_box[1] + 1, crop_box[2]:crop_box[3] + 1]

    ### Plot and save the stack ###

    dpi = 200
    plt.figure(figsize=(stack_img.shape[1]/dpi, stack_img.shape[0]/dpi), dpi=dpi)

    plt.imshow(stack_img, cmap='gray', vmin=0, vmax=256, interpolation='nearest')

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, stack_img.shape[1]])
    plt.ylim([stack_img.shape[0], 0])

    # Remove the margins (top and right are set to 0.9999, as setting them to 1.0 makes the image
    #   blank in some matplotlib versions)
    plt.subplots_adjust(left=0, bottom=0, right=0.9999, top=0.9999, wspace=0, hspace=0)

    # Save the stack image next to the input data
    stack_img_path = os.path.join(dir_path, os.path.basename(dir_path) + "_track_stack.jpg")
    plt.savefig(stack_img_path, bbox_inches='tight', pad_inches=0, dpi=dpi)

    ### ###

    if hide_plot is False:
        plt.show()
def matchStarsResiduals(config, platepar, catalog_stars, star_dict, match_radius, ret_nmatch=False, \
    sky_coords=False, lim_mag=None, verbose=False):
    """ Match the image and catalog stars with the given astrometry solution and estimate the residuals
        between them.

    NOTE(review): this definition is shadowed by a later definition of the same name further down in
        this file - only the later one is reachable at import time.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Astrometry parameters.
        catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
        star_dict: [dict] A dictionary where the keys are JDs when the stars were recorded and values are
            2D list of stars, each entry is (X, Y, bg_level, level). The minimum number of matched stars
            per image is taken from config.min_matched_stars.
        match_radius: [float] Maximum radius for star matching (pixels).

    Keyword arguments:
        ret_nmatch: [bool] If True, the function returns the number of matched stars and the average
            deviation. False by default.
        sky_coords: [bool] If True, sky coordinate residuals in RA, dec will be used to compute the cost
            function, not image coordinates.
        lim_mag: [float] Override the limiting magnitude from config. None by default.
        verbose: [bool] Print results. False by default.

    Return:
        cost: [float] The cost function which weights the number of matched stars and the average deviation.
            If ret_nmatch is True, the tuple (n_matched, avg_dist, cost, matched_stars) is returned instead.
    """

    if lim_mag is None:
        lim_mag = config.catalog_mag_limit


    # Estimate the FOV radius as half the image diagonal, in degrees
    fov_w = platepar.X_res/platepar.F_scale
    fov_h = platepar.Y_res/platepar.F_scale
    fov_radius = np.sqrt((fov_w/2)**2 + (fov_h/2)**2)


    # Dictionary containing the matched stars, the keys are JDs of every image
    matched_stars = {}

    # Go through every FF image and its stars
    for jd in star_dict:

        # Estimate RA,dec of the centre of the FOV
        _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res/2], [platepar.Y_res/2], [1],
            platepar)

        RA_c = RA_c[0]
        dec_c = dec_c[0]

        # Get stars from the catalog around the defined center in a given radius
        _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, fov_radius, lim_mag)
        ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

        # Extract stars for the given Julian date
        stars_list = star_dict[jd]
        stars_list = np.array(stars_list)

        # Convert all catalog stars to image coordinates
        cat_x_array, cat_y_array = raDecToXYPP(ra_catalog, dec_catalog, jd, platepar)

        # Take only those stars which are within the FOV
        x_indices = np.argwhere((cat_x_array >= 0) & (cat_x_array < platepar.X_res))
        y_indices = np.argwhere((cat_y_array >= 0) & (cat_y_array < platepar.Y_res))
        cat_good_indices = np.intersect1d(x_indices, y_indices).astype(np.uint32)

        # Match image and catalog stars
        matched_indices = matchStars(stars_list, cat_x_array, cat_y_array, cat_good_indices, match_radius)

        # Skip this image if not enough stars were matched
        if len(matched_indices) < config.min_matched_stars:
            continue

        matched_indices = np.array(matched_indices)
        matched_img_inds, matched_cat_inds, dist_list = matched_indices.T

        # Extract data from matched stars
        # (fix: np.int was deprecated in NumPy 1.20 and removed in 1.24 - use the builtin int)
        matched_img_stars = stars_list[matched_img_inds.astype(int)]
        matched_cat_stars = extracted_catalog[matched_cat_inds.astype(int)]

        # Put the matched stars to a dictionary
        matched_stars[jd] = [matched_img_stars, matched_cat_stars, dist_list]


    # If residuals on the image should be computed
    if not sky_coords:

        unit_label = 'px'

        # Pool the pixel distances of all matched pairs over all images
        global_dist_list = []
        for jd in matched_stars:

            _, _, dist_list = matched_stars[jd]

            global_dist_list += dist_list.tolist()


    # Compute the residuals on the sky
    else:

        unit_label = 'arcmin'

        global_dist_list = []

        # Go through all matched stars
        for jd in matched_stars:

            matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]

            # Go through all stars on the image
            for img_star_entry, cat_star_entry in zip(matched_img_stars, matched_cat_stars):

                # Extract star coords (image stars are stored as (row, column))
                star_y = img_star_entry[0]
                star_x = img_star_entry[1]
                cat_ra = cat_star_entry[0]
                cat_dec = cat_star_entry[1]

                # Convert image coordinates to RA/Dec
                _, star_ra, star_dec, _ = xyToRaDecPP([jd2Date(jd)], [star_x], [star_y], [1], \
                    platepar)

                # Compute angular distance between the predicted and the catalog position
                ang_dist = np.degrees(angularSeparation(np.radians(cat_ra), np.radians(cat_dec), \
                    np.radians(star_ra[0]), np.radians(star_dec[0])))

                # Store the angular separation in arc minutes
                global_dist_list.append(ang_dist*60)


    # Number of matched stars
    n_matched = len(global_dist_list)

    if n_matched == 0:

        if verbose:
            print('No matched stars with radius {:.2f} px!'.format(match_radius))

        if ret_nmatch:
            return 0, 9999.0, 9999.0, {}
        else:
            return 9999.0

    # Calculate the average distance; the cost grows with the deviation and shrinks with the number of
    # matched stars, so more matches at the same accuracy are rewarded
    avg_dist = np.mean(global_dist_list)
    cost = (avg_dist**2)*(1.0/np.sqrt(n_matched + 1))

    if verbose:
        print('Matched {:d} stars with radius of {:.2f} px'.format(n_matched, match_radius))
        print('Avg dist', avg_dist, unit_label)
        print('Cost:', cost)
        print('-----')

    if ret_nmatch:
        return n_matched, avg_dist, cost, matched_stars
    else:
        return cost
def matchStarsResiduals(config, platepar, catalog_stars, star_dict, match_radius, ret_nmatch=False, \
    sky_coords=False, lim_mag=None, verbose=False):
    """ Match the image and catalog stars with the given astrometry solution and estimate the residuals
        between them.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Astrometry parameters.
        catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
        star_dict: [dict] A dictionary where the keys are JDs when the stars were recorded and values are
            2D list of stars, each entry is (X, Y, bg_level, level, fwhm). The minimum number of matched
            stars per image is taken from config.min_matched_stars.
        match_radius: [float] Maximum radius for star matching (pixels).

    Keyword arguments:
        ret_nmatch: [bool] If True, the function returns the number of matched stars and the average
            deviation. False by default.
        sky_coords: [bool] If True, sky coordinate residuals in RA, dec will be used to compute the cost
            function, not image coordinates.
        lim_mag: [float] Override the limiting magnitude from config. None by default.
        verbose: [bool] Print results. False by default.

    Return:
        cost: [float] The cost function which weights the number of matched stars and the average deviation.
            If ret_nmatch is True, the tuple (n_matched, avg_dist, cost, matched_stars) is returned instead.
    """

    if lim_mag is None:
        lim_mag = config.catalog_mag_limit


    # Estimate the FOV radius
    fov_radius = getFOVSelectionRadius(platepar)

    # Dictionary containing the matched stars, the keys are JDs of every image
    matched_stars = {}

    # Go through every FF image and its stars
    for jd in star_dict:

        # Estimate RA,dec of the centre of the FOV
        _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
            platepar, extinction_correction=False)

        RA_c = RA_c[0]
        dec_c = dec_c[0]

        # Get stars from the catalog around the defined center in a given radius
        _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, jd, platepar.lat, platepar.lon, \
            fov_radius, lim_mag)
        ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

        # Extract stars for the given Julian date
        stars_list = star_dict[jd]
        stars_list = np.array(stars_list)

        # Convert all catalog stars to image coordinates
        cat_x_array, cat_y_array = raDecToXYPP(ra_catalog, dec_catalog, jd, platepar)

        # Take only those stars which are within the FOV
        x_indices = np.argwhere((cat_x_array >= 0) & (cat_x_array < platepar.X_res))
        y_indices = np.argwhere((cat_y_array >= 0) & (cat_y_array < platepar.Y_res))
        cat_good_indices = np.intersect1d(x_indices, y_indices).astype(np.uint32)

        # Match image and catalog stars
        matched_indices = matchStars(stars_list, cat_x_array, cat_y_array, cat_good_indices, match_radius)

        # Skip this image if not enough stars were matched
        if len(matched_indices) < config.min_matched_stars:
            continue

        matched_indices = np.array(matched_indices)
        matched_img_inds, matched_cat_inds, dist_list = matched_indices.T

        # Extract data from matched stars
        # (fix: np.int was deprecated in NumPy 1.20 and removed in 1.24 - use the builtin int)
        matched_img_stars = stars_list[matched_img_inds.astype(int)]
        matched_cat_stars = extracted_catalog[matched_cat_inds.astype(int)]

        # Put the matched stars to a dictionary
        matched_stars[jd] = [matched_img_stars, matched_cat_stars, dist_list]


    # If residuals on the image should be computed
    if not sky_coords:

        unit_label = 'px'

        # Pool the pixel distances of all matched pairs over all images
        global_dist_list = []
        for jd in matched_stars:

            _, _, dist_list = matched_stars[jd]

            global_dist_list += dist_list.tolist()


    # Compute the residuals on the sky
    else:

        unit_label = 'arcmin'

        global_dist_list = []

        # Go through all matched stars
        for jd in matched_stars:

            matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]

            # Go through all stars on the image
            for img_star_entry, cat_star_entry in zip(matched_img_stars, matched_cat_stars):

                # Extract star coords (image stars are stored as (row, column))
                star_y = img_star_entry[0]
                star_x = img_star_entry[1]
                cat_ra = cat_star_entry[0]
                cat_dec = cat_star_entry[1]

                # Convert image coordinates to RA/Dec
                _, star_ra, star_dec, _ = xyToRaDecPP([jd2Date(jd)], [star_x], [star_y], [1], \
                    platepar, extinction_correction=False)

                # Compute angular distance between the predicted and the catalog position
                ang_dist = np.degrees(angularSeparation(np.radians(cat_ra), np.radians(cat_dec), \
                    np.radians(star_ra[0]), np.radians(star_dec[0])))

                # Store the angular separation in arc minutes
                global_dist_list.append(ang_dist*60)


    # Number of matched stars
    n_matched = len(global_dist_list)

    if n_matched == 0:

        if verbose:
            print('No matched stars with radius {:.1f} px!'.format(match_radius))

        if ret_nmatch:
            return 0, 9999.0, 9999.0, {}
        else:
            return 9999.0

    # Calculate the average (median, robust to outliers) distance; the cost grows with the deviation
    # and shrinks with the number of matched stars
    avg_dist = np.median(global_dist_list)
    cost = (avg_dist**2)*(1.0/np.sqrt(n_matched + 1))

    if verbose:
        print()
        print("Matched {:d} stars with radius of {:.1f} px".format(n_matched, match_radius))
        print(" Average distance = {:.3f} {:s}".format(avg_dist, unit_label))
        print(" Cost function = {:.5f}".format(cost))

    if ret_nmatch:
        return n_matched, avg_dist, cost, matched_stars
    else:
        return cost
def generateCalibrationReport(config, night_dir_path, match_radius=2.0, platepar=None, show_graphs=False):
    """ Given the folder of the night, find the Calstars file, check the star fit and generate a report
        with the quality of the calibration. The report contains information about both the astrometry and
        the photometry calibration. Graphs will be saved in the given directory of the night.

    Arguments:
        config: [Config instance]
        night_dir_path: [str] Full path to the directory of the night.

    Keyword arguments:
        match_radius: [float] Match radius for star matching between image and catalog stars (px).
        platepar: [Platepar instance] Use this platepar instead of finding one in the folder.
        show_graphs: [bool] Show the graphs on the screen. False by default.

    Return:
        None
    """

    # Find the CALSTARS file in the given folder
    # (fix: the loop variable previously kept the *last* directory entry when no CALSTARS file existed,
    # so the "is None" guard below never fired - assign explicitly only on a match)
    calstars_file = None
    for file_name in os.listdir(night_dir_path):
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None

    # Load the calstars file
    star_list = readCALSTARS(night_dir_path, calstars_file)


    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(night_dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file:
        with open(os.path.join(night_dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)

        print('Loaded recalibrated platepars JSON file for the calibration report...')

    ### ###


    ### Load the platepar file ###

    # Find the platepar file in the given directory if it was not given
    if platepar is None:

        # Find the platepar file
        platepar_file = None
        for file_name in os.listdir(night_dir_path):
            if file_name == config.platepar_name:
                platepar_file = file_name
                break

        if platepar_file is None:
            print('The platepar cannot be found in the night directory!')
            return None

        # Load the platepar file
        platepar = Platepar()
        platepar.read(os.path.join(night_dir_path, platepar_file))

    ### ###


    night_name = os.path.split(night_dir_path.strip(os.sep))[1]


    # Go one mag deeper than in the config
    lim_mag = config.catalog_mag_limit + 1

    # Load catalog stars (load one magnitude deeper)
    catalog_stars, mag_band_str, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
        config.star_catalog_path, config.star_catalog_file, lim_mag=lim_mag, \
        mag_band_ratios=config.star_catalog_band_ratios)


    ### Take only those CALSTARS entries for which FF files exist in the folder ###

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(night_dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)


    # Filter out calstars entries, generate a star dictionary where the keys are JDs of FFs
    star_dict = {}
    ff_dict = {}
    for entry in star_list:

        ff_name, star_data = entry

        # Check if the FF from CALSTARS exists in the folder
        if ff_name not in ff_list:
            continue

        dt = getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*dt)

        # Add the time and the stars to the dict
        star_dict[jd] = star_data
        ff_dict[jd] = ff_name

    ### ###

    # If there are no FF files in the directory, don't generate a report
    if len(star_dict) == 0:
        print('No FF files from the CALSTARS file in the directory!')
        return None


    # If the recalibrated platepars file exists, take the one with the most stars
    max_jd = 0

    # (fix: initialize here - previously this name was only assigned inside the branch below, which
    # raised a NameError further down when no recalibrated platepars file was present)
    using_recalib_platepars = False

    if recalibrated_platepars is not None:

        max_stars = 0
        for ff_name_temp in recalibrated_platepars:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)

            # Check that this file exists in CALSTARS and the list of FF files
            if (jd not in star_dict) or (jd not in ff_dict):
                continue

            # Check if the number of stars on this FF file is larger than the one before
            if len(star_dict[jd]) > max_stars:
                max_jd = jd
                max_stars = len(star_dict[jd])


        # Set a flag to indicate if using recalibrated platepars has failed
        if max_jd == 0:
            using_recalib_platepars = False
        else:

            print('Using recalibrated platepars, file:', ff_dict[max_jd])
            using_recalib_platepars = True

            # Select the platepar where the FF file has the most stars
            platepar_dict = recalibrated_platepars[ff_dict[max_jd]]
            platepar = Platepar()
            platepar.loadFromDict(platepar_dict)

            filtered_star_dict = {max_jd: star_dict[max_jd]}

            # Match stars on the image with the stars in the catalog
            n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
                filtered_star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

            max_matched_stars = n_matched


    # Otherwise take the optimal FF file for evaluation
    if (recalibrated_platepars is None) or (not using_recalib_platepars):

        # If there are more than a set number of FF files to evaluate, choose only the ones with most
        # stars on the image
        if len(star_dict) > config.calstars_files_N:

            # Find JDs of FF files with most stars on them
            top_nstars_indices = np.argsort([len(x) for x in star_dict.values()])[::-1][:config.calstars_files_N \
                - 1]

            filtered_star_dict = {}
            for i in top_nstars_indices:
                filtered_star_dict[list(star_dict.keys())[i]] = list(star_dict.values())[i]

            star_dict = filtered_star_dict


        # Match stars on the image with the stars in the catalog
        n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
            star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)


    # If no recalibrated platepars were found, find the image with the largest number of matched stars
    if (not using_recalib_platepars) or (max_jd == 0):

        max_jd = 0
        max_matched_stars = 0
        for jd in matched_stars:

            _, _, distances = matched_stars[jd]

            if len(distances) > max_matched_stars:
                max_jd = jd
                max_matched_stars = len(distances)


        # If there are no matched stars, use the image with the largest number of detected stars
        if max_matched_stars <= 2:
            max_jd = max(star_dict, key=lambda x: len(star_dict[x]))
            distances = [np.inf]


    # Take the FF file with the largest number of matched stars
    ff_name = ff_dict[max_jd]

    # Load the FF file
    ff = readFF(night_dir_path, ff_name)
    img_h, img_w = ff.avepixel.shape

    dpi = 200
    plt.figure(figsize=(ff.avepixel.shape[1]/dpi, ff.avepixel.shape[0]/dpi), dpi=dpi)

    # Take the average pixel
    img = ff.avepixel

    # Slightly adjust the levels
    img = Image.adjustLevels(img, np.percentile(img, 1.0), 1.2, np.percentile(img, 99.99))

    plt.imshow(img, cmap='gray', interpolation='nearest')

    legend_handles = []


    # Plot detected stars
    for img_star in star_dict[max_jd]:

        y, x, _, _ = img_star

        rect_side = 5*match_radius
        square_patch = plt.Rectangle((x - rect_side/2, y - rect_side/2), rect_side, rect_side, color='g', \
            fill=False, label='Image stars')

        plt.gca().add_artist(square_patch)

    legend_handles.append(square_patch)


    # If there are matched stars, plot them
    if max_matched_stars > 2:

        # Take the solution with the largest number of matched stars
        image_stars, matched_catalog_stars, distances = matched_stars[max_jd]

        # Plot matched stars
        for img_star in image_stars:

            x, y, _, _ = img_star

            circle_patch = plt.Circle((y, x), radius=3*match_radius, color='y', fill=False, \
                label='Matched stars')

            plt.gca().add_artist(circle_patch)

        legend_handles.append(circle_patch)


        ### Plot match residuals ###

        # Compute predicted positions of matched image stars from the catalog
        x_predicted, y_predicted = raDecToXYPP(matched_catalog_stars[:, 0], \
            matched_catalog_stars[:, 1], max_jd, platepar)

        img_y, img_x, _, _ = image_stars.T

        delta_x = x_predicted - img_x
        delta_y = y_predicted - img_y

        # Compute image residual and angle of the error
        res_angle = np.arctan2(delta_y, delta_x)
        res_distance = np.sqrt(delta_x**2 + delta_y**2)

        # Calculate coordinates of the beginning of the residual line
        res_x_beg = img_x + 3*match_radius*np.cos(res_angle)
        res_y_beg = img_y + 3*match_radius*np.sin(res_angle)

        # Calculate coordinates of the end of the residual line (exaggerated 100x for visibility)
        res_x_end = img_x + 100*np.cos(res_angle)*res_distance
        res_y_end = img_y + 100*np.sin(res_angle)*res_distance

        # Plot the 100x residuals
        for i in range(len(x_predicted)):
            res_plot = plt.plot([res_x_beg[i], res_x_end[i]], [res_y_beg[i], res_y_end[i]], color='orange', \
                lw=0.5, label='100x residuals')

        legend_handles.append(res_plot[0])

        ### ###

    else:

        distances = [np.inf]

        # If there are no matched stars, plot large text in the middle of the screen
        plt.text(img_w/2, img_h/2, "NO MATCHED STARS!", color='r', alpha=0.5, fontsize=20, ha='center',
            va='center')


    ### Plot positions of catalog stars to the limiting magnitude of the faintest matched star + 1 mag ###

    # Find the faintest magnitude among matched stars
    if max_matched_stars > 2:
        faintest_mag = np.max(matched_catalog_stars[:, 2]) + 1

    else:
        # If there are no matched stars, use the limiting magnitude from config
        faintest_mag = config.catalog_mag_limit + 1


    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(max_jd)], [platepar.X_res/2], [platepar.Y_res/2], [1],
        platepar)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Get stars from the catalog around the defined center in a given radius
    _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, fov_radius, faintest_mag)
    ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

    # Compute image positions of all catalog stars that should be on the image
    x_catalog, y_catalog = raDecToXYPP(ra_catalog, dec_catalog, max_jd, platepar)

    # Filter all catalog stars outside the image
    temp_arr = np.c_[x_catalog, y_catalog, mag_catalog]
    temp_arr = temp_arr[temp_arr[:, 0] >= 0]
    temp_arr = temp_arr[temp_arr[:, 0] <= ff.avepixel.shape[1]]
    temp_arr = temp_arr[temp_arr[:, 1] >= 0]
    temp_arr = temp_arr[temp_arr[:, 1] <= ff.avepixel.shape[0]]
    x_catalog, y_catalog, mag_catalog = temp_arr.T

    # Plot catalog stars on the image (marker size scales with brightness)
    cat_stars_handle = plt.scatter(x_catalog, y_catalog, c='none', marker='D', lw=1.0, alpha=0.4, \
        s=((4.0 + (faintest_mag - mag_catalog))/3.0)**(2*2.512), edgecolor='r', label='Catalog stars')

    legend_handles.append(cat_stars_handle)

    ### ###


    # Add info text
    info_text = ff_dict[max_jd] + '\n' \
        + "Matched stars: {:d}/{:d}\n".format(max_matched_stars, len(star_dict[max_jd])) \
        + "Median distance: {:.2f} px\n".format(np.median(distances)) \
        + "Catalog limiting magnitude: {:.1f}".format(lim_mag)

    plt.text(10, 10, info_text, bbox=dict(facecolor='black', alpha=0.5), va='top', ha='left', fontsize=4, \
        color='w')

    legend = plt.legend(handles=legend_handles, prop={'size': 4}, loc='upper right')
    legend.get_frame().set_facecolor('k')
    legend.get_frame().set_edgecolor('k')
    for txt in legend.get_texts():
        txt.set_color('w')

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, ff.avepixel.shape[1]])
    plt.ylim([ff.avepixel.shape[0], 0])

    # Remove the margins
    plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)

    plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_astrometry.jpg'), \
        bbox_inches='tight', pad_inches=0, dpi=dpi)

    if show_graphs:
        plt.show()
    else:
        plt.clf()
        plt.close()


    if max_matched_stars > 2:

        ### Plot the photometry ###

        plt.figure(dpi=dpi)

        # Take only those stars which are inside the 3/4 of the shorter image axis from the center
        photom_selection_radius = np.min([img_h, img_w])/3
        filter_indices = ((image_stars[:, 0] - img_h/2)**2 + (image_stars[:, 1] \
            - img_w/2)**2) <= photom_selection_radius**2
        star_intensities = image_stars[filter_indices, 2]
        catalog_mags = matched_catalog_stars[filter_indices, 2]

        # Plot intensities of image stars
        plt.scatter(-2.5*np.log10(star_intensities), catalog_mags, s=5, c='r')

        # Fit the photometry on automated star intensities
        photom_offset, fit_stddev, _ = photometryFit(np.log10(star_intensities), catalog_mags)

        # Plot photometric offset from the platepar
        x_min, x_max = plt.gca().get_xlim()
        y_min, y_max = plt.gca().get_ylim()

        x_min_w = x_min - 3
        x_max_w = x_max + 3
        y_min_w = y_min - 3
        y_max_w = y_max + 3

        photometry_info = 'Platepar: {:+.2f}LSP {:+.2f} +/- {:.2f} \nGamma = {:.2f}'.format(platepar.mag_0, \
            platepar.mag_lev, platepar.mag_lev_stddev, platepar.gamma)

        # Plot the photometry calibration from the platepar
        logsum_arr = np.linspace(x_min_w, x_max_w, 10)
        plt.plot(logsum_arr, logsum_arr + platepar.mag_lev, label=photometry_info, linestyle='--', \
            color='k', alpha=0.5)

        # Plot the fitted photometry calibration
        fit_info = "Fit: {:+.2f}LSP {:+.2f} +/- {:.2f}".format(-2.5, photom_offset, fit_stddev)
        plt.plot(logsum_arr, logsum_arr + photom_offset, label=fit_info, linestyle='--', color='red',
            alpha=0.5)

        plt.legend()

        plt.ylabel("Catalog magnitude ({:s})".format(mag_band_str))
        plt.xlabel("Uncalibrated magnitude")

        # Set wider axis limits
        plt.xlim(x_min_w, x_max_w)
        plt.ylim(y_min_w, y_max_w)

        plt.gca().invert_yaxis()
        plt.gca().invert_xaxis()

        plt.grid()

        plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_photometry.png'), dpi=150)


        if show_graphs:
            plt.show()
        else:
            plt.clf()
            plt.close()
def updateAzAltGrid(grid, platepar):
    """ Updates the values of grid to form an azimuth and altitude grid on a pyqtgraph plot.

    Arguments:
        grid: [pg.PlotCurveItem]
        platepar: [Platepar object]
    """

    ### COMPUTE FOV CENTRE ###

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(platepar.JD)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
        platepar, extinction_correction=False)

    # Compute alt/az of FOV centre
    azim_centre, alt_centre = trueRaDec2ApparentAltAz(RA_c[0], dec_c[0], platepar.JD, platepar.lat, \
        platepar.lon)

    ### ###

    # Compute FOV size
    fov_radius = getFOVSelectionRadius(platepar)

    # Determine gridline frequency (double the gridlines if the number is < 4eN)
    grid_freq = 10**np.floor(np.log10(2*fov_radius))
    if 10**(np.log10(2*fov_radius) - np.floor(np.log10(2*fov_radius))) < 4:
        grid_freq /= 2

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15

    # Grid plot density
    plot_dens = grid_freq/100

    # Generate a grid of all azimuths and altitudes
    alt_grid_arr = np.arange(0, 90, grid_freq)
    az_grid_arr = np.arange(0, 360, grid_freq)

    x = []
    y = []
    cuts = []

    # Altitude lines
    for alt_grid in alt_grid_arr:

        # Keep the altitude fixed and plot all azimuth lines
        az_grid_plot = np.arange(0, 360, plot_dens)
        alt_grid_plot = np.zeros_like(az_grid_plot) + alt_grid

        # Filter out all lines outside the FOV
        filter_arr = np.degrees(angularSeparation(np.radians(azim_centre), np.radians(alt_centre), \
            np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius

        az_grid_plot = az_grid_plot[filter_arr]
        alt_grid_plot = alt_grid_plot[filter_arr]

        # Compute image coordinates
        ra_grid_plot, dec_grid_plot = apparentAltAz2TrueRADec(az_grid_plot, alt_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD, platepar)

        # Filter out all points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (y_grid >= 0) \
            & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)


    # Azimuth lines
    for az_grid in az_grid_arr:

        # Keep the azimuth fixed and plot all altitude lines
        alt_grid_plot = np.arange(0, 90 + plot_dens, plot_dens)
        az_grid_plot = np.zeros_like(alt_grid_plot) + az_grid

        # Filter out all lines outside the FOV
        filter_arr = np.degrees(angularSeparation(np.radians(azim_centre), np.radians(alt_centre), \
            np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius

        az_grid_plot = az_grid_plot[filter_arr]
        alt_grid_plot = alt_grid_plot[filter_arr]

        # Compute image coordinates
        ra_grid_plot, dec_grid_plot = apparentAltAz2TrueRADec(az_grid_plot, alt_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD, platepar)

        # Filter out all points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (y_grid >= 0) \
            & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)


    r = 15  # adjust this parameter if you see extraneous lines

    # Disconnect lines that are distant (unfinished circles had straight lines completing them)
    for i in range(len(x) - 1):
        if (x[i] - x[i + 1])**2 + (y[i] - y[i + 1])**2 > r**2:
            cuts.append(i)

    connect = np.full(len(x), 1)

    # Guard against an empty grid (fix: when every line fell outside the FOV/image, cuts holds -1
    # entries and indexing the empty connect array raised an IndexError; the sibling updateRaDecGrid
    # already guards this case the same way)
    if len(connect) > 0:
        for i in cuts[:-1]:
            connect[i] = 0

    grid.setData(x=x, y=y, connect=connect)
def updateRaDecGrid(grid, platepar):
    """ Updates the values of grid to form a right ascension and declination grid on a pyqtgraph plot.

    Arguments:
        grid: [pg.PlotCurveItem]
        platepar: [Platepar object]
    """

    ### COMPUTE FOV CENTRE ###

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(platepar.JD)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
        platepar, extinction_correction=False)

    # Compute alt/az of FOV centre
    azim_centre, alt_centre = trueRaDec2ApparentAltAz(RA_c[0], dec_c[0], platepar.JD, platepar.lat, \
        platepar.lon)

    ### ###

    # Compute FOV size
    fov_radius = getFOVSelectionRadius(platepar)

    # Determine gridline frequency (double the gridlines if the number is < 4eN)
    grid_freq = 10**np.floor(np.log10(2*fov_radius))
    if 10**(np.log10(2*fov_radius) - np.floor(np.log10(2*fov_radius))) < 4:
        grid_freq /= 2

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15

    # Grid plot density
    plot_dens = grid_freq/100

    # Make an array of RA and Dec
    ra_grid_arr = np.arange(0, 360, grid_freq)
    dec_grid_arr = np.arange(-90, 90, grid_freq)

    x = []
    y = []
    cuts = []

    # Generate points for the celestial parallels grid
    for dec_grid in dec_grid_arr:

        # Keep the declination fixed and evaluate all right ascensions
        ra_grid_plot = np.arange(0, 360, plot_dens)
        dec_grid_plot = np.zeros_like(ra_grid_plot) + dec_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = trueRaDec2ApparentAltAz(ra_grid_plot, dec_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot >= 0) & (np.degrees(angularSeparation(np.radians(azim_centre), \
            np.radians(alt_centre), np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius)

        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial parallel
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD, platepar)

        # Filter out all points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (y_grid >= 0) \
            & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        # Add points to the list
        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)


    # Generate points for the celestial meridian grid
    for ra_grid in ra_grid_arr:

        # Keep the RA fixed and evaluate all declinations
        dec_grid_plot = np.arange(-90, 90 + plot_dens, plot_dens)
        ra_grid_plot = np.zeros_like(dec_grid_plot) + ra_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = trueRaDec2ApparentAltAz(ra_grid_plot, dec_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot >= 0) & (np.degrees(angularSeparation(np.radians(azim_centre), \
            np.radians(alt_centre), np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius)

        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial meridian
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD, platepar)

        # Filter out points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (y_grid >= 0) \
            & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)


    # Generate points for the horizon
    az_horiz_arr = np.arange(0, 360, plot_dens)
    alt_horiz_arr = np.zeros_like(az_horiz_arr)
    ra_horiz_plot, dec_horiz_plot = apparentAltAz2TrueRADec(az_horiz_arr, alt_horiz_arr, platepar.JD, \
        platepar.lat, platepar.lon, platepar.refraction)

    # Filter out all horizon points outside the FOV
    # (fix: the arguments were passed as (alt, az) here while every other angularSeparation call in
    # this module uses (az, alt); the function is not symmetric under swapping the two roles, so the
    # horizon was clipped with a wrong separation)
    filter_arr = np.degrees(angularSeparation(np.radians(azim_centre), np.radians(alt_centre), \
        np.radians(az_horiz_arr), np.radians(alt_horiz_arr))) <= fov_radius

    ra_horiz_plot = ra_horiz_plot[filter_arr]
    dec_horiz_plot = dec_horiz_plot[filter_arr]

    # Compute image coordinates of the horizon
    x_horiz, y_horiz = raDecToXYPP(ra_horiz_plot, dec_horiz_plot, platepar.JD, platepar)

    # Filter out all horizon points outside the image
    filter_arr = (x_horiz >= 0) & (x_horiz <= platepar.X_res) & (y_horiz >= 0) \
        & (y_horiz <= platepar.Y_res)
    x_horiz = x_horiz[filter_arr]
    y_horiz = y_horiz[filter_arr]

    x.extend(x_horiz)
    y.extend(y_horiz)
    cuts.append(len(x) - 1)


    r = 15  # adjust this parameter if you see extraneous lines

    # Disconnect lines that are distant (unfinished circles had straight lines completing them)
    for i in range(len(x) - 1):
        if (x[i] - x[i + 1])**2 + (y[i] - y[i + 1])**2 > r**2:
            cuts.append(i)

    # Convert cuts into connect
    connect = np.full(len(x), 1)
    if len(connect) > 0:
        for i in cuts:
            connect[i] = 0

    grid.setData(x=x, y=y, connect=connect)
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \
    timebin_intdt=0.25, ht_std_percent=5.0, mask=None):
    """ Compute flux using measurements in the given FTPdetectinfo file.

    Arguments:
        config: [Config instance]
        dir_path: [str] Path to the working directory.
        ftpdetectinfo_path: [str] Path to a FTPdetectinfo file.
        shower_code: [str] IAU shower code (e.g. ETA, PER, SDA).
        dt_beg: [Datetime] Datetime object of the observation beginning.
        dt_end: [Datetime] Datetime object of the observation end.
        timebin: [float] Time bin in hours.
        mass_index: [float] Cumulative mass index of the shower.

    Keyword arguments:
        timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes
            by default. If smaller than that, only one collection area will be computed.
        ht_std_percent: [float] Meteor height standard deviation in percent.
        mask: [Mask object] Mask object, None by default.
            NOTE(review): this argument is unconditionally overwritten below by the mask loaded from
            the night directory (or set to None if no mask file is found) - confirm this is intended.

    Return:
        None. Per-bin flux results are printed to stdout and a cumulative histogram of peak
        magnitudes is plotted. None is also returned early if the platepar file, the meteor
        detections, or the shower associations are missing.
    """

    # Get a list of files in the night folder
    file_list = sorted(os.listdir(dir_path))

    # Find and load the platepar file
    if config.platepar_name in file_list:

        # Load the platepar
        platepar = Platepar.Platepar()
        platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat)

    else:
        print("Cannot find the platepar file in the night directory: ", config.platepar_name)
        return None

    # # Load FTPdetectinfos
    # meteor_data = []
    # for ftpdetectinfo_path in ftpdetectinfo_list:
    #     if not os.path.isfile(ftpdetectinfo_path):
    #         print('No such file:', ftpdetectinfo_path)
    #         continue
    #     meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    # Load meteor data from the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    if not len(meteor_data):
        print("No meteors in the FTPdetectinfo file!")
        return None

    # Find and load recalibrated platepars (one per FF file)
    if config.platepars_recalibrated_name in file_list:
        with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f:
            recalibrated_platepars_dict = json.load(f)

            print("Recalibrated platepars loaded!")

    # If the file is not available, apply the recalibration procedure
    else:
        recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config)

        print("Recalibrated platepar file not available!")
        print("Recalibrating...")

    # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects
    recalibrated_platepars = {}
    for ff_name in recalibrated_platepars_dict:
        pp = Platepar.Platepar()
        pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat)

        recalibrated_platepars[ff_name] = pp

    # Compute nighly mean of the photometric zero point
    mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \
        for ff_name in recalibrated_platepars])

    # Locate and load the mask file (overrides the 'mask' keyword argument)
    if config.mask_file in file_list:
        mask_path = os.path.join(dir_path, config.mask_file)
        mask = loadMask(mask_path)
        print("Using mask:", mask_path)

    else:
        print("No mask used!")
        mask = None

    # Compute the population index using the classical equation
    population_index = 10**((mass_index - 1)/2.5)

    ### SENSOR CHARACTERIZATION ###
    # Computes FWHM of stars and noise profile of the sensor

    # File which stores the sensor characterization profile
    sensor_characterization_file = "flux_sensor_characterization.json"
    sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file)

    # Load sensor characterization file if present, so the procedure can be skipped
    if os.path.isfile(sensor_characterization_path):

        # Load the JSON file
        with open(sensor_characterization_path) as f:
            data = " ".join(f.readlines())
            sensor_data = json.loads(data)

            # Remove the info entry (the '-1' key only documents the entry format)
            if '-1' in sensor_data:
                del sensor_data['-1']

    else:

        # Run sensor characterization
        sensor_data = sensorCharacterization(config, dir_path)

        # Save to file for posterior use
        with open(sensor_characterization_path, 'w') as f:

            # Add an explanation what each entry means
            sensor_data_save = dict(sensor_data)
            sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']}

            # Convert collection areas to JSON
            out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True)

            # Save to disk
            f.write(out_str)

    # Compute the nighly mean FWHM and noise stddev
    fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data])
    stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data])

    ### ###

    # Perform shower association
    associations, shower_counts = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \
        show_plot=False, save_plot=False, plot_activity=False)

    # If there are no shower association, return nothing
    if not associations:
        print("No meteors associated with the shower!")
        return None

    # Print the list of used meteors
    # NOTE(review): the loop variable 'shower' leaks out of this loop and is reused below
    #   (shower.computeApparentRadiant) - after the loop it refers to whichever association was
    #   iterated last. Verify this is the intended shower object.
    peak_mags = []
    for key in associations:
        meteor, shower = associations[key]

        if shower is not None:

            # Compute peak magnitude
            peak_mag = np.min(meteor.mag_array)

            peak_mags.append(peak_mag)

            print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag))

    print()

    # Init the flux configuration
    flux_config = FluxConfig()

    ### COMPUTE COLLECTION AREAS ###

    # Make a file name to save the raw collection areas
    col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \
        flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit)

    # Check if the collection area file exists. If yes, load the data. If not, generate collection areas
    if col_areas_file_name in os.listdir(dir_path):
        col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name)
        print("Loaded collection areas from:", col_areas_file_name)

    else:

        # Compute the collecting areas segments per height
        col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \
            ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \
            elev_limit=flux_config.elev_limit)

        # Save the collection areas to file
        saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht)

        print("Saved raw collection areas to:", col_areas_file_name)

    ### ###

    # Compute the pointing of the middle of the FOV (evaluated at the J2000 epoch, so no precession
    #   is needed)
    _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], [platepar.X_res/2], [platepar.Y_res/2], \
        [1], platepar, extinction_correction=False)
    azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon)

    # Compute the range to the middle point (reference height of 100 km)
    ref_ht = 100000
    r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \
        elev_limit=flux_config.elev_limit)

    ### Compute the average angular velocity to which the flux variation throughout the night will be
    #   normalized. The ang vel is of the middle of the FOV in the middle of observations

    # Middle Julian date of the night
    jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2

    # Compute the apparent radiant
    ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid)

    # Compute the radiant elevation
    radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon)

    # Compute the angular velocity in the middle of the FOV (rad/s)
    rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
        np.radians(azim_mid), np.radians(elev_mid))
    ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid

    ###

    # Compute the average limiting magnitude to which all flux will be normalized

    # Standard deviation of star PSF, nightly mean (px); 2.355 converts FWHM to sigma
    star_stddev = fwhm_nightly_mean/2.355

    # Compute the theoretical stellar limiting magnitude (nightly average)
    star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2
    lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean

    # A meteor needs to be visible on at least 4 frames, thus it needs to have at least 4x the mass to
    #   produce that amount of light. 1 magnitude difference scales as -0.4 of log of mass, thus:
    frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4)
    lm_s_nightly_mean += frame_min_loss

    # Compute apparent meteor magnitude (nightly average)
    lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
        np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \
        )

    # print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean)
    print("Apparent meteor limiting magnitude:", lm_m_nightly_mean)

    ### Apply time-dependent corrections ###

    sol_data = []
    flux_lm_6_5_data = []

    # Go through all time bins within the observation period
    total_time_hrs = (dt_end - dt_beg).total_seconds()/3600
    nbins = int(np.ceil(total_time_hrs/timebin))
    for t_bin in range(nbins):

        # Compute bin start and end time (the last bin is clipped to the end of observations)
        bin_dt_beg = dt_beg + datetime.timedelta(hours=timebin*t_bin)
        bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin)

        if bin_dt_end > dt_end:
            bin_dt_end = dt_end

        # Compute bin duration in hours
        bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600

        # Convert to Julian date
        bin_jd_beg = datetime2JD(bin_dt_beg)
        bin_jd_end = datetime2JD(bin_dt_end)

        # Only select meteors in this bin
        bin_meteors = []
        bin_ffs = []
        for key in associations:
            meteor, shower = associations[key]

            if shower is not None:
                if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \
                    and (meteor.jdt_ref <= bin_jd_end):

                    bin_meteors.append([meteor, shower])
                    bin_ffs.append(meteor.ff_name)

        # Skip bins with no meteors
        if len(bin_meteors) > 0:

            ### Compute the radiant elevation at the middle of the time bin ###

            jd_mean = (bin_jd_beg + bin_jd_end)/2

            # Compute the mean solar longitude
            sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean))

            print()
            print()
            print("-- Bin information ---")
            print("Bin beg:", bin_dt_beg)
            print("Bin end:", bin_dt_end)
            print("Sol mid: {:.5f}".format(sol_mean))
            print("Meteors:", len(bin_meteors))

            # Compute the apparent radiant
            ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean)

            # Compute the mean meteor height
            meteor_ht_beg = heightModel(v_init, ht_type='beg')
            meteor_ht_end = heightModel(v_init, ht_type='end')
            meteor_ht = (meteor_ht_beg + meteor_ht_end)/2

            # Compute the standard deviation of the height
            meteor_ht_std = meteor_ht*ht_std_percent/100.0

            # Init the Gaussian height distribution
            meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std)

            # Compute the radiant elevation
            radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon)

            ### ###

            ### Weight collection area by meteor height distribution ###

            # Determine weights for each height
            weight_sum = 0
            weights = {}
            for ht in col_areas_ht:
                wt = meteor_ht_gauss.pdf(float(ht))
                weight_sum += wt
                weights[ht] = wt

            # Normalize the weights so that the sum is 1
            for ht in weights:
                weights[ht] /= weight_sum

            ### ###

            # Compute the angular velocity in the middle of the FOV (rad/s)
            rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
                np.radians(azim_mid), np.radians(elev_mid))
            ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid

            ### Compute the limiting magnitude ###

            # Compute the mean star FWHM in the given bin
            fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs])

            # Compute the mean background stddev in the given bin
            stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs])

            # Compute the mean photometric zero point in the given bin
            mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs if ff_name in recalibrated_platepars])

            # Standard deviation of star PSF, bin mean (px); reuses the nightly variable name
            star_stddev = fwhm_bin_mean/2.355

            # Compute the theoretical stellar limiting magnitude (bin average)
            star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2
            lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean
            lm_s += frame_min_loss

            # Compute apparent meteor magnitude
            lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
                np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean))\
                )

            ### ###

            # Final correction area value (height-weightned)
            collection_area = 0

            # Go through all heights and segment blocks
            for ht in col_areas_ht:
                for img_coords in col_areas_ht[ht]:

                    x_mean, y_mean = img_coords

                    # Unpack precomputed values
                    area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords]

                    # Compute the angular velocity (rad/s) in the middle of this block
                    rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
                        np.radians(azim), np.radians(elev))
                    ang_vel = v_init*np.sin(rad_dist)/r

                    # Compute the range correction
                    range_correction = (1e5/r)**2

                    #ang_vel_correction = ang_vel/ang_vel_mid
                    # Compute angular velocity correction relative to the nightly mean
                    ang_vel_correction = ang_vel/ang_vel_night_mid

                    ### Apply corrections

                    correction_ratio = 1.0

                    # Correct the area for vignetting and extinction
                    correction_ratio *= sensitivity_ratio

                    # Correct for the range
                    correction_ratio *= range_correction

                    # Correct for the radiant elevation
                    correction_ratio *= np.sin(np.radians(radiant_elev))

                    # Correct for angular velocity
                    correction_ratio *= ang_vel_correction

                    # Add the collection area to the final estimate with the height weight
                    # Raise the correction to the mass index power
                    collection_area += weights[ht]*area*correction_ratio**(mass_index - 1)

            # Compute the flux at the bin LM (meteors/1000km^2/h)
            flux = 1e9*len(bin_meteors)/collection_area/bin_hours

            # Compute the flux scaled to the nightly mean LM
            flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m)

            # Compute the flux scaled to +6.5M
            flux_lm_6_5 = flux*population_index**(6.5 - lm_m)

            print("-- Sensor information ---")
            print("Star FWHM: {:5.2f} px".format(fwhm_bin_mean))
            print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean))
            print("Photom ZP: {:+6.2f} mag".format(mag_lev_bin_mean))
            print("Stellar LM: {:+.2f} mag".format(lm_s))
            print("-- Flux ---")
            print("Col area: {:d} km^2".format(int(collection_area/1e6)))
            print("Ang vel: {:.2f} deg/s".format(np.degrees(ang_vel_mid)))
            print("LM app: {:+.2f} mag".format(lm_m))
            print("Flux: {:.2f} meteors/1000km^2/h".format(flux))
            print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean))
            print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5))

            sol_data.append(sol_mean)
            flux_lm_6_5_data.append(flux_lm_6_5)

    # Print the results
    print("Solar longitude, Flux at LM +6.5:")
    for sol, flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data):
        print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5))

    # Plot a cumulative histogram of peak magnitudes
    plt.hist(peak_mags, cumulative=True)
    plt.show()
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \ timebin_intdt=0.25, ht_std_percent=5.0, mask=None, show_plots=True): """ Compute flux using measurements in the given FTPdetectinfo file. Arguments: config: [Config instance] dir_path: [str] Path to the working directory. ftpdetectinfo_path: [str] Path to a FTPdetectinfo file. shower_code: [str] IAU shower code (e.g. ETA, PER, SDA). dt_beg: [Datetime] Datetime object of the observation beginning. dt_end: [Datetime] Datetime object of the observation end. timebin: [float] Time bin in hours. mass_index: [float] Cumulative mass index of the shower. Keyword arguments: timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes by default. If smaller than that, only one collection are will be computed. ht_std_percent: [float] Meteor height standard deviation in percent. mask: [Mask object] Mask object, None by default. show_plots: [bool] Show flux plots. True by default. Return: [tuple] sol_data, flux_lm_6_5_data - sol_data: [list] Array of solar longitudes (in degrees) of time bins. - flux_lm6_5_data: [list] Array of meteoroid flux at the limiting magnitude of +6.5 in meteors/1000km^2/h. 
""" # Get a list of files in the night folder file_list = sorted(os.listdir(dir_path)) # Find and load the platepar file if config.platepar_name in file_list: # Load the platepar platepar = Platepar.Platepar() platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat) else: print("Cannot find the platepar file in the night directory: ", config.platepar_name) return None # # Load FTPdetectinfos # meteor_data = [] # for ftpdetectinfo_path in ftpdetectinfo_list: # if not os.path.isfile(ftpdetectinfo_path): # print('No such file:', ftpdetectinfo_path) # continue # meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path)) # Load meteor data from the FTPdetectinfo file meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path)) if not len(meteor_data): print("No meteors in the FTPdetectinfo file!") return None # Find and load recalibrated platepars if config.platepars_recalibrated_name in file_list: with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f: recalibrated_platepars_dict = json.load(f) print("Recalibrated platepars loaded!") # If the file is not available, apply the recalibration procedure else: recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config) print("Recalibrated platepar file not available!") print("Recalibrating...") # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects recalibrated_platepars = {} for ff_name in recalibrated_platepars_dict: pp = Platepar.Platepar() pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat) recalibrated_platepars[ff_name] = pp # Compute nighly mean of the photometric zero point mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \ for ff_name in recalibrated_platepars]) # Locate and load the mask file if config.mask_file in file_list: mask_path = os.path.join(dir_path, config.mask_file) mask = loadMask(mask_path) print("Using mask:", mask_path) else: print("No 
mask used!") mask = None # Compute the population index using the classical equation population_index = 10**((mass_index - 1)/2.5) # Found to be more consistent when comparing fluxes #population_index = 10**((mass_index - 1)/2.3) # TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1 ### SENSOR CHARACTERIZATION ### # Computes FWHM of stars and noise profile of the sensor # File which stores the sensor characterization profile sensor_characterization_file = "flux_sensor_characterization.json" sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file) # Load sensor characterization file if present, so the procedure can be skipped if os.path.isfile(sensor_characterization_path): # Load the JSON file with open(sensor_characterization_path) as f: data = " ".join(f.readlines()) sensor_data = json.loads(data) # Remove the info entry if '-1' in sensor_data: del sensor_data['-1'] else: # Run sensor characterization sensor_data = sensorCharacterization(config, dir_path) # Save to file for posterior use with open(sensor_characterization_path, 'w') as f: # Add an explanation what each entry means sensor_data_save = dict(sensor_data) sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']} # Convert collection areas to JSON out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True) # Save to disk f.write(out_str) # Compute the nighly mean FWHM and noise stddev fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data]) stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data]) ### ### # Perform shower association associations, _ = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \ show_plot=False, save_plot=False, plot_activity=False) # Init the flux configuration flux_config = FluxConfig() # Remove all meteors which begin below the limit height filtered_associations = {} for key in associations: meteor, shower = associations[key] if 
meteor.beg_alt > flux_config.elev_limit: print("Rejecting:", meteor.jdt_ref) filtered_associations[key] = [meteor, shower] associations = filtered_associations # If there are no shower association, return nothing if not associations: print("No meteors associated with the shower!") return None # Print the list of used meteors peak_mags = [] for key in associations: meteor, shower = associations[key] if shower is not None: # Compute peak magnitude peak_mag = np.min(meteor.mag_array) peak_mags.append(peak_mag) print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag)) print() ### COMPUTE COLLECTION AREAS ### # Make a file name to save the raw collection areas col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \ flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit) # Check if the collection area file exists. If yes, load the data. If not, generate collection areas if col_areas_file_name in os.listdir(dir_path): col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name) print("Loaded collection areas from:", col_areas_file_name) else: # Compute the collecting areas segments per height col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \ ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \ elev_limit=flux_config.elev_limit) # Save the collection areas to file saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht) print("Saved raw collection areas to:", col_areas_file_name) ### ### # Compute the raw collection area at the height of 100 km col_area_100km_raw = 0 col_areas_100km_blocks = col_areas_ht[100000.0] for block in col_areas_100km_blocks: col_area_100km_raw += col_areas_100km_blocks[block][0] print("Raw collection area at height of 100 km: {:.2f} km^2".format(col_area_100km_raw/1e6)) # Compute the pointing of the middle of the FOV _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], 
[platepar.X_res/2], [platepar.Y_res/2], \ [1], platepar, extinction_correction=False) azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon) # Compute the range to the middle point ref_ht = 100000 r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \ elev_limit=flux_config.elev_limit) print("Range at 100 km in the middle of the image: {:.2f} km".format(r_mid/1000)) ### Compute the average angular velocity to which the flux variation throught the night will be normalized # The ang vel is of the middle of the FOV in the middle of observations # Middle Julian date of the night jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2 # Compute the apparent radiant ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid) # Compute the radiant elevation radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon) # Compute the angular velocity in the middle of the FOV rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim_mid), np.radians(elev_mid)) ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid ### # Compute the average limiting magnitude to which all flux will be normalized # Standard deviation of star PSF, nightly mean (px) star_stddev = fwhm_nightly_mean/2.355 # # Compute the theoretical stellar limiting magnitude (nightly average) # star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2 # lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean # Compute the theoretical stellar limiting magnitude using an empirical model (nightly average) lm_s_nightly_mean = stellarLMModel(mag_lev_nightly_mean) # A meteor needs to be visible on at least 4 frames, thus it needs to have at least 4x the mass to produce # that amount of light. 
1 magnitude difference scales as -0.4 of log of mass, thus: # frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4) frame_min_loss = 0.0 # TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!11 print("Frame min loss: {:.2} mag".format(frame_min_loss)) lm_s_nightly_mean += frame_min_loss # Compute apparent meteor magnitude lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \ np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \ ) # print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean) print("Apparent meteor limiting magnitude:", lm_m_nightly_mean) ### Apply time-dependent corrections ### # Track values used for flux sol_data = [] flux_lm_6_5_data = [] meteor_num_data = [] effective_collection_area_data = [] radiant_elev_data = [] radiant_dist_mid_data = [] ang_vel_mid_data = [] lm_s_data = [] lm_m_data = [] sensitivity_corr_data = [] range_corr_data = [] radiant_elev_corr_data = [] ang_vel_corr_data = [] total_corr_data = [] # Go through all time bins within the observation period total_time_hrs = (dt_end - dt_beg).total_seconds()/3600 nbins = int(np.ceil(total_time_hrs/timebin)) for t_bin in range(nbins): for subbin in range(flux_config.sub_time_bins): # Compute bin start and end time bin_dt_beg = dt_beg + datetime.timedelta(hours=(timebin*t_bin + timebin*subbin/flux_config.sub_time_bins)) bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin) if bin_dt_end > dt_end: bin_dt_end = dt_end # Compute bin duration in hours bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600 # Convert to Julian date bin_jd_beg = datetime2JD(bin_dt_beg) bin_jd_end = datetime2JD(bin_dt_end) jd_mean = (bin_jd_beg + bin_jd_end)/2 # Compute the mean solar longitude sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean)) ### Compute the radiant elevation at the middle of the time bin ### # Compute the apparent radiant 
ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean) # Compute the mean meteor height meteor_ht_beg = heightModel(v_init, ht_type='beg') meteor_ht_end = heightModel(v_init, ht_type='end') meteor_ht = (meteor_ht_beg + meteor_ht_end)/2 # Compute the standard deviation of the height meteor_ht_std = meteor_ht*ht_std_percent/100.0 # Init the Gaussian height distribution meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std) # Compute the radiant elevation radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon) # Only select meteors in this bin and not too close to the radiant bin_meteors = [] bin_ffs = [] for key in associations: meteor, shower = associations[key] if shower is not None: if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \ and (meteor.jdt_ref <= bin_jd_end): # Filter out meteors ending too close to the radiant if np.degrees(angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), \ np.radians(meteor.end_azim), np.radians(meteor.end_alt))) >= flux_config.rad_dist_min: bin_meteors.append([meteor, shower]) bin_ffs.append(meteor.ff_name) ### ### print() print() print("-- Bin information ---") print("Bin beg:", bin_dt_beg) print("Bin end:", bin_dt_end) print("Sol mid: {:.5f}".format(sol_mean)) print("Radiant elevation: {:.2f} deg".format(radiant_elev)) print("Apparent speed: {:.2f} km/s".format(v_init/1000)) # If the elevation of the radiant is below the limit, skip this bin if radiant_elev < flux_config.rad_elev_limit: print("!!! Mean radiant elevation below {:.2f} deg threshold, skipping time bin!".format(flux_config.rad_elev_limit)) continue # The minimum duration of the time bin should be larger than 50% of the given dt if bin_hours < 0.5*timebin: print("!!! 
Time bin duration of {:.2f} h is shorter than 0.5x of the time bin!".format(bin_hours)) continue if len(bin_meteors) >= flux_config.meteros_min: print("Meteors:", len(bin_meteors)) ### Weight collection area by meteor height distribution ### # Determine weights for each height weight_sum = 0 weights = {} for ht in col_areas_ht: wt = meteor_ht_gauss.pdf(float(ht)) weight_sum += wt weights[ht] = wt # Normalize the weights so that the sum is 1 for ht in weights: weights[ht] /= weight_sum ### ### col_area_meteor_ht_raw = 0 for ht in col_areas_ht: for block in col_areas_ht[ht]: col_area_meteor_ht_raw += weights[ht]*col_areas_ht[ht][block][0] print("Raw collection area at meteor heights: {:.2f} km^2".format(col_area_meteor_ht_raw/1e6)) # Compute the angular velocity in the middle of the FOV rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim_mid), np.radians(elev_mid)) ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid ### Compute the limiting magnitude ### # Compute the mean star FWHM in the given bin fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs]) # Compute the mean background stddev in the given bin stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs]) # Compute the mean photometric zero point in the given bin mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs if ff_name in recalibrated_platepars]) # # Standard deviation of star PSF, nightly mean (px) # star_stddev = fwhm_bin_mean/2.355 # Compute the theoretical stellar limiting magnitude (bin average) # star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2 # lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean # Use empirical LM calculation lm_s = stellarLMModel(mag_lev_bin_mean) lm_s += frame_min_loss # ### TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!11 # # Artificialy increase limiting magnitude # lm_s 
+= 1.2 # ##### # Compute apparent meteor magnitude lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \ np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean))) ### ### # Final correction area value (height-weightned) collection_area = 0 # Keep track of the corrections sensitivity_corr_arr = [] range_corr_arr = [] radiant_elev_corr_arr = [] ang_vel_corr_arr = [] total_corr_arr = [] col_area_raw_arr = [] col_area_eff_arr = [] col_area_eff_block_dict = {} # Go through all heights and segment blocks for ht in col_areas_ht: for img_coords in col_areas_ht[ht]: x_mean, y_mean = img_coords # Unpack precomputed values area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords] # Compute the angular velocity (rad/s) in the middle of this block rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim), np.radians(elev)) ang_vel = v_init*np.sin(rad_dist)/r # If the angular distance from the radiant is less than 15 deg, don't use the block # in the effective collection area if np.degrees(rad_dist) < flux_config.rad_dist_min: area = 0.0 # Compute the range correction range_correction = (1e5/r)**2 #ang_vel_correction = ang_vel/ang_vel_mid # Compute angular velocity correction relative to the nightly mean ang_vel_correction = ang_vel/ang_vel_night_mid ### Apply corrections correction_ratio = 1.0 # Correct the area for vignetting and extinction sensitivity_corr_arr.append(sensitivity_ratio) correction_ratio *= sensitivity_ratio # Correct for the range (cap to an order of magnitude correction) range_correction = max(range_correction, 0.1) range_corr_arr.append(range_correction) correction_ratio *= range_correction # Correct for the radiant elevation (cap to an order of magnitude correction) radiant_elev_correction = np.sin(np.radians(radiant_elev)) radiant_elev_correction = max(radiant_elev_correction, 0.1) radiant_elev_corr_arr.append(radiant_elev_correction) correction_ratio *= 
radiant_elev_correction # Correct for angular velocity (cap to an order of magnitude correction) ang_vel_correction = min(max(ang_vel_correction, 0.1), 10) correction_ratio *= ang_vel_correction ang_vel_corr_arr.append(ang_vel_correction) # Add the collection area to the final estimate with the height weight # Raise the correction to the mass index power total_correction = correction_ratio**(mass_index - 1) total_correction = min(max(total_correction, 0.1), 10) collection_area += weights[ht]*area*total_correction total_corr_arr.append(total_correction) col_area_raw_arr.append(weights[ht]*area) col_area_eff_arr.append(weights[ht]*area*total_correction) if img_coords not in col_area_eff_block_dict: col_area_eff_block_dict[img_coords] = [] col_area_eff_block_dict[img_coords].append(weights[ht]*area*total_correction) # Compute mean corrections sensitivity_corr_avg = np.mean(sensitivity_corr_arr) range_corr_avg = np.mean(range_corr_arr) radiant_elev_corr_avg = np.mean(radiant_elev_corr_arr) ang_vel_corr_avg = np.mean(ang_vel_corr_arr) total_corr_avg = np.median(total_corr_arr) col_area_raw_sum = np.sum(col_area_raw_arr) col_area_eff_sum = np.sum(col_area_eff_arr) print("Raw collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_raw_sum/1e6)) print("Eff collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_eff_sum/1e6)) # ### PLOT HOW THE CORRECTION VARIES ACROSS THE FOV # x_arr = [] # y_arr = [] # col_area_eff_block_arr = [] # for img_coords in col_area_eff_block_dict: # x_mean, y_mean = img_coords # #if x_mean not in x_arr: # x_arr.append(x_mean) # #if y_mean not in y_arr: # y_arr.append(y_mean) # col_area_eff_block_arr.append(np.sum(col_area_eff_block_dict[img_coords])) # x_unique = np.unique(x_arr) # y_unique = np.unique(y_arr) # # plt.pcolormesh(x_arr, y_arr, np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique)).T, shading='auto') # plt.title("TOTAL = " + str(np.sum(col_area_eff_block_arr)/1e6)) # 
plt.scatter(x_arr, y_arr, c=np.array(col_area_eff_block_arr)/1e6) # #plt.pcolor(np.array(x_arr).reshape(len(x_unique), len(y_unique)), np.array(y_arr).reshape(len(x_unique), len(y_unique)), np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique))/1e6) # plt.colorbar(label="km^2") # plt.gca().invert_yaxis() # plt.show() # ### # Compute the flux at the bin LM (meteors/1000km^2/h) flux = 1e9*len(bin_meteors)/collection_area/bin_hours # Compute the flux scaled to the nightly mean LM flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m) # Compute the flux scaled to +6.5M flux_lm_6_5 = flux*population_index**(6.5 - lm_m) print("-- Sensor information ---") print("Star FWHM: {:5.2f} px".format(fwhm_bin_mean)) print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean)) print("Photom ZP: {:+6.2f} mag".format(mag_lev_bin_mean)) print("Stellar LM: {:+.2f} mag".format(lm_s)) print("-- Flux ---") print("Meteors: {:d}".format(len(bin_meteors))) print("Col area: {:d} km^2".format(int(collection_area/1e6))) print("Ang vel: {:.2f} deg/s".format(np.degrees(ang_vel_mid))) print("LM app: {:+.2f} mag".format(lm_m)) print("Flux: {:.2f} meteors/1000km^2/h".format(flux)) print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean)) print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5)) sol_data.append(sol_mean) flux_lm_6_5_data.append(flux_lm_6_5) meteor_num_data.append(len(bin_meteors)) effective_collection_area_data.append(collection_area) radiant_elev_data.append(radiant_elev) radiant_dist_mid_data.append(np.degrees(rad_dist_mid)) ang_vel_mid_data.append(np.degrees(ang_vel_mid)) lm_s_data.append(lm_s) lm_m_data.append(lm_m) sensitivity_corr_data.append(sensitivity_corr_avg) range_corr_data.append(range_corr_avg) radiant_elev_corr_data.append(radiant_elev_corr_avg) ang_vel_corr_data.append(ang_vel_corr_avg) total_corr_data.append(total_corr_avg) # Print the results print("Solar longitude, Flux at LM +6.5:") for sol, 
flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data): print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5)) if show_plots and len(sol_data): # Plot a histogram of peak magnitudes plt.hist(peak_mags, cumulative=True, log=True, bins=len(peak_mags), density=True) # Plot population index r_intercept = -0.7 x_arr = np.linspace(np.min(peak_mags), np.percentile(peak_mags, 60)) plt.plot(x_arr, 10**(np.log10(population_index)*x_arr + r_intercept)) plt.title("r = {:.2f}".format(population_index)) plt.show() # Plot how the derived values change throughout the night fig, axes \ = plt.subplots(nrows=4, ncols=2, sharex=True, figsize=(10, 8)) ((ax_met, ax_lm), (ax_rad_elev, ax_corrs), (ax_rad_dist, ax_col_area), (ax_ang_vel, ax_flux)) = axes fig.suptitle("{:s}, s = {:.2f}, r = {:.2f}".format(shower_code, mass_index, population_index)) ax_met.scatter(sol_data, meteor_num_data) ax_met.set_ylabel("Meteors") ax_rad_elev.plot(sol_data, radiant_elev_data) ax_rad_elev.set_ylabel("Radiant elev (deg)") ax_rad_dist.plot(sol_data, radiant_dist_mid_data) ax_rad_dist.set_ylabel("Radiant dist (deg)") ax_ang_vel.plot(sol_data, ang_vel_mid_data) ax_ang_vel.set_ylabel("Ang vel (deg/s)") ax_ang_vel.set_xlabel("La Sun (deg)") ax_lm.plot(sol_data, lm_s_data, label="Stellar") ax_lm.plot(sol_data, lm_m_data, label="Meteor") ax_lm.set_ylabel("LM") ax_lm.legend() ax_corrs.plot(sol_data, sensitivity_corr_data, label="Sensitivity") ax_corrs.plot(sol_data, range_corr_data, label="Range") ax_corrs.plot(sol_data, radiant_elev_corr_data, label="Rad elev") ax_corrs.plot(sol_data, ang_vel_corr_data, label="Ang vel") ax_corrs.plot(sol_data, total_corr_data, label="Total (median)") ax_corrs.set_ylabel("Corrections") ax_corrs.legend() ax_col_area.plot(sol_data, np.array(effective_collection_area_data)/1e6) ax_col_area.plot(sol_data, len(sol_data)*[col_area_100km_raw/1e6], color='k', \ label="Raw col area at 100 km") ax_col_area.plot(sol_data, len(sol_data)*[col_area_meteor_ht_raw/1e6], color='k', linestyle='dashed', \ 
label="Raw col area at met ht") ax_col_area.set_ylabel("Eff. col. area (km^2)") ax_col_area.legend() ax_flux.scatter(sol_data, flux_lm_6_5_data) ax_flux.set_ylabel("Flux@+6.5M (met/1000km^2/h)") ax_flux.set_xlabel("La Sun (deg)") plt.tight_layout() plt.show() return sol_data, flux_lm_6_5_data
def generateCalibrationReport(config, night_dir_path, match_radius=2.0, platepar=None, show_graphs=False):
    """ Given the folder of the night, find the Calstars file, check the star fit and generate a report
        with the quality of the calibration. The report contains information about both the astrometry
        and the photometry calibration. Graphs will be saved in the given directory of the night.

    Arguments:
        config: [Config instance]
        night_dir_path: [str] Full path to the directory of the night.

    Keyword arguments:
        match_radius: [float] Match radius for star matching between image and catalog stars (px).
        platepar: [Platepar instance] Use this platepar instead of finding one in the folder.
        show_graphs: [bool] Show the graphs on the screen. False by default.

    Return:
        None
    """

    # Find the CALSTARS file in the given folder
    # NOTE: a separate loop variable is used on purpose - reusing `calstars_file` as the loop
    #   variable would leave it set to the last listed file when no CALSTARS file exists,
    #   so the None check below would never trigger in a non-empty directory
    calstars_file = None
    for file_name in os.listdir(night_dir_path):
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None

    # Load the calstars file
    star_list = readCALSTARS(night_dir_path, calstars_file)

    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(night_dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file:
        with open(os.path.join(night_dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print('Loaded recalibrated platepars JSON file for the calibration report...')

    ### ###

    ### Load the platepar file ###

    # Find the platepar file in the given directory if it was not given
    if platepar is None:

        # Find the platepar file
        platepar_file = None
        for file_name in os.listdir(night_dir_path):
            if file_name == config.platepar_name:
                platepar_file = file_name
                break

        if platepar_file is None:
            print('The platepar cannot be found in the night directory!')
            return None

        # Load the platepar file
        platepar = Platepar()
        platepar.read(os.path.join(night_dir_path, platepar_file), use_flat=config.use_flat)

    ### ###

    night_name = os.path.split(night_dir_path.strip(os.sep))[1]

    # Go one mag deeper than in the config
    lim_mag = config.catalog_mag_limit + 1

    # Load catalog stars (load one magnitude deeper)
    catalog_stars, mag_band_str, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
        config.star_catalog_path, config.star_catalog_file, lim_mag=lim_mag, \
        mag_band_ratios=config.star_catalog_band_ratios)

    ### Take only those CALSTARS entries for which FF files exist in the folder ###

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(night_dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)

    # Filter out calstars entries, generate a star dictionary where the keys are JDs of FFs
    star_dict = {}
    ff_dict = {}
    for entry in star_list:

        ff_name, star_data = entry

        # Check if the FF from CALSTARS exists in the folder
        if ff_name not in ff_list:
            continue

        dt = getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*dt)

        # Add the time and the stars to the dict
        star_dict[jd] = star_data
        ff_dict[jd] = ff_name

    ### ###

    # If there are no FF files in the directory, don't generate a report
    if len(star_dict) == 0:
        print('No FF files from the CALSTARS file in the directory!')
        return None

    # If the recalibrated platepars file exists, take the one with the most stars
    max_jd = 0
    using_recalib_platepars = False
    if recalibrated_platepars is not None:

        max_stars = 0
        for ff_name_temp in recalibrated_platepars:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)

            # Check that this file exists in CALSTARS and the list of FF files
            if (jd not in star_dict) or (jd not in ff_dict):
                continue

            # Check if the number of stars on this FF file is larger than the before
            if len(star_dict[jd]) > max_stars:
                max_jd = jd
                max_stars = len(star_dict[jd])

        # Set a flag to indicate if using recalibrated platepars has failed
        if max_jd == 0:
            using_recalib_platepars = False

        else:

            print('Using recalibrated platepars, file:', ff_dict[max_jd])
            using_recalib_platepars = True

            # Select the platepar where the FF file has the most stars
            platepar_dict = recalibrated_platepars[ff_dict[max_jd]]
            platepar = Platepar()
            platepar.loadFromDict(platepar_dict, use_flat=config.use_flat)

            filtered_star_dict = {max_jd: star_dict[max_jd]}

            # Match stars on the image with the stars in the catalog
            n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, \
                catalog_stars, filtered_star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

            max_matched_stars = n_matched

    # Otherwise take the optimal FF file for evaluation
    if (recalibrated_platepars is None) or (not using_recalib_platepars):

        # If there are more than a set number of FF files to evaluate, choose only the ones with most
        #   stars on the image
        if len(star_dict) > config.calstars_files_N:

            # Find JDs of FF files with most stars on them
            top_nstars_indices = np.argsort([len(x) for x in star_dict.values()])[::-1][:config.calstars_files_N \
                - 1]

            filtered_star_dict = {}
            for i in top_nstars_indices:
                filtered_star_dict[list(star_dict.keys())[i]] = list(star_dict.values())[i]

            star_dict = filtered_star_dict

        # Match stars on the image with the stars in the catalog
        n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
            star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

    # If no recalibrated platepars where found, find the image with the largest number of matched stars
    if (not using_recalib_platepars) or (max_jd == 0):

        max_jd = 0
        max_matched_stars = 0
        for jd in matched_stars:
            _, _, distances = matched_stars[jd]
            if len(distances) > max_matched_stars:
                max_jd = jd
                max_matched_stars = len(distances)

        # If there are no matched stars, use the image with the largest number of detected stars
        if max_matched_stars <= 2:
            max_jd = max(star_dict, key=lambda x: len(star_dict[x]))
            distances = [np.inf]

    # Take the FF file with the largest number of matched stars
    ff_name = ff_dict[max_jd]

    # Load the FF file
    ff = readFF(night_dir_path, ff_name)
    img_h, img_w = ff.avepixel.shape

    dpi = 200
    plt.figure(figsize=(ff.avepixel.shape[1]/dpi, ff.avepixel.shape[0]/dpi), dpi=dpi)

    # Take the average pixel
    img = ff.avepixel

    # Slightly adjust the levels
    img = Image.adjustLevels(img, np.percentile(img, 1.0), 1.3, np.percentile(img, 99.99))

    plt.imshow(img, cmap='gray', interpolation='nearest')

    legend_handles = []

    # Plot detected stars
    for img_star in star_dict[max_jd]:

        y, x, _, _ = img_star

        rect_side = 5*match_radius
        square_patch = plt.Rectangle((x - rect_side/2, y - rect_side/2), rect_side, rect_side, \
            color='g', fill=False, label='Image stars')

        plt.gca().add_artist(square_patch)

    legend_handles.append(square_patch)

    # If there are matched stars, plot them
    if max_matched_stars > 2:

        # Take the solution with the largest number of matched stars
        image_stars, matched_catalog_stars, distances = matched_stars[max_jd]

        # Plot matched stars
        for img_star in image_stars:
            x, y, _, _ = img_star

            circle_patch = plt.Circle((y, x), radius=3*match_radius, color='y', fill=False, \
                label='Matched stars')

            plt.gca().add_artist(circle_patch)

        legend_handles.append(circle_patch)

        ### Plot match residuals ###

        # Compute predicted positions of matched image stars from the catalog
        x_predicted, y_predicted = raDecToXYPP(matched_catalog_stars[:, 0], \
            matched_catalog_stars[:, 1], max_jd, platepar)

        img_y, img_x, _, _ = image_stars.T

        delta_x = x_predicted - img_x
        delta_y = y_predicted - img_y

        # Compute image residual and angle of the error
        res_angle = np.arctan2(delta_y, delta_x)
        res_distance = np.sqrt(delta_x**2 + delta_y**2)

        # Calculate coordinates of the beginning of the residual line
        res_x_beg = img_x + 3*match_radius*np.cos(res_angle)
        res_y_beg = img_y + 3*match_radius*np.sin(res_angle)

        # Calculate coordinates of the end of the residual line
        res_x_end = img_x + 100*np.cos(res_angle)*res_distance
        res_y_end = img_y + 100*np.sin(res_angle)*res_distance

        # Plot the 100x residuals
        for i in range(len(x_predicted)):
            res_plot = plt.plot([res_x_beg[i], res_x_end[i]], [res_y_beg[i], res_y_end[i]], \
                color='orange', lw=0.5, label='100x residuals')

        legend_handles.append(res_plot[0])

        ### ###

    else:

        distances = [np.inf]

        # If there are no matched stars, plot large text in the middle of the screen
        plt.text(img_w/2, img_h/2, "NO MATCHED STARS!", color='r', alpha=0.5, fontsize=20, \
            ha='center', va='center')

    ### Plot positions of catalog stars to the limiting magnitude of the faintest matched star + 1 mag ###

    # Find the faintest magnitude among matched stars
    if max_matched_stars > 2:
        faintest_mag = np.max(matched_catalog_stars[:, 2]) + 1

    else:
        # If there are no matched stars, use the limiting magnitude from config
        faintest_mag = config.catalog_mag_limit + 1

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(max_jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
        platepar)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Get stars from the catalog around the defined center in a given radius
    _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, fov_radius, faintest_mag)
    ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

    # Compute image positions of all catalog stars that should be on the image
    x_catalog, y_catalog = raDecToXYPP(ra_catalog, dec_catalog, max_jd, platepar)

    # Filter all catalog stars outside the image
    temp_arr = np.c_[x_catalog, y_catalog, mag_catalog]
    temp_arr = temp_arr[temp_arr[:, 0] >= 0]
    temp_arr = temp_arr[temp_arr[:, 0] <= ff.avepixel.shape[1]]
    temp_arr = temp_arr[temp_arr[:, 1] >= 0]
    temp_arr = temp_arr[temp_arr[:, 1] <= ff.avepixel.shape[0]]
    x_catalog, y_catalog, mag_catalog = temp_arr.T

    # Plot catalog stars on the image
    cat_stars_handle = plt.scatter(x_catalog, y_catalog, c='none', marker='D', lw=1.0, alpha=0.4, \
        s=((4.0 + (faintest_mag - mag_catalog))/3.0)**(2*2.512), edgecolor='r', label='Catalog stars')
    legend_handles.append(cat_stars_handle)

    ### ###

    # Add info text in the corner
    info_text = ff_dict[max_jd] + '\n' \
        + "Matched stars within {:.1f} px radius: {:d}/{:d} \n".format(match_radius, max_matched_stars, \
            len(star_dict[max_jd])) \
        + "Median distance = {:.2f} px\n".format(np.median(distances)) \
        + "Catalog lim mag = {:.1f}".format(lim_mag)

    plt.text(10, 10, info_text, bbox=dict(facecolor='black', alpha=0.5), va='top', ha='left', \
        fontsize=4, color='w', family='monospace')

    legend = plt.legend(handles=legend_handles, prop={'size': 4}, loc='upper right')
    legend.get_frame().set_facecolor('k')
    legend.get_frame().set_edgecolor('k')
    for txt in legend.get_texts():
        txt.set_color('w')

    ### Add FOV info (centre, size) ###

    # Mark FOV centre
    plt.scatter(platepar.X_res/2, platepar.Y_res/2, marker='+', s=20, c='r', zorder=4)

    # Compute FOV centre alt/az
    # NOTE: the argument order matches the raDec2AltAz(ra, dec, jd, lat, lon) signature used
    #   elsewhere in this file; the previous (jd, lon, lat, ra, dec) order produced wrong azim/alt
    azim_centre, alt_centre = raDec2AltAz(RA_c, dec_c, max_jd, platepar.lat, platepar.lon)

    # Compute FOV size
    fov_h, fov_v = computeFOVSize(platepar)

    # Compute the rotation wrt. horizon
    rot_horizon = rotationWrtHorizon(platepar)

    fov_centre_text = "Azim = {:6.2f}$\\degree$\n".format(azim_centre) \
        + "Alt = {:6.2f}$\\degree$\n".format(alt_centre) \
        + "Rot h = {:6.2f}$\\degree$\n".format(rot_horizon) \
        + "FOV h = {:6.2f}$\\degree$\n".format(fov_h) \
        + "FOV v = {:6.2f}$\\degree$".format(fov_v)

    plt.text(10, platepar.Y_res - 10, fov_centre_text, bbox=dict(facecolor='black', alpha=0.5), \
        va='bottom', ha='left', fontsize=4, color='w', family='monospace')

    ### ###

    # Plot RA/Dec gridlines #
    # addEquatorialGrid(plt, platepar, max_jd)

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, ff.avepixel.shape[1]])
    plt.ylim([ff.avepixel.shape[0], 0])

    # Remove the margins
    plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)

    plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_astrometry.jpg'), \
        bbox_inches='tight', pad_inches=0, dpi=dpi)

    if show_graphs:
        plt.show()

    else:
        plt.clf()
        plt.close()

    if max_matched_stars > 2:

        ### PHOTOMETRY FIT ###

        # If a flat is used, set the vignetting coeff to 0
        if config.use_flat:
            platepar.vignetting_coeff = 0.0

        # Extract intensities and magnitudes
        star_intensities = image_stars[:, 2]
        catalog_mags = matched_catalog_stars[:, 2]

        # Compute radius of every star from image centre
        radius_arr = np.hypot(image_stars[:, 0] - img_h/2, image_stars[:, 1] - img_w/2)

        # Fit the photometry on automated star intensities (use the fixed vignetting coeff, use
        #   robust fit)
        photom_params, fit_stddev, fit_resid, star_intensities, radius_arr, catalog_mags = \
            photometryFitRobust(star_intensities, radius_arr, catalog_mags, \
            fixed_vignetting=platepar.vignetting_coeff)

        photom_offset, _ = photom_params

        ### ###

        ### PLOT PHOTOMETRY ###
        # Note: An almost identical code exists in RMS.Astrometry.SkyFit in the PlateTool.photometry
        #   function

        dpi = 130
        fig_p, (ax_p, ax_r) = plt.subplots(nrows=2, facecolor=None, figsize=(6.0, 7.0), dpi=dpi, \
            gridspec_kw={'height_ratios':[2, 1]})

        # Plot raw star intensities
        ax_p.scatter(-2.5*np.log10(star_intensities), catalog_mags, s=5, c='r', alpha=0.5, \
            label="Raw")

        # If a flat is used, disregard the vignetting
        if not config.use_flat:

            # Plot intensities of image stars corrected for vignetting
            lsp_corr_arr = np.log10(correctVignetting(star_intensities, radius_arr, \
                platepar.vignetting_coeff))
            ax_p.scatter(-2.5*lsp_corr_arr, catalog_mags, s=5, c='b', alpha=0.5, \
                label="Corrected for vignetting")

        # Plot photometric offset from the platepar
        x_min, x_max = ax_p.get_xlim()
        y_min, y_max = ax_p.get_ylim()

        x_min_w = x_min - 3
        x_max_w = x_max + 3
        y_min_w = y_min - 3
        y_max_w = y_max + 3

        photometry_info = "Platepar: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(platepar.mag_0, \
            platepar.mag_lev, platepar.mag_lev_stddev) \
            + "\nVignetting coeff = {:.5f}".format(platepar.vignetting_coeff) \
            + "\nGamma = {:.2f}".format(platepar.gamma)

        # Plot the photometry calibration from the platepar
        logsum_arr = np.linspace(x_min_w, x_max_w, 10)
        ax_p.plot(logsum_arr, logsum_arr + platepar.mag_lev, label=photometry_info, linestyle='--', \
            color='k', alpha=0.5)

        # Plot the fitted photometry calibration
        fit_info = "Fit: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(-2.5, photom_offset, fit_stddev)
        ax_p.plot(logsum_arr, logsum_arr + photom_offset, label=fit_info, linestyle='--', color='b', \
            alpha=0.75)

        ax_p.legend()

        ax_p.set_ylabel("Catalog magnitude ({:s})".format(mag_band_str))
        ax_p.set_xlabel("Uncalibrated magnitude")

        # Set wider axis limits
        ax_p.set_xlim(x_min_w, x_max_w)
        ax_p.set_ylim(y_min_w, y_max_w)

        ax_p.invert_yaxis()
        ax_p.invert_xaxis()

        ax_p.grid()

        ### Plot photometry vs radius ###

        img_diagonal = np.hypot(img_h/2, img_w/2)

        # Plot photometry residuals (including vignetting)
        ax_r.scatter(radius_arr, fit_resid, c='b', alpha=0.75, s=5, zorder=3)

        # Plot a zero line
        ax_r.plot(np.linspace(0, img_diagonal, 10), np.zeros(10), linestyle='dashed', alpha=0.5, \
            color='k')

        # Plot only when no flat is used
        if not config.use_flat:

            # Plot radius from centre vs. fit residual
            fit_resids_novignetting = catalog_mags - photomLine((np.array(star_intensities), \
                np.array(radius_arr)), photom_offset, 0.0)
            ax_r.scatter(radius_arr, fit_resids_novignetting, s=5, c='r', alpha=0.5, zorder=3)

            px_sum_tmp = 1000
            radius_arr_tmp = np.linspace(0, img_diagonal, 50)

            # Plot vignetting loss curve
            vignetting_loss = 2.5*np.log10(px_sum_tmp) \
                - 2.5*np.log10(correctVignetting(px_sum_tmp, radius_arr_tmp, \
                    platepar.vignetting_coeff))

            ax_r.plot(radius_arr_tmp, vignetting_loss, linestyle='dotted', alpha=0.5, color='k')

        ax_r.grid()

        ax_r.set_ylabel("Fit residuals (mag)")
        ax_r.set_xlabel("Radius from centre (px)")

        ax_r.set_xlim(0, img_diagonal)

        ### ###

        plt.tight_layout()

        plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_photometry.png'), \
            dpi=150)

        if show_graphs:
            plt.show()

        else:
            plt.clf()
            plt.close()
def add_fffits_metadata(ff_filename, config, platepars_recalibrated, fallback_platepar):
    """ Add FITS metadata and WCS to FF files generated by RMS

    Args:
        ff_filename (str): full or relative path to FF file
        config (RMS.Config): config instance
        platepars_recalibrated (dict): dictionary with recalibrated platepars
        fallback_platepar (RMS.Platepar): platepar with fitted stars

    Returns:
        None
    """

    ff_basename = os.path.basename(ff_filename)

    # Load the recalibrated platepar for this FF file directly from its dict.
    # NOTE: the previous implementation serialized the dict to a hard-coded "platepar_tmp.cal"
    #   file in the current working directory and never removed it, which littered the CWD and
    #   raced between concurrent runs. loadFromDict() is the same API this codebase already uses
    #   for entries of the recalibrated platepars JSON.
    platepar_recalibrated = Platepar()
    try:
        platepar_data = platepars_recalibrated[ff_basename]
        platepar_recalibrated.loadFromDict(platepar_data)

    except (FileNotFoundError, KeyError):
        # No recalibrated platepar for this FF file - fall back to the given platepar
        platepar_recalibrated = fallback_platepar
        logger.warning(f"Using non-recalibrated platepar for {ff_basename}")

    # Time of the FF file middle
    fftime = getMiddleTimeFF(ff_basename, config.fps)

    # Image coordinates of the fitted stars (columns 1:3 of the star list)
    fit_xy = np.array(fallback_platepar.star_list)[:, 1:3]

    # Compute RA/Dec of the fitted stars using the recalibrated platepar
    _, fit_ra, fit_dec, _ = xyToRaDecPP([fftime]*len(fit_xy), fit_xy[:, 0], fit_xy[:, 1], \
        [1]*len(fit_xy), platepar_recalibrated, extinction_correction=False)

    # RA/Dec of the image centre, used as the WCS reference point
    x0 = platepar_recalibrated.X_res/2
    y0 = platepar_recalibrated.Y_res/2
    _, ra0, dec0, _ = xyToRaDecPP([fftime], [x0], [y0], [1], platepar_recalibrated, \
        extinction_correction=False)

    # Fit a WCS (zenithal equal-area projection) on the star positions
    w = fit_wcs(fit_xy[:, 0], fit_xy[:, 1], fit_ra, fit_dec, x0, y0, ra0[0], dec0[0], 5, \
        projection="ZEA")

    obstime = Time(filenameToDatetime(ff_basename))

    # Metadata keywords added to every HDU (only if not already present)
    header_meta = {}
    header_meta["OBSERVER"] = config.stationID.strip()
    header_meta["INSTRUME"] = "Global Meteor Network"
    header_meta["MJD-OBS"] = obstime.mjd
    header_meta["DATE-OBS"] = obstime.fits
    header_meta["NFRAMES"] = 256
    header_meta["EXPTIME"] = 256/config.fps
    header_meta["SITELONG"] = round(config.longitude, 2)
    header_meta["SITELAT"] = round(config.latitude, 2)

    # Use a context manager so the FITS handle is always closed, even on error
    with fits.open(ff_filename, scale_back=True) as hdu_list:

        for hdu in hdu_list:

            # First header is not an image so should not get WCS
            if hdu.header["NAXIS"] == 0:
                new_header = Header()
            else:
                new_header = w.to_fits(relax=True)[0].header

            for key, value in header_meta.items():
                new_header.append((key, value))

            # Only add keywords that are not already in the existing header
            for key, value in new_header.items():
                if key in hdu.header:
                    continue
                hdu.header[key] = value

        hdu_list.writeto(ff_filename, overwrite=True)