Example #1
def plotFieldsums(dir_path, config):
    """ Plots a graph of all intensity sums from FS*.bin files in the given directory. 
    
    Arguments:
        dir_path: [str] Path to the directory which contains the FS*.bin files.
        config: [Config structure] Configuration.

    Return:
        None, or False if no fieldsum files are found in the directory.
    """

    time_data = []
    intensity_data_peak = []
    intensity_data_avg = []

    # Get all fieldsum files in the directory
    for file_name in sorted(os.listdir(dir_path)):

        # Check if it is a fieldsum file
        if ('FS' in file_name) and ('_fieldsum.bin' in file_name):

            # Read the field sums
            _, intensity_array = readFieldIntensitiesBin(dir_path, file_name)

            # Extract the date and time from the file name
            dt = filenameToDatetime(file_name)

            # Take the peak intensity value
            intensity_data_peak.append(np.max(intensity_array))

            # Take the average intensity value
            intensity_data_avg.append(np.mean(intensity_array))

            time_data.append(dt)

    # If there are no fieldsums, do nothing
    if not time_data:
        return False

    ### Plot the raw intensity over time ###
    ##########################################################################################################

    # Plot peak intensities
    plt.plot(time_data,
             intensity_data_peak,
             color='r',
             linewidth=0.5,
             zorder=3,
             label='Peak')

    # Plot average intensities
    plt.plot(time_data,
             intensity_data_avg,
             color='k',
             linewidth=0.5,
             zorder=3,
             label='Average')

    plt.gca().set_yscale('log')

    plt.xlim(np.min(time_data), np.max(time_data))
    plt.ylim(np.min(intensity_data_avg), np.max(intensity_data_peak))

    plt.xlabel('Time')
    plt.ylabel('ADU')

    # Rotate x ticks so they do not overlap
    plt.xticks(rotation=30)

    plt.grid(color='0.9', which='both')

    plt.title('Peak field sums for ' + os.path.basename(dir_path))

    plt.tight_layout()

    plt.legend()

    plt.savefig(os.path.join(dir_path, str(config.stationID) + '_' + os.path.basename(dir_path) \
        + '_fieldsums.png'), dpi=300)

    plt.clf()
    plt.close()

    ##########################################################################################################

    ### Plot intensities without the average value
    ##########################################################################################################

    intensity_data_peak = np.array(intensity_data_peak)
    intensity_data_avg = np.array(intensity_data_avg)

    # Calculate the difference between the peak values and the average values per every FF file
    intensity_data_noavg = intensity_data_peak - intensity_data_avg

    plt.plot(time_data,
             intensity_data_noavg,
             color='k',
             linewidth=0.5,
             zorder=3)

    plt.gca().set_yscale('log')

    plt.xlim(np.min(time_data), np.max(time_data))

    plt.xlabel('Time')
    plt.ylabel('Peak ADU - average')

    # Rotate x ticks so they do not overlap
    plt.xticks(rotation=30)

    plt.grid(color='0.9', which='both')

    plt.title('Deaveraged field sums for ' + os.path.basename(dir_path))

    plt.tight_layout()


    plt.savefig(os.path.join(dir_path, str(config.stationID) + '_' + os.path.basename(dir_path) \
        + '_fieldsums_noavg.png'), dpi=300)

    plt.clf()
    plt.close()
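
A minimal usage sketch for plotFieldsums follows. The directory path is hypothetical, and the config loader import mirrors the cr.parse(".config") call used in Example #4, so the exact import path is an assumption.

# Hypothetical usage sketch: plot the field sums for one night directory.
import RMS.ConfigReader as cr    # assumed import path for the config reader

config = cr.parse(".config")
plotFieldsums("/path/to/ArchivedFiles/XX0001_20200801_012345", config)

# The plots <stationID>_<dirname>_fieldsums.png and <stationID>_<dirname>_fieldsums_noavg.png
# are written into the same directory.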
Example #2
def applyPlateparToCentroids(ff_name,
                             fps,
                             meteor_meas,
                             platepar,
                             add_calstatus=False):
    """ Given the meteor centroids and a platepar file, compute meteor astrometry and photometry (RA/Dec,
        alt/az, mag).
    Arguments:
        ff_name: [str] Name of the FF file with the meteor.
        fps: [float] Frames per second of the video.
        meteor_meas: [list] A list of [calib_status, frame_n, x, y, ra, dec, azim, elev, inten, mag].
        platepar: [Platepar instance] Platepar which will be used for astrometry and photometry.
    Keyword arguments:
        add_calstatus: [bool] Add a column with calibration status at the beginning. False by default.
    Return:
        meteor_picks: [ndarray] A numpy 2D array of: [frames, X_data, Y_data, RA_data, dec_data, az_data,
        alt_data, level_data, magnitudes]
    """

    meteor_meas = np.array(meteor_meas)

    # Add a column indicating the calibration status
    if add_calstatus:
        meteor_meas = np.c_[np.ones((meteor_meas.shape[0], 1)), meteor_meas]

    # Remove all entries where levels are equal to or smaller than 0, unless all are zero
    level_data = meteor_meas[:, 8]
    if np.any(level_data):
        meteor_meas = meteor_meas[level_data > 0, :]

    # Extract frame number, x, y, intensity
    frames = meteor_meas[:, 1]
    X_data = meteor_meas[:, 2]
    Y_data = meteor_meas[:, 3]
    level_data = meteor_meas[:, 8]

    # Get the beginning time of the FF file
    time_beg = filenameToDatetime(ff_name)

    # Calculate time data of every point
    time_data = []
    for frame_n in frames:
        t = time_beg + datetime.timedelta(seconds=frame_n / fps)
        time_data.append([
            t.year, t.month, t.day, t.hour, t.minute, t.second,
            int(t.microsecond / 1000)
        ])

    # Convert image coordinates to RA and Dec, and do the photometry
    JD_data, RA_data, dec_data, magnitudes = xyToRaDecPP(np.array(time_data), X_data, Y_data, \
        level_data, platepar)

    # Compute azimuth and altitude of centroids
    az_data = np.zeros_like(RA_data)
    alt_data = np.zeros_like(RA_data)

    for i in range(len(az_data)):

        jd = JD_data[i]
        ra_tmp = RA_data[i]
        dec_tmp = dec_data[i]

        # Alt and az are kept in the J2000 epoch, which is the CAMS standard!
        az_tmp, alt_tmp = trueRaDec2ApparentAltAz(ra_tmp, dec_tmp, jd,
                                                  platepar.lat, platepar.lon)

        az_data[i] = az_tmp
        alt_data[i] = alt_tmp

    # print(ff_name, cam_code, meteor_No, fps)
    # print(X_data, Y_data)
    # print(RA_data, dec_data)
    # print('------------------------------------------')

    # Construct the meteor measurements array
    meteor_picks = np.c_[frames, X_data, Y_data, RA_data, dec_data, az_data, alt_data, level_data, \
        magnitudes]

    return meteor_picks
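
A hedged usage sketch: it reuses the FTPdetectinfo and Platepar readers shown in Example #5, and the file and directory names are illustrative assumptions.

# Hypothetical usage sketch: compute astrometry and photometry for the first meteor
# in an FTPdetectinfo file (dir_path, ftp_detectinfo_file and platepar_file are assumed names).
meteor_data = readFTPdetectinfo(dir_path, ftp_detectinfo_file)
ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, meteor_meas = meteor_data[0]

platepar = Platepar()
platepar.read(os.path.join(dir_path, platepar_file))

# Result columns: frame, X, Y, RA, Dec, azimuth, altitude, level, magnitude
meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, platepar, add_calstatus=True)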
Example #3
def generateTimelapse(dir_path, nodel):
    """ Generate an MP4 timelapse from all FF files in the given directory.

    Arguments:
        dir_path: [str] Path to the directory with the FF files.
        nodel: [bool] If True, keep the temporary directory with the labelled JPEG frames.

    Return:
        None
    """

    # Frame rate of the output video. The original script defines fps outside this snippet;
    # 30 FPS is assumed here so the excerpt runs on its own.
    fps = 30

    t1 = datetime.datetime.utcnow()

    # Load the font for labeling
    try:
        font = ImageFont.truetype("/usr/share/fonts/dejavu/DejaVuSans.ttf", 18)
    except:
        font = ImageFont.load_default()

    # Create temporary directory
    dir_tmp_path = os.path.join(dir_path, "temp_img_dir")

    if os.path.exists(dir_tmp_path):
        shutil.rmtree(dir_tmp_path)
        print("Deleted directory : " + dir_tmp_path)

    mkdirP(dir_tmp_path)
    print("Created directory : " + dir_tmp_path)

    print("Preparing files for the timelapse...")
    c = 0

    ff_list = [
        ff_name for ff_name in sorted(os.listdir(dir_path))
        if validFFName(ff_name)
    ]

    for file_name in ff_list:

        # Read the FF file
        ff = readFF(dir_path, file_name)

        # Skip the file if it could not be read
        if ff is None:
            continue

        # Get the timestamp from the FF name
        timestamp = filenameToDatetime(file_name).strftime("%Y-%m-%d %H:%M:%S")

        # Get the camera ID from the file name
        # e.g.  FF499_20170626_020520_353_0005120.bin
        # or FF_CA0001_20170626_020520_353_0005120.fits

        file_split = file_name.split('_')

        # Check the number of name components; the new FITS naming format has one more underscore
        i = 0
        if len(file_split[0]) == 2:
            i = 1
        camid = file_split[i]

        # Make a filename for the image, continuous count %04d
        img_file_name = 'temp_{:04d}.jpg'.format(c)

        img = ff.maxpixel

        # Draw text to image
        font = cv2.FONT_HERSHEY_SIMPLEX
        text = camid + " " + timestamp + " UTC"
        cv2.putText(img, text, (10, ff.nrows - 6), font, 0.4, (255, 255, 255),
                    1, cv2.LINE_AA)

        # Save the labelled image to disk
        cv2.imwrite(os.path.join(dir_tmp_path, img_file_name), img,
                    [cv2.IMWRITE_JPEG_QUALITY, 100])

        c = c + 1

        # Print the progress and elapsed time (c was already incremented for this file)
        if c % 30 == 0:
            print("{:>5d}/{:>5d}, Elapsed: {:s}".format(c, len(ff_list), \
                str(datetime.datetime.utcnow() - t1)), end="\r")
            sys.stdout.flush()

    # If running on Linux, use avconv
    if platform.system() == 'Linux':

        # If avconv is not found, try using ffmpeg. In case of using ffmpeg,
        # use parameter -nostdin to avoid it being stuck waiting for user input
        software_name = "avconv"
        nostdin = ""
        print("Checking if avconv is available...")
        if os.system(software_name + " --help > /dev/null"):
            software_name = "ffmpeg"
            nostdin = " -nostdin "

        # Construct the command for avconv
        mp4_path = os.path.join(dir_path, os.path.basename(dir_path) + ".mp4")
        temp_img_path = os.path.basename(
            dir_tmp_path) + os.sep + "temp_%04d.jpg"
        com = "cd " + dir_path + ";" \
            + software_name + nostdin + " -v quiet -r "+ str(fps) +" -y -i " + temp_img_path \
            + " -vcodec libx264 -pix_fmt yuv420p -crf 25 -movflags faststart -g 15 -vf \"hqdn3d=4:3:6:4.5,lutyuv=y=gammaval(0.77)\" " \
            + mp4_path

        print("Creating timelapse using {:s}...".format(software_name))
        print(com)
        subprocess.call([com], shell=True)

    # If running on Windows, use ffmpeg.exe
    elif platform.system() == 'Windows':

        # ffmpeg.exe path
        root = os.path.dirname(__file__)
        ffmpeg_path = os.path.join(root, "ffmpeg.exe")

        # Construct the command for ffmpeg
        mp4_path = os.path.basename(dir_path) + ".mp4"
        temp_img_path = os.path.join(os.path.basename(dir_tmp_path),
                                     "temp_%04d.jpg")
        com = ffmpeg_path + " -v quiet -r " + str(
            fps
        ) + " -i " + temp_img_path + " -c:v libx264 -pix_fmt yuv420p -an -crf 25 -g 15 -vf \"hqdn3d=4:3:6:4.5,lutyuv=y=gammaval(0.77)\" -movflags faststart -y " + mp4_path

        print("Creating timelapse using ffmpeg...")
        print(com)
        subprocess.call(com, shell=True, cwd=dir_path)

    else:
        print("generateTimelapse only works on Linux or Windows; the video could not be encoded")

    # Delete the temporary directory and the files inside
    if os.path.exists(dir_tmp_path) and not nodel:
        shutil.rmtree(dir_tmp_path)
        print("Deleted temporary directory : " + dir_tmp_path)

    print("Total time:", datetime.datetime.utcnow() - t1)
Example #4
def FFtoFrames(file_path,
               out_dir,
               file_format,
               deinterlace_mode,
               first_frame=0,
               last_frame=255):
    """ Extract individual frames from an FF file and save them as images.

    Arguments:
        file_path: [str] Path to the FF file.
        out_dir: [str] Path to the directory where the extracted frames will be saved.
        file_format: [str] Image format of the saved frames, e.g. 'jpg', 'png', or 'pngm' (METAL PNG).
        deinterlace_mode: [int] -1 for no deinterlacing, 0 for odd field first, 1 for even field first.

    Keyword arguments:
        first_frame: [int] First frame to extract. 0 by default.
        last_frame: [int] Last frame to extract. 255 by default.

    Return:
        frame_name_time_list: [list] A list of [frame_name, frame_datetime] pairs for the saved frames.
    """

    # Load the configuration file
    config = cr.parse(".config")

    # Read the deinterlace
    #   -1 - no deinterlace
    #    0 - odd first
    #    1 - even first

    if deinterlace_mode not in (-1, 0, 1):
        print('Unknown deinterlace mode:', deinterlace_mode)
        sys.exit()

    # Check if the file exists
    if not os.path.isfile(file_path):

        print('The file {:s} does not exist!'.format(file_path))
        sys.exit()

    # Check if the output directory exists, make it if it doesn't
    if not os.path.exists(out_dir):

        print('Making directory: {:s}'.format(out_dir))
        mkdirP(out_dir)

    # Open the FF file
    dir_path, file_name = os.path.split(file_path)
    ff = readFF(dir_path, file_name)

    # Take the FPS from the FF file, if available
    if hasattr(ff, 'fps'):
        fps = ff.fps
    else:
        fps = None

    # Take the FPS from the config file if it could not be read from the FF file
    if fps is None:
        fps = config.fps

    # Try to read the number of frames from the FF file itself
    if ff.nframes > 0:
        nframes = ff.nframes

    else:
        nframes = 256

    # Construct a file name for saving
    if file_format == 'pngm':

        # If the METAL type PNG file is given, make the file name 'dump'
        file_name_saving = 'dump'

    else:

        file_name_saving = file_name.replace('.fits', '').replace('.bin', '')

    frame_name_time_list = []

    # Get the initial time of the FF file
    ff_dt = filenameToDatetime(file_name)

    # Go through all frames
    for i in range(first_frame, last_frame + 1):

        # Reconstruct the individual frame
        frame = reconstructFrame(ff, i, avepixel=True)

        # Deinterlace the frame if necessary, odd field first
        if deinterlace_mode == 0:

            frame_odd = deinterlaceOdd(frame)
            frame_name, frame_dt = saveFrame(frame_odd,
                                             i,
                                             out_dir,
                                             file_name_saving,
                                             file_format,
                                             ff_dt,
                                             fps,
                                             half_frame=0)
            frame_name_time_list.append([frame_name, frame_dt])

            frame_even = deinterlaceEven(frame)
            frame_name, frame_dt = saveFrame(frame_even,
                                             i,
                                             out_dir,
                                             file_name_saving,
                                             file_format,
                                             ff_dt,
                                             fps,
                                             half_frame=1)
            frame_name_time_list.append([frame_name, frame_dt])

        # Even first
        elif deinterlace_mode == 1:

            frame_even = deinterlaceEven(frame)
            frame_name, frame_dt = saveFrame(frame_even,
                                             i,
                                             out_dir,
                                             file_name_saving,
                                             file_format,
                                             ff_dt,
                                             fps,
                                             half_frame=0)
            frame_name_time_list.append([frame_name, frame_dt])

            frame_odd = deinterlaceOdd(frame)
            frame_name, frame_dt = saveFrame(frame_odd,
                                             i,
                                             out_dir,
                                             file_name_saving,
                                             file_format,
                                             ff_dt,
                                             fps,
                                             half_frame=1)
            frame_name_time_list.append([frame_name, frame_dt])

        # No deinterlace
        else:
            frame_name, frame_dt = saveFrame(frame, i - first_frame, out_dir,
                                             file_name_saving, file_format,
                                             ff_dt, fps)
            frame_name_time_list.append([frame_name, frame_dt])

    # If the frames are saved for METAL, the times have to be given in a separate file
    if file_format == 'pngm':

        with open(os.path.join(out_dir, 'frtime.txt'), 'w') as f:

            # Write all frames and times in a file
            for frame_name, frame_dt in frame_name_time_list:
                # 20180117:01:08:29.8342
                f.write('{:s} {:s}\n'.format(
                    frame_name, frame_dt.strftime("%Y%m%d:%H:%M:%S.%f")))

    return frame_name_time_list
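
A usage sketch under the assumption that a .config file is present in the working directory (the function loads it itself); the FF file name and output directory are hypothetical.

# Hypothetical usage sketch: extract frames 100-160 of one FF file as JPEGs, no deinterlacing.
frame_list = FFtoFrames("/path/to/FF_XX0001_20200801_012345_678_0000000.fits",
                        "/path/to/frames_out", 'jpg', -1,
                        first_frame=100, last_frame=160)

# Each entry is a [frame_name, frame_datetime] pair
for frame_name, frame_dt in frame_list:
    print(frame_name, frame_dt.strftime("%Y%m%d:%H:%M:%S.%f"))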
Example #5
def applyAstrometryFTPdetectinfo(dir_path,
                                 ftp_detectinfo_file,
                                 platepar_file,
                                 UT_corr=0):
    """ Use the given platepar to calculate the celestial coordinates of detected meteors from a FTPdetectinfo
        file and save the updates values.

    Arguments:
        dir_path: [str] Path to the night.
        ftp_detectinfo_file: [str] Name of the FTPdetectinfo file.
        platepar_file: [str] Name of the platepar file.

    Keyword arguments:
        UT_corr: [float] Difference of time from UTC in hours.

    Return:
        None
    """

    # Save a copy of the uncalibrated FTPdetectinfo
    ftp_detectinfo_copy = "".join(
        ftp_detectinfo_file.split('.')[:-1]) + "_uncalibrated.txt"

    # Back up the original FTPdetectinfo, only if a backup does not exist already
    if not os.path.isfile(os.path.join(dir_path, ftp_detectinfo_copy)):
        shutil.copy2(os.path.join(dir_path, ftp_detectinfo_file),
                     os.path.join(dir_path, ftp_detectinfo_copy))

    # Load the platepar
    platepar = Platepar()
    platepar.read(os.path.join(dir_path, platepar_file))

    # Load the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(dir_path, ftp_detectinfo_file)

    # List for final meteor data
    meteor_list = []

    # Go through every meteor
    for meteor in meteor_data:

        ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, meteor_meas = meteor

        meteor_meas = np.array(meteor_meas)

        # Extract frame number, x, y, intensity
        frames = meteor_meas[:, 1]
        X_data = meteor_meas[:, 2]
        Y_data = meteor_meas[:, 3]
        level_data = meteor_meas[:, 8]

        # Get the beginning time of the FF file
        time_beg = filenameToDatetime(ff_name)

        # Calculate time data of every point
        time_data = []
        for frame_n in frames:
            t = time_beg + datetime.timedelta(seconds=frame_n / fps)
            time_data.append([
                t.year, t.month, t.day, t.hour, t.minute, t.second,
                int(t.microsecond / 1000)
            ])

        # Convert image coordinates to RA and Dec, and do the photometry
        JD_data, RA_data, dec_data, magnitudes = XY2CorrectedRADecPP(np.array(time_data), X_data, Y_data, \
            level_data, platepar)

        # Compute azimuth and altitude of centroids
        az_data = np.zeros_like(RA_data)
        alt_data = np.zeros_like(RA_data)

        for i in range(len(az_data)):

            jd = JD_data[i]
            ra_tmp = RA_data[i]
            dec_tmp = dec_data[i]

            az_tmp, alt_tmp = raDec2AltAz(jd, platepar.lon, platepar.lat,
                                          ra_tmp, dec_tmp)

            az_data[i] = az_tmp
            alt_data[i] = alt_tmp

        # print(ff_name, cam_code, meteor_No, fps)
        # print(X_data, Y_data)
        # print(RA_data, dec_data)
        # print('------------------------------------------')

        # Construct the meteor measurements array
        meteor_picks = np.c_[frames, X_data, Y_data, RA_data, dec_data, az_data, alt_data, level_data, \
            magnitudes]

        # Add the calculated values to the final list
        meteor_list.append([ff_name, meteor_No, rho, phi, meteor_picks])

    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Calibrated with RMS on: ' + str(
        datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0

    # Save the updated FTPdetectinfo
    writeFTPdetectinfo(meteor_list,
                       dir_path,
                       ftp_detectinfo_file,
                       dir_path,
                       cam_code,
                       fps,
                       calibration=calib_str,
                       celestial_coords_given=True)
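
A usage sketch with illustrative file names (the platepar file name is an assumption):

# Hypothetical usage sketch: recompute celestial coordinates for every meteor in an
# FTPdetectinfo file using the given platepar.
applyAstrometryFTPdetectinfo("/path/to/night_dir",
                             "FTPdetectinfo_XX0001_20200801_012345_678910.txt",
                             "platepar_cmn2010.cal")

# The original file is first backed up as *_uncalibrated.txt and then overwritten in place.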
Example #6
def plotFieldsums(dir_path, config):
    """ Plots a graph of all intensity sums from FS*.bin files in the given directory. 
    
    Arguments:
        dir_path: [str] Path to the directory which contains the FS*.bin files.
        config: [Config structure] Configuration.

    Return:
        None, or False if no fieldsum files are found in the directory.
    """

    time_data = []
    intensity_data_peak = []
    intensity_data_avg = []

    # Get all fieldsum files in the directory
    for file_name in sorted(os.listdir(dir_path)):

        # Check if it is a fieldsum file
        if ('FS' in file_name) and ('_fieldsum.bin' in file_name):

            # Try reading the intensities sum, because the file might be corrupted
            try:
                # Read the field sums
                _, intensity_array = readFieldIntensitiesBin(dir_path, file_name)

            except TypeError:
                print('File {:s} is corrupted!'.format(file_name))

                # Skip the corrupted file
                continue

            # Extract the date and time from the file name
            dt = filenameToDatetime(file_name)

            # Take the peak intensity value
            intensity_data_peak.append(np.max(intensity_array))

            # Take the average intensity value
            intensity_data_avg.append(np.mean(intensity_array))


            time_data.append(dt)


    # If there are no fieldsums, do nothing
    if not time_data:
        return False


    ### Plot the raw intensity over time ###
    ##########################################################################################################

    plt.figure()
    
    # Plot peak intensities
    plt.plot(time_data, intensity_data_peak, color='r', linewidth=0.5, zorder=3, label='Peak')

    # Plot average intensities
    plt.plot(time_data, intensity_data_avg, color='k', linewidth=0.5, zorder=3, label='Average')

    plt.gca().set_yscale('log')

    plt.xlim(np.min(time_data), np.max(time_data))
    plt.ylim(np.min(intensity_data_avg), np.max(intensity_data_peak))

    plt.xlabel('Time')
    plt.ylabel('ADU')

    # Rotate x ticks so they do not overlap
    plt.xticks(rotation=30)

    plt.grid(color='0.9', which='both')

    plt.title('Peak field sums for ' + os.path.basename(dir_path))

    plt.tight_layout()

    plt.legend()

    plt.savefig(os.path.join(dir_path, str(config.stationID) + '_' + os.path.basename(dir_path) \
        + '_fieldsums.png'), dpi=300)

    plt.clf()
    plt.close()

    ##########################################################################################################


    ### Plot intensities without the average value
    ##########################################################################################################

    intensity_data_peak = np.array(intensity_data_peak)
    intensity_data_avg = np.array(intensity_data_avg)

    # Calculate the difference between the peak values and the average values per every FF file
    intensity_data_noavg = intensity_data_peak - intensity_data_avg

    plt.figure()

    plt.plot(time_data, intensity_data_noavg, color='k', linewidth=0.5, zorder=3)

    plt.gca().set_yscale('log')

    plt.xlim(np.min(time_data), np.max(time_data))

    plt.xlabel('Time')
    plt.ylabel('Peak ADU - average')

    # Rotate x ticks so they do not overlap
    plt.xticks(rotation=30)

    plt.grid(color='0.9', which='both')

    plt.title('Deaveraged field sums for ' + os.path.basename(dir_path))


    plt.tight_layout()


    plt.savefig(os.path.join(dir_path, str(config.stationID) + '_' + os.path.basename(dir_path) \
        + '_fieldsums_noavg.png'), dpi=300)

    plt.clf()
    plt.close()
Example #7
    # Construct a file name for saving
    if file_format == 'pngm':

        # If the METAL type PNG file is given, make the file name 'dump'
        file_name_saving = 'dump'

    else:

        file_name_saving = file_name.replace('.fits', '').replace('.bin', '')


    frame_name_time_list = []

    # Get the initial time of the FF file
    ff_dt = filenameToDatetime(file_name)

    # Go through all frames
    for i in range(nframes):

        # Reconstruct individual frames
        frame = reconstructFrame(ff, i, avepixel=True)

        # Deinterlace the frame if necessary, odd first
        if deinterlace_mode == 0:

            frame_odd = deinterlaceOdd(frame)
            frame_name, frame_dt = saveFrame(frame_odd, i, out_dir, file_name_saving, file_format, ff_dt, fps, half_frame=0)
            frame_name_time_list.append([frame_name, frame_dt])

            frame_even = deinterlaceEven(frame)
Example #8
def generateMP4s(dir_path, ftpfile_name):
    """ Generate a labelled MP4 clip for every meteor detection listed in an FTPdetectinfo file.

    Arguments:
        dir_path: [str] Path to the directory with the FF files and the FTPdetectinfo file.
        ftpfile_name: [str] Name of the FTPdetectinfo file.

    Return:
        None
    """

    t1 = datetime.datetime.utcnow()

    # Load the font for labeling
    try:
        font = ImageFont.truetype("/usr/share/fonts/dejavu/DejaVuSans.ttf", 18)
    except:
        font = ImageFont.load_default()

    print("Preparing files for the timelapse...")
    # load the ftpfile so we know which frames we want
    meteor_list = FTPdetectinfo.readFTPdetectinfo(dir_path, ftpfile_name)
    for meteor in meteor_list:
        ff_name, _, _, n_segments, _, _, _, _, _, _, _, \
            meteor_meas = meteor
        # determine which frames we want

        first_frame = int(meteor_meas[0][1]) - 30
        last_frame = first_frame + 60
        if first_frame < 0:
            first_frame = 0
        if (n_segments > 1):
            lastseg = int(n_segments) - 1
            last_frame = int(meteor_meas[lastseg][1]) + 30
        #if last_frame > 255 :
        #    last_frame = 255
        if last_frame < first_frame + 60:
            last_frame = first_frame + 60

        print(ff_name, ' frames ', first_frame, last_frame)

        # Read the FF file
        ff = readFF(dir_path, ff_name)

        # Skip the file if it could not be read
        if ff is None:
            continue

        # Create temporary directory
        dir_tmp_path = os.path.join(dir_path, "temp_img_dir")

        if os.path.exists(dir_tmp_path):
            shutil.rmtree(dir_tmp_path)
            print("Deleted directory : " + dir_tmp_path)

        mkdirP(dir_tmp_path)
        print("Created directory : " + dir_tmp_path)

        # extract the individual frames
        f2f.FFtoFrames(dir_path + '/' + ff_name, dir_tmp_path, 'jpg', -1,
                       first_frame, last_frame)

        # Get the timestamp from the FF name
        timestamp = filenameToDatetime(ff_name).strftime("%Y-%m-%d %H:%M:%S")

        # Get the camera ID from the file name
        # e.g.  FF499_20170626_020520_353_0005120.bin
        # or FF_CA0001_20170626_020520_353_0005120.fits

        file_split = ff_name.split('_')

        # Check the number of name components; the new FITS naming format has one more underscore
        i = 0
        if len(file_split[0]) == 2:
            i = 1
        camid = file_split[i]

        # add datestamp to each frame
        jpg_list = [jpg_name for jpg_name in sorted(os.listdir(dir_tmp_path))]
        for img_file_name in jpg_list:
            img = cv2.imread(os.path.join(dir_tmp_path, img_file_name))

            # Draw text to image
            font = cv2.FONT_HERSHEY_SIMPLEX
            text = camid + " " + timestamp + " UTC"
            cv2.putText(img, text, (10, ff.nrows - 6), font, 0.4,
                        (255, 255, 255), 1, cv2.LINE_AA)

            # Save the labelled image to disk
            cv2.imwrite(os.path.join(dir_tmp_path, img_file_name), img,
                        [cv2.IMWRITE_JPEG_QUALITY, 100])

        ffbasename = os.path.splitext(ff_name)[0]
        mp4_path = ffbasename + ".mp4"
        temp_img_path = os.path.join(dir_tmp_path, ffbasename + "_%03d.jpg")

        # If running on Windows, use ffmpeg.exe
        if platform.system() == 'Windows':

            # ffmpeg.exe path
            root = os.path.dirname(__file__)
            ffmpeg_path = os.path.join(root, "ffmpeg.exe")
            # Construct the command for ffmpeg
            com = ffmpeg_path + " -y -f image2 -pattern_type sequence -i " + temp_img_path + " " + mp4_path
            print("Creating timelapse using ffmpeg...")
        else:
            # If avconv is not found, try using ffmpeg
            software_name = "avconv"
            print("Checking if avconv is available...")
            if os.system(software_name + " --help > /dev/null"):
                software_name = "ffmpeg"
                # Construct the command for ffmpeg
                com = software_name + " -y -f image2 -pattern_type sequence -i " + temp_img_path + " " + mp4_path
                print("Creating timelapse using ffmpeg...")
            else:
                print("Creating timelapse using avconv...")
                com = "cd " + dir_path + ";" \
                    + software_name + " -v quiet -r 30 -y -i " + temp_img_path \
                    + " -vcodec libx264 -pix_fmt yuv420p -crf 25 -movflags faststart -g 15 -vf \"hqdn3d=4:3:6:4.5,lutyuv=y=gammaval(0.97)\" " \
                    + mp4_path

        #print(com)
        subprocess.call(com, shell=True, cwd=dir_path)

        # Delete the temporary directory and the files inside
        if os.path.exists(dir_tmp_path):
            try:
                shutil.rmtree(dir_tmp_path)
            except:
                # may occasionally fail due to ffmpeg thread still terminating
                # so catch this and wait a bit
                time.sleep(2)
                shutil.rmtree(dir_tmp_path)

            print("Deleted temporary directory : " + dir_tmp_path)

    print("Total time:", datetime.datetime.utcnow() - t1)
Example #9
    print("Preparing files for the timelapse...")
    c = 0

    ff_list = [ff_name for ff_name in sorted(os.listdir(dir_path)) if validFFName(ff_name)]

    for file_name in ff_list:

        # Read the FF file
        ff = readFF(dir_path, file_name)

        # Skip the file if it could not be read
        if ff is None:
            continue

        # Get the timestamp from the FF name
        timestamp = filenameToDatetime(file_name).strftime("%Y-%m-%d %H:%M:%S")

        # Get the camera ID from the file name
        # e.g.  FF499_20170626_020520_353_0005120.bin
        # or FF_CA0001_20170626_020520_353_0005120.fits

        file_split = file_name.split('_')

        # Check the number of name components; the new FITS naming format has one more underscore
        i = 0
        if len(file_split[0]) == 2:
            i = 1
        camid = file_split[i]

        # Make a filename for the image, continuous count %04d
        img_file_name = 'temp_{:04d}.jpg'.format(c)
Example #10
def showerAssociation(config, ftpdetectinfo_list, shower_code=None, show_plot=False, save_plot=False, \
    plot_activity=False):
    """ Do single station shower association based on radiant direction and height. 
    
    Arguments:
        config: [Config instance]
        ftpdetectinfo_list: [list] A list of paths to FTPdetectinfo files.

    Keyword arguments:
        shower_code: [str] Only use this one shower for association (e.g. ETA, PER, SDA). None by default,
            in which case all active showers will be associated.
        show_plot: [bool] Show the plot on the screen. False by default.
        save_plot: [bool] Save the plot in the folder with FTPdetectinfos. False by default.
        plot_activity: [bool] Whether to plot the shower activity diagram or not. False by default.

    Return:
        associations, shower_counts: [tuple]
            - associations: [dict] A dictionary where the FF name and the meteor ordinal number on the FF
                file are keys, and the associated Shower object are values.
            - shower_counts: [list] A list of shower code and shower count pairs.
    """

    # Load the list of meteor showers
    shower_list = loadShowers(config.shower_path, config.shower_file_name)

    # Load FTPdetectinfos
    meteor_data = []
    for ftpdetectinfo_path in ftpdetectinfo_list:

        if not os.path.isfile(ftpdetectinfo_path):
            print('No such file:', ftpdetectinfo_path)
            continue

        meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    if not len(meteor_data):
        return {}, []

    # Dictionary which holds FF names as keys and meteor measurements + associated showers as values
    associations = {}

    for meteor in meteor_data:

        ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, meteor_meas = meteor

        # Skip very short meteors
        if len(meteor_meas) < 4:
            continue

        # Check if the data is calibrated
        if not meteor_meas[0][0]:
            print(
                'Data is not calibrated! Meteors cannot be associated to showers!'
            )
            break

        # Init container for meteor observation
        meteor_obj = MeteorSingleStation(cam_code, config.latitude,
                                         config.longitude, ff_name)

        # Infill the meteor structure
        for entry in meteor_meas:

            calib_status, frame_n, x, y, ra, dec, azim, elev, inten, mag = entry

            # Compute the Julian data of every point
            jd = datetime2JD(
                filenameToDatetime(ff_name) +
                datetime.timedelta(seconds=float(frame_n) / fps))

            meteor_obj.addPoint(jd, ra, dec, mag)

        # Fit the great circle and compute the geometrical parameters
        meteor_obj.fitGC()

        # Skip all meteors with beginning altitudes below 15 deg above the horizon
        if meteor_obj.beg_alt < 15:
            continue

        # Go through all showers in the list and find the best match
        best_match_shower = None
        best_match_dist = np.inf
        for shower_entry in shower_list:

            # Extract shower parameters
            shower = Shower(shower_entry)

            # If the shower code was given, only check this one shower
            if shower_code is not None:
                if shower.name.lower() != shower_code.lower():
                    continue

            ### Solar longitude filter

            # If the shower doesn't have a stated beginning or end, check if the meteor is within a preset
            # threshold solar longitude difference
            if np.any(np.isnan([shower.lasun_beg, shower.lasun_end])):

                shower.lasun_beg = (shower.lasun_max -
                                    config.shower_lasun_threshold) % 360
                shower.lasun_end = (shower.lasun_max +
                                    config.shower_lasun_threshold) % 360

            # Filter out all showers which are not active
            if not isAngleBetween(np.radians(shower.lasun_beg),
                                  np.radians(meteor_obj.lasun),
                                  np.radians(shower.lasun_end)):

                continue

            ### ###

            ### Radiant filter ###

            # Assume a fixed meteor height for an approximate apparent radiant
            meteor_fixed_ht = 100000  # 100 km
            shower.computeApparentRadiant(config.latitude, config.longitude, meteor_obj.jdt_ref, \
                meteor_fixed_ht=meteor_fixed_ht)

            # Compute the angle between the meteor radiant and the great circle normal
            radiant_separation = meteor_obj.angularSeparationFromGC(
                shower.ra, shower.dec)

            # Make sure the meteor is within the radiant distance threshold
            if radiant_separation > config.shower_max_radiant_separation:
                continue

            # Compute angle between the meteor's beginning and end, and the shower radiant
            shower.radiant_vector = vectNorm(
                raDec2Vector(shower.ra, shower.dec))
            begin_separation = np.degrees(angularSeparationVect(shower.radiant_vector, \
                meteor_obj.meteor_begin_cartesian))
            end_separation = np.degrees(angularSeparationVect(shower.radiant_vector, \
                meteor_obj.meteor_end_cartesian))

            # Make sure the beginning of the meteor is closer to the radiant than its end
            if begin_separation > end_separation:
                continue

            ### ###

            ### Height filter ###

            # Estimate the limiting meteor height from the velocity (meters)
            filter_beg_ht = heightModel(shower.v_init, ht_type='beg')
            filter_end_ht = heightModel(shower.v_init, ht_type='end')

            ### Estimate the meteor beginning height with +/- 1 frame, otherwise some short meteors may
            ###   get rejected

            meteor_obj_orig = copy.deepcopy(meteor_obj)

            # Shorter
            meteor_obj_m1 = copy.deepcopy(meteor_obj_orig)
            meteor_obj_m1.duration -= 1.0 / config.fps
            meteor_beg_ht_m1 = estimateMeteorHeight(config, meteor_obj_m1,
                                                    shower)

            # Nominal
            meteor_beg_ht = estimateMeteorHeight(config, meteor_obj_orig,
                                                 shower)

            # Longer
            meteor_obj_p1 = copy.deepcopy(meteor_obj_orig)
            meteor_obj_p1.duration += 1.0 / config.fps
            meteor_beg_ht_p1 = estimateMeteorHeight(config, meteor_obj_p1,
                                                    shower)

            meteor_obj = meteor_obj_orig

            ### ###

            # If all heights (even those with +/- 1 frame) are outside the height range, reject the meteor
            if ((meteor_beg_ht_p1 < filter_end_ht) or (meteor_beg_ht_p1 > filter_beg_ht)) and \
                ((meteor_beg_ht    < filter_end_ht) or (meteor_beg_ht    > filter_beg_ht)) and \
                ((meteor_beg_ht_m1 < filter_end_ht) or (meteor_beg_ht_m1 > filter_beg_ht)):

                continue

            ### ###

            # Compute the radiant elevation above the horizon
            shower.azim, shower.elev = raDec2AltAz(shower.ra, shower.dec, meteor_obj.jdt_ref, \
                config.latitude, config.longitude)

            # Take the shower that's closest to the great circle if there are multiple candidates
            if radiant_separation < best_match_dist:
                best_match_dist = radiant_separation
                best_match_shower = copy.deepcopy(shower)

        # If a shower is given and the match is not this shower, skip adding the meteor to the list.
        # If no specific shower is given for association, add all meteors
        if ((shower_code is not None) and
            (best_match_shower is not None)) or (shower_code is None):

            # Store the associated shower
            associations[(ff_name,
                          meteor_No)] = [meteor_obj, best_match_shower]

    # Find shower frequency and sort by count
    shower_name_list_temp = []
    shower_list_temp = []
    for key in associations:
        _, shower = associations[key]

        if shower is None:
            shower_name = '...'
        else:
            shower_name = shower.name

        shower_name_list_temp.append(shower_name)
        shower_list_temp.append(shower)

    _, unique_showers_indices = np.unique(shower_name_list_temp,
                                          return_index=True)
    unique_shower_names = np.array(
        shower_name_list_temp)[unique_showers_indices]
    unique_showers = np.array(shower_list_temp)[unique_showers_indices]
    shower_counts = [[shower_obj, shower_name_list_temp.count(shower_name)] for shower_obj, \
        shower_name in zip(unique_showers, unique_shower_names)]
    shower_counts = sorted(shower_counts, key=lambda x: x[1], reverse=True)

    # Create a plot of showers
    if show_plot or save_plot:
        # Generate consistent colours
        colors_by_name = makeShowerColors(shower_list)

        def get_shower_color(shower):
            try:
                return colors_by_name[shower.name] if shower else "0.4"
            except KeyError:
                return 'gray'

        # Init the figure
        plt.figure()

        # Init subplots depending on if the activity plot is done as well
        if plot_activity:
            gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
            ax_allsky = plt.subplot(gs[0], facecolor='black')
            ax_activity = plt.subplot(gs[1], facecolor='black')
        else:
            ax_allsky = plt.subplot(111, facecolor='black')

        # Init the all-sky plot
        allsky_plot = AllSkyPlot(ax_handle=ax_allsky)

        # Plot all meteors
        for key in associations:

            meteor_obj, shower = associations[key]

            ### Plot the observed meteor points ###
            color = get_shower_color(shower)
            allsky_plot.plot(meteor_obj.ra_array,
                             meteor_obj.dec_array,
                             color=color,
                             linewidth=1,
                             zorder=4)

            # Plot the peak of shower meteors a different color
            peak_color = 'blue'
            if shower is not None:
                peak_color = 'tomato'

            allsky_plot.scatter(meteor_obj.ra_array[-1], meteor_obj.dec_array[-1], c=peak_color, marker='+', \
                s=5, zorder=5)

            ### ###

            ### Plot fitted great circle points ###

            # Find the GC phase angle of the beginning of the meteor
            gc_beg_phase = meteor_obj.findGCPhase(
                meteor_obj.ra_array[0], meteor_obj.dec_array[0])[0] % 360

            # If the meteor belongs to a shower, find the GC phase which ends at the shower
            if shower is not None:
                gc_end_phase = meteor_obj.findGCPhase(shower.ra,
                                                      shower.dec)[0] % 360

                # Fix 0/360 wrap
                if abs(gc_end_phase - gc_beg_phase) > 180:
                    if gc_end_phase > gc_beg_phase:
                        gc_end_phase -= 360
                    else:
                        gc_beg_phase -= 360

                gc_alpha = 1.0

            else:

                # If it's a sporadic, find the direction to which the meteor should extend
                gc_end_phase = meteor_obj.findGCPhase(meteor_obj.ra_array[-1], \
                    meteor_obj.dec_array[-1])[0]%360

                # Find the correct direction
                if (gc_beg_phase - gc_end_phase) % 360 > (gc_end_phase -
                                                          gc_beg_phase) % 360:
                    gc_end_phase = gc_beg_phase - 170

                else:
                    gc_end_phase = gc_beg_phase + 170

                gc_alpha = 0.7

            # Store great circle beginning and end phase
            meteor_obj.gc_beg_phase = gc_beg_phase
            meteor_obj.gc_end_phase = gc_end_phase

            # Get phases 180 deg before the meteor
            phase_angles = np.linspace(gc_end_phase, gc_beg_phase, 100) % 360

            # Compute RA/Dec of points on the great circle
            ra_gc, dec_gc = meteor_obj.sampleGC(phase_angles)

            # Cull all points below the horizon
            azim_gc, elev_gc = raDec2AltAz(ra_gc, dec_gc, meteor_obj.jdt_ref, config.latitude, \
                config.longitude)
            temp_arr = np.c_[ra_gc, dec_gc]
            temp_arr = temp_arr[elev_gc > 0]
            ra_gc, dec_gc = temp_arr.T

            # Plot the great circle fitted on the radiant
            gc_color = get_shower_color(shower)
            allsky_plot.plot(ra_gc,
                             dec_gc,
                             linestyle='dotted',
                             color=gc_color,
                             alpha=gc_alpha,
                             linewidth=1)

            # Plot the point closest to the shower radiant
            if shower is not None:
                allsky_plot.plot(ra_gc[0],
                                 dec_gc[0],
                                 color='r',
                                 marker='+',
                                 ms=5,
                                 mew=1)

                # Store shower radiant point
                meteor_obj.radiant_ra = ra_gc[0]
                meteor_obj.radiant_dec = dec_gc[0]

            ### ###

        ### Plot all showers ###

        # Find unique showers and their apparent radiants computed at highest radiant elevation
        # (otherwise the apparent radiants can be quite off)
        shower_dict = {}
        for key in associations:
            meteor_obj, shower = associations[key]

            if shower is None:
                continue

            # If the shower name is in dict, find the shower with the highest radiant elevation
            if shower.name in shower_dict:
                if shower.elev > shower_dict[shower.name].elev:
                    shower_dict[shower.name] = shower

            else:
                shower_dict[shower.name] = shower

        # Plot the location of shower radiants
        for shower_name in shower_dict:

            shower = shower_dict[shower_name]

            heading_arr = np.linspace(0, 360, 50)

            # Compute coordinates on a circle around the given RA, Dec
            ra_circle, dec_circle = sphericalPointFromHeadingAndDistance(shower.ra, shower.dec, \
                heading_arr, config.shower_max_radiant_separation)

            # Plot the shower circle
            allsky_plot.plot(ra_circle,
                             dec_circle,
                             color=colors_by_name[shower_name])

            # Plot the shower name
            x_text, y_text = allsky_plot.raDec2XY(shower.ra, shower.dec)
            allsky_plot.ax.text(x_text, y_text, shower.name, color='w', size=8, va='center', \
                ha='center', zorder=6)

        # Plot the station name and solar longitude range
        allsky_plot.ax.text(-180,
                            89,
                            "{:s}".format(cam_code),
                            color='w',
                            family='monospace')

        # Get a list of JDs of meteors
        jd_list = [associations[key][0].jdt_ref for key in associations]

        if len(jd_list):

            # Get the range of solar longitudes
            jd_min = min(jd_list)
            sol_min = np.degrees(jd2SolLonSteyaert(jd_min))
            jd_max = max(jd_list)
            sol_max = np.degrees(jd2SolLonSteyaert(jd_max))

            # Plot the date and solar longitude range
            date_sol_beg = u"Beg: {:s} (sol = {:.2f}\u00b0)".format(
                jd2Date(jd_min, dt_obj=True).strftime("%Y%m%d %H:%M:%S"),
                sol_min)
            date_sol_end = u"End: {:s} (sol = {:.2f}\u00b0)".format(
                jd2Date(jd_max, dt_obj=True).strftime("%Y%m%d %H:%M:%S"),
                sol_max)

            allsky_plot.ax.text(-180,
                                85,
                                date_sol_beg,
                                color='w',
                                family='monospace')
            allsky_plot.ax.text(-180,
                                81,
                                date_sol_end,
                                color='w',
                                family='monospace')
            allsky_plot.ax.text(-180,
                                77,
                                "-" * len(date_sol_end),
                                color='w',
                                family='monospace')

            # Plot shower counts
            for i, (shower, count) in enumerate(shower_counts):

                if shower is not None:
                    shower_name = shower.name
                else:
                    shower_name = "..."

                allsky_plot.ax.text(-180, 73 - i*4, "{:s}: {:d}".format(shower_name, count), color='w', \
                    family='monospace')

            ### ###

            # Plot yearly meteor shower activity
            if plot_activity:

                # Plot the activity diagram
                generateActivityDiagram(config, shower_list, ax_handle=ax_activity, \
                    sol_marker=[sol_min, sol_max], colors=colors_by_name)

        # Save plot and text file
        if save_plot:

            dir_path, ftpdetectinfo_name = os.path.split(ftpdetectinfo_path)
            ftpdetectinfo_base_name = ftpdetectinfo_name.replace(
                'FTPdetectinfo_', '').replace('.txt', '')
            plot_name = ftpdetectinfo_base_name + '_radiants.png'

            # Increase figure size
            allsky_plot.fig.set_size_inches(18, 9, forward=True)

            allsky_plot.beautify()

            plt.savefig(os.path.join(dir_path, plot_name),
                        dpi=100,
                        facecolor='k')

            # Save the text file with shower info
            if len(jd_list):
                with open(
                        os.path.join(dir_path, ftpdetectinfo_base_name +
                                     "_radiants.txt"), 'w') as f:

                    # Print station code
                    f.write("# RMS single station association\n")
                    f.write("# \n")
                    f.write("# Station: {:s}\n".format(cam_code))

                    # Print date range
                    f.write(
                        "#                    Beg          |            End            \n"
                    )
                    f.write(
                        "#      -----------------------------------------------------\n"
                    )
                    f.write("# Date | {:24s} | {:24s} \n".format(jd2Date(jd_min, \
                        dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), jd2Date(jd_max, \
                        dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f")))
                    f.write("# Sol  | {:>24.2f} | {:>24.2f} \n".format(
                        sol_min, sol_max))

                    # Write shower counts
                    f.write("# \n")
                    f.write("# Shower counts:\n")
                    f.write("# --------------\n")
                    f.write("# Code, Count, IAU link\n")

                    for i, (shower, count) in enumerate(shower_counts):

                        if shower is not None:
                            shower_name = shower.name

                            # Create link to the IAU database of showers
                            iau_link = "https://www.ta3.sk/IAUC22DB/MDC2007/Roje/pojedynczy_obiekt.php?kodstrumienia={:05d}".format(
                                shower.iau_code)

                        else:
                            shower_name = "..."
                            iau_link = "None"

                        f.write("# {:>4s}, {:>5d}, {:s}\n".format(
                            shower_name, count, iau_link))

                    f.write("# \n")
                    f.write("# Meteor parameters:\n")
                    f.write("# ------------------\n")
                    f.write(
                        "#          Date And Time,      Beg Julian date,     La Sun, Shower, RA beg, Dec beg, RA end, Dec end, RA rad, Dec rad, GC theta0,  GC phi0, GC beg phase, GC end phase,  Mag\n"
                    )

                    # Create a sorted list of meteor associations by time
                    associations_list = [
                        associations[key] for key in associations
                    ]
                    associations_list = sorted(associations_list,
                                               key=lambda x: x[0].jdt_ref)

                    # Write out meteor parameters
                    for meteor_obj, shower in associations_list:

                        # Find peak magnitude
                        if np.any(meteor_obj.mag_array):
                            peak_mag = "{:+.1f}".format(
                                np.min(meteor_obj.mag_array))

                        else:
                            peak_mag = "None"

                        if shower is not None:

                            f.write("{:24s}, {:20.12f}, {:>10.6f}, {:>6s}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:9.3f}, {:8.3f}, {:12.3f}, {:12.3f}, {:4s}\n".format(jd2Date(meteor_obj.jdt_ref, dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), \
                                meteor_obj.jdt_ref, meteor_obj.lasun, shower.name, \
                                meteor_obj.ra_array[0]%360, meteor_obj.dec_array[0], \
                                meteor_obj.ra_array[-1]%360, meteor_obj.dec_array[-1], \
                                meteor_obj.radiant_ra%360, meteor_obj.radiant_dec, \
                                np.degrees(meteor_obj.theta0), np.degrees(meteor_obj.phi0), \
                                meteor_obj.gc_beg_phase, meteor_obj.gc_end_phase, peak_mag))

                        else:
                            f.write("{:24s}, {:20.12f}, {:>10.6f}, {:>6s}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:>6s}, {:>7s}, {:9.3f}, {:8.3f}, {:12.3f}, {:12.3f}, {:4s}\n".format(jd2Date(meteor_obj.jdt_ref, dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), \
                                meteor_obj.jdt_ref, meteor_obj.lasun, '...', meteor_obj.ra_array[0]%360, \
                                meteor_obj.dec_array[0], meteor_obj.ra_array[-1]%360, \
                                meteor_obj.dec_array[-1], "None", "None", np.degrees(meteor_obj.theta0), \
                                np.degrees(meteor_obj.phi0), meteor_obj.gc_beg_phase, \
                                meteor_obj.gc_end_phase, peak_mag))

        if show_plot:
            allsky_plot.show()

        else:
            plt.clf()
            plt.close()

    return associations, shower_counts
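
A usage sketch; the config loader import and the FTPdetectinfo path are assumptions.

# Hypothetical usage sketch: associate all meteors from one FTPdetectinfo file with the
# active showers and save the radiant plot next to the input file.
import RMS.ConfigReader as cr    # assumed import path for the config reader

config = cr.parse(".config")
associations, shower_counts = showerAssociation(
    config,
    ["/path/to/night_dir/FTPdetectinfo_XX0001_20200801_012345_678910.txt"],
    save_plot=True, plot_activity=True)

# Print how many meteors were matched to each shower ('...' marks sporadics)
for shower, count in shower_counts:
    print(shower.name if shower is not None else '...', count)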
Example #11
def add_fffits_metadata(ff_filename, config, platepars_recalibrated,
                        fallback_platepar):
    """
    Add FITS metadata and WCS to FF files generated by RMS

    Args:
        ff_filename (str): full or relative path to FF file
        config (RMS.Config): config instance
        platepars_recalibrated (dict): dictionary with recalibrated platepars
        fallback_platepar (RMS.Platepar): platepar with fitted stars

    Returns:
        None
    """
    ff_basename = os.path.basename(ff_filename)
    platepar_recalibrated = Platepar()
    try:
        platepar_data = platepars_recalibrated[ff_basename]
        with open("platepar_tmp.cal", "w") as f:
            json.dump(platepar_data, f)
        platepar_recalibrated.read("platepar_tmp.cal")
    except (FileNotFoundError, KeyError):
        platepar_recalibrated = fallback_platepar
        logger.warning(f"Using non-recalibrated platepar for {ff_basename}")

    fftime = getMiddleTimeFF(ff_basename, config.fps)

    fit_xy = np.array(fallback_platepar.star_list)[:, 1:3]

    _, fit_ra, fit_dec, _ = xyToRaDecPP([fftime] * len(fit_xy),
                                        fit_xy[:, 0],
                                        fit_xy[:, 1], [1] * len(fit_xy),
                                        platepar_recalibrated,
                                        extinction_correction=False)

    x0 = platepar_recalibrated.X_res / 2
    y0 = platepar_recalibrated.Y_res / 2
    _, ra0, dec0, _ = xyToRaDecPP([fftime], [x0], [y0], [1],
                                  platepar_recalibrated,
                                  extinction_correction=False)
    w = fit_wcs(fit_xy[:, 0],
                fit_xy[:, 1],
                fit_ra,
                fit_dec,
                x0,
                y0,
                ra0[0],
                dec0[0],
                5,
                projection="ZEA")

    hdu_list = fits.open(ff_filename, scale_back=True)
    obstime = Time(filenameToDatetime(ff_basename))

    header_meta = {}
    header_meta["OBSERVER"] = config.stationID.strip()
    header_meta["INSTRUME"] = "Global Meteor Network"
    header_meta["MJD-OBS"] = obstime.mjd
    header_meta["DATE-OBS"] = obstime.fits
    header_meta["NFRAMES"] = 256
    header_meta["EXPTIME"] = 256 / config.fps
    header_meta["SITELONG"] = round(config.longitude, 2)
    header_meta["SITELAT"] = round(config.latitude, 2)

    for hdu in hdu_list:
        # The primary HDU is not an image, so it should not get the WCS
        if hdu.header["NAXIS"] == 0:
            new_header = Header()
        else:
            new_header = w.to_fits(relax=True)[0].header

        for key, value in header_meta.items():
            new_header.append((key, value))

        for key, value in new_header.items():
            if key in hdu.header:
                continue
            hdu.header[key] = value

    hdu_list.writeto(ff_filename, overwrite=True)
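
A usage sketch; the platepars JSON name, the platepar file name and the import paths are assumptions.

# Hypothetical usage sketch: write WCS and basic observation metadata into one FITS FF file,
# using a dictionary of recalibrated platepars loaded from JSON.
import json
import RMS.ConfigReader as cr    # assumed import path for the config reader

config = cr.parse(".config")

with open("platepars_all_recalibrated.json") as f:
    platepars_recalibrated = json.load(f)

fallback_platepar = Platepar()
fallback_platepar.read("platepar_cmn2010.cal")

add_fffits_metadata("FF_XX0001_20200801_012345_678_0000000.fits", config,
                    platepars_recalibrated, fallback_platepar)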