Example #1
def dumpAsEvFiles(dir_path, file_name):
    """ Dump the given pickle file as UWO-style ev_* file. """

    # Load the pickled trajectory
    traj = loadPickle(dir_path, file_name)

    # Dump the results as UWO-style ev files

    year, month, day, hour, minute, second, _ = jd2Date(traj.jdt_ref)

    for i, obs in enumerate(traj.observations):

        # Construct file name
        date_str = "{:4d}{:02d}{:02d}_{:02d}{:02d}{:02d}A_{:s}".format(year, month, day, hour, minute, second, \
            obs.station_id)

        ev_file_name = 'ev_' + date_str + '.txt'

        # Convert azimuth and altitude to theta/phi
        theta_data = np.pi / 2.0 - obs.elev_data
        phi_data = (np.pi / 2.0 - obs.azim_data) % (2 * np.pi)

        # Write the ev_* file
        writeEvFile(dir_path, ev_file_name, traj.jdt_ref, str(i), obs.lat,
                    obs.lon, obs.ele, obs.time_data, theta_data, phi_data)
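For reference, theta above is the zenith angle and phi is the azimuth measured +N of due E (the UWO ev convention). A minimal sketch of the round-trip conversion; the helper names are illustrative and not part of the module:

import numpy as np

def altAzToThetaPhi(azim, elev):
    """ Convert azimuth (+E of N) and elevation (radians) to zenith angle theta and phi (radians). """
    theta = np.pi / 2.0 - elev
    phi = (np.pi / 2.0 - azim) % (2 * np.pi)
    return theta, phi

def thetaPhiToAltAz(theta, phi):
    """ Inverse conversion; note that the transform is its own inverse. """
    elev = np.pi / 2.0 - theta
    azim = (np.pi / 2.0 - phi) % (2 * np.pi)
    return azim, elev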
Example #2
    def __repr__(self):

        out_str = ''

        out_str += 'Station ID = ' + str(self.station_id) + '\n'
        out_str += 'JD ref = {:f}'.format(self.jdt_ref) + '\n'
        out_str += 'DT ref = {:s}'.format(jd2Date(self.jdt_ref, \
            dt_obj=True).strftime("%Y/%m/%d-%H%M%S.%f")) + '\n'
        out_str += 'Lat = {:f}, Lon = {:f}, Ht = {:f} m'.format(np.degrees(self.latitude), 
            np.degrees(self.longitude), self.height) + '\n'
        out_str += 'FPS = {:f}'.format(self.fps) + '\n'

        out_str += 'Points:\n'
        out_str += 'Time, X, Y, azimuth, elevation, RA, Dec, Mag:\n'

        for point_time, x, y, azim, elev, ra, dec, mag in zip(self.time_data, self.x_data, self.y_data, \
            self.azim_data, self.elev_data, self.ra_data, self.dec_data, self.mag_data):

            if mag is None:
                mag = 0

            out_str += '{:.4f}, {:.2f}, {:.2f}, {:.2f}, {:.2f}, {:.2f}, {:+.2f}, {:.2f}\n'.format(point_time,\
                x, y, np.degrees(azim), np.degrees(elev), np.degrees(ra), np.degrees(dec), mag)


        return out_str
Example #3
    def getTrajTimePairs(self, traj_reduced, unpaired_observations,
                         max_toffset):
        """ Find unpaired observations which are close in time to the given trajectory. """

        found_traj_obs_pairs = []

        # Compute the middle time of the trajectory as reference time
        traj_mid_dt = jd2Date(
            (traj_reduced.rbeg_jd + traj_reduced.rend_jd) / 2, dt_obj=True)

        # Go through all unpaired observations
        for met_obs in unpaired_observations:

            # Skip all stations that are already participating in the trajectory solution
            if (met_obs.station_code in traj_reduced.participating_stations) or \
                (met_obs.station_code in traj_reduced.ignored_stations):

                continue

            # Take observations which are within the given time window from the trajectory
            if abs((met_obs.mean_dt -
                    traj_mid_dt).total_seconds()) <= max_toffset:
                found_traj_obs_pairs.append(met_obs)

        return found_traj_obs_pairs
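A hedged usage sketch (the owning object is called correlator here purely for illustration; met_obs.mean_dt is a datetime and max_toffset is in seconds, as above):

# Hypothetical usage: find unpaired observations within 10 s of the trajectory midpoint
candidate_obs = correlator.getTrajTimePairs(traj_reduced, unpaired_observations, 10.0)
for met_obs in candidate_obs:
    print(met_obs.station_code, met_obs.mean_dt)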
Example #4
    def generateTrajOutputDirectoryPath(self, traj, make_dirs=False):
        """ Generate a path to the trajectory output directory. 
        
        Keyword arguments:
            make_dirs: [bool] Make the tree of output directories. False by default.
        """

        # Generate a list of station codes
        if isinstance(traj, TrajectoryReduced):
            # If the reduced trajectory object is given
            station_list = traj.participating_stations

        else:
            # If the full trajectory object is given
            station_list = [
                obs.station_id for obs in traj.observations
                if obs.ignore_station is False
            ]

        # Datetime of the reference trajectory time
        dt = jd2Date(traj.jdt_ref, dt_obj=True)

        # Year directory
        year_dir = dt.strftime("%Y")

        # Month directory
        month_dir = dt.strftime("%Y%m")

        # Date directory
        date_dir = dt.strftime("%Y%m%d")

        # Name of the trajectory directory
        traj_dir = dt.strftime("%Y%m%d_%H%M%S.%f")[:-3] + "_" \
            + "_".join(list(set([stat_id[:2] for stat_id in station_list])))

        # Path to the year directory
        out_path = os.path.join(self.dir_path, OUTPUT_TRAJ_DIR, year_dir)
        if make_dirs:
            mkdirP(out_path)

        # Path to the month directory
        out_path = os.path.join(out_path, month_dir)
        if make_dirs:
            mkdirP(out_path)

        # Path to the date directory
        out_path = os.path.join(out_path, date_dir)
        if make_dirs:
            mkdirP(out_path)

        # Path to the trajectory directory
        out_path = os.path.join(out_path, traj_dir)
        if make_dirs:
            mkdirP(out_path)

        return out_path
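For illustration, a hypothetical trajectory observed on 2020-08-10 by stations US0001 and CA0003 would yield a path like the one sketched below. The OUTPUT_TRAJ_DIR value and the order of the de-duplicated two-letter prefixes depend on the configuration and on Python's set ordering:

# Hypothetical usage and resulting layout
out_path = correlator.generateTrajOutputDirectoryPath(traj, make_dirs=True)
# e.g. <dir_path>/<OUTPUT_TRAJ_DIR>/2020/202008/20200810/20200810_013245.678_CA_US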
Example #5
def saveJSON(dir_path, meteor_list):
    """ Save observations in the RMS JSON format. 
    
    Arguments:
        dir_path: [str] Path to where the JSON files will be saved.
        meteor_list: [list of MeteorObservation objects]

    """

    for meteor in meteor_list:

        # Construct the file name
        dt = jd2Date(meteor.jdt_ref, dt_obj=True)

        json_name = "{:s}_{:s}_picks.json".format(
            dt.strftime("%Y%m%d_%H%M%S.%f"), meteor.station_id)

        # Init JSON dict
        json_dict = {}

        json_dict["fps"] = meteor.fps
        json_dict["jdt_ref"] = meteor.jdt_ref
        json_dict["meastype"] = 1  # ra/dec

        json_dict["centroids_labels"] = [
            "Time (s)", "X (px)", "Y (px)", "RA (deg)", "Dec (deg)",
            "Summed intensity", "Magnitude"
        ]

        # Construct station info
        station = {}
        station["lat"] = np.degrees(meteor.latitude)
        station["lon"] = np.degrees(meteor.longitude)
        station["elev"] = meteor.height
        station["station_id"] = meteor.station_id
        json_dict["station"] = station

        # Construct the JSON data
        centroids = np.c_[meteor.time_data, meteor.x_data, meteor.y_data, np.degrees(meteor.ra_data), \
            np.degrees(meteor.dec_data), np.ones_like(meteor.time_data), meteor.mag_data]
        centroids = centroids.tolist()

        # Sort centroids by relative time
        centroids = sorted(centroids, key=lambda x: x[0])

        json_dict["centroids"] = centroids

        # Save the JSON file
        with open(os.path.join(dir_path, json_name), 'w') as f:
            json.dump(json_dict, f, indent=4, sort_keys=True)
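A minimal sketch of reading one of the saved picks files back, assuming a hypothetical file name and the dir_path passed to saveJSON; the row layout matches centroids_labels above:

import json
import os

with open(os.path.join(dir_path, "20200810_013245.678910_US0001_picks.json")) as f:
    data = json.load(f)

print(data["station"]["station_id"], data["fps"])

# Each centroid row is [time, x, y, RA, Dec, summed intensity, magnitude]
for t, x, y, ra, dec, intens, mag in data["centroids"]:
    print(t, ra, dec, mag)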
Example #6
    def generateTrajOutputDirectoryPath(self, traj):
        """ Generate a path to the trajectory output directory. """

        # Generate a list of station codes
        if isinstance(traj, TrajectoryReduced):
            # If the reduced trajectory object is given
            station_list = traj.participating_stations

        else:
            # If the full trajectory object is given
            station_list = [
                obs.station_id for obs in traj.observations
                if obs.ignore_station is False
            ]

        return os.path.join(self.dir_path, OUTPUT_TRAJ_DIR, \
            jd2Date(traj.jdt_ref, dt_obj=True).strftime("%Y%m%d_%H%M%S.%f")[:-3] + "_" \
            + "_".join(list(set([stat_id[:2] for stat_id in station_list]))))
Example #7
    def __repr__(self):

        out_str  = ""
        out_str += "StationData object:\n"
        out_str += "    JD ref: {:s}\n".format(str(self.jd_ref))
        out_str += "    Time ref: {:s}\n".format(jd2Date(self.jd_ref, \
            dt_obj=True).strftime("%Y-%m-%d %H:%M:%S.%f"))
        out_str += "    Lat: {:.6f} deg\n".format(np.degrees(self.lat))
        out_str += "    Lon: {:.6f} deg\n".format(np.degrees(self.lon))
        out_str += "    Ele: {:.2f} m\n".format(self.height)
        out_str += "\n"
        out_str +="   Time,     Theta,        Phi,    Mag\n"
        for t, th, phi, mag in zip(self.time_data, np.degrees(self.theta_data), np.degrees(self.phi_data), \
            self.mag_data):

            out_str += "{:7.3f}, {:9.6f}, {:10.6f}, {:+6.2f}\n".format(t, th, phi, mag)


        return out_str
Example #8
    def saveTrajectoryResults(self, traj, save_plots):
        """ Save trajectory results to the disk. """

        # Generate the name for the output directory (add list of country codes at the end)
        output_dir = os.path.join(self.dir_path, OUTPUT_TRAJ_DIR, \
            jd2Date(traj.jdt_ref, dt_obj=True).strftime("%Y%m%d_%H%M%S.%f")[:-3] + "_" \
            + "_".join(list(set([obs.station_id[:2] for obs in traj.observations]))))

        # Save the report
        traj.saveReport(output_dir,
                        traj.file_name + '_report.txt',
                        uncertainties=traj.uncertainties,
                        verbose=False)

        # Save the picked trajectory structure
        savePickle(traj, output_dir, traj.file_name + '_trajectory.pickle')

        # Save the plots
        if save_plots:
            traj.save_results = True
            traj.savePlots(output_dir, traj.file_name, show_plots=False)
            traj.save_results = False
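A hedged usage sketch, assuming traj is a solved Trajectory instance held by the correlator:

# Hypothetical usage after a successful solver run; this writes the
# <traj.file_name>_report.txt, ..._trajectory.pickle and plot files into the output directory
correlator.saveTrajectoryResults(traj, save_plots=True)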
Example #9
    def __repr__(self, uncertainties=None, v_init_ht=None):
        """ String to be printed out when the Orbit object is printed. """

        out_str = ""
        #out_str +=  "--------------------\n"

        # Check if the orbit was calculated
        if self.ra_g is not None:
            out_str += "  JD dynamic   = {:20.12f} \n".format(self.jd_dyn)
            out_str += "  LST apparent = {:.10f} deg\n".format(
                np.degrees(self.lst_ref))

        ### Apparent radiant in ECI ###

        out_str += "Radiant (apparent in ECI which includes Earth's rotation, epoch of date):\n"
        out_str += "  R.A.      = {:s} deg\n".format(valueFormat("{:>9.5f}", self.ra, "{:.5f}", \
            uncertainties, 'ra', deg=True))
        out_str += "  Dec       = {:s} deg\n".format(valueFormat("{:>+9.5f}", self.dec, "{:.5f}", \
            uncertainties, 'dec', deg=True))
        out_str += "  Azimuth   = {:s} deg\n".format(valueFormat("{:>9.5f}", self.azimuth_apparent, \
            "{:.5f}", uncertainties, 'azimuth_apparent', deg=True))
        out_str += "  Elevation = {:s} deg\n".format(valueFormat("{:>+9.5f}", self.elevation_apparent, \
            "{:.5f}", uncertainties, 'elevation_apparent', deg=True))
        out_str += "  Vavg      = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_avg, "{:.5f}", \
            uncertainties, 'v_avg', multi=1.0/1000))

        if v_init_ht is not None:
            v_init_ht_str = ' (average above {:.2f} km)'.format(v_init_ht)
        else:
            v_init_ht_str = ''

        out_str += "  Vinit     = {:s} km/s{:s}\n".format(valueFormat("{:>9.5f}", self.v_init, "{:.5f}", \
            uncertainties, 'v_init', multi=1.0/1000), v_init_ht_str)

        ### ###

        ### Apparent radiant in ECEF (no rotation included) ###

        out_str += "Radiant (apparent ground-fixed, epoch of date):\n"
        out_str += "  R.A.      = {:s} deg\n".format(valueFormat("{:>9.5f}", self.ra_norot, "{:.5f}", \
            uncertainties, 'ra_norot', deg=True))
        out_str += "  Dec       = {:s} deg\n".format(valueFormat("{:>+9.5f}", self.dec_norot, "{:.5f}", \
            uncertainties, 'dec_norot', deg=True))
        out_str += "  Azimuth   = {:s} deg\n".format(valueFormat("{:>9.5f}", self.azimuth_apparent_norot, \
            "{:.5f}", uncertainties, 'azimuth_apparent_norot', deg=True))
        out_str += "  Elevation = {:s} deg\n".format(valueFormat("{:>+9.5f}", self.elevation_apparent_norot, \
            "{:.5f}", uncertainties, 'elevation_apparent_norot', deg=True))
        out_str += "  Vavg      = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_avg_norot, "{:.5f}", \
            uncertainties, 'v_avg_norot', multi=1.0/1000))
        out_str += "  Vinit     = {:s} km/s{:s}\n".format(valueFormat("{:>9.5f}", self.v_init_norot, \
            "{:.5f}", uncertainties, 'v_init_norot', multi=1.0/1000), v_init_ht_str)

        ### ###

        # Check if the orbital elements could be calculated, and write them out
        if self.ra_g is not None:

            out_str += "Radiant (geocentric, J2000):\n"
            out_str += "  R.A.   = {:s} deg\n".format(valueFormat("{:>9.5f}", self.ra_g, '{:.5f}', \
                uncertainties, 'ra_g', deg=True))
            out_str += "  Dec    = {:s} deg\n".format(valueFormat("{:>+9.5f}", self.dec_g, '{:.5f}', \
                uncertainties, 'dec_g', deg=True))
            out_str += "  Vg     = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_g, '{:.5f}', \
                uncertainties, 'v_g', multi=1.0/1000))
            out_str += "  Vinf   = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_inf, '{:.5f}', \
                uncertainties, 'v_inf', multi=1.0/1000))
            out_str += "  Zc     = {:s} deg\n".format(valueFormat("{:>9.5f}", self.zc, '{:.5f}', \
                uncertainties, 'zc', deg=True))
            out_str += "  Zg     = {:s} deg\n".format(valueFormat("{:>9.5f}", self.zg, '{:.5f}', \
                uncertainties, 'zg', deg=True))
            out_str += "Radiant (ecliptic geocentric, J2000):\n"
            out_str += "  Lg     = {:s} deg\n".format(valueFormat("{:>9.5f}", self.L_g, '{:.5f}', \
                uncertainties, 'L_g', deg=True))
            out_str += "  Bg     = {:s} deg\n".format(valueFormat("{:>+9.5f}", self.B_g, '{:.5f}', \
                uncertainties, 'B_g', deg=True))
            out_str += "  Vh     = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_h, '{:.5f}', \
                uncertainties, 'v_h', multi=1/1000.0))
            out_str += "Radiant (ecliptic heliocentric, J2000):\n"
            out_str += "  Lh     = {:s} deg\n".format(valueFormat("{:>9.5f}", self.L_h, '{:.5f}', \
                uncertainties, 'L_h', deg=True))
            out_str += "  Bh     = {:s} deg\n".format(valueFormat("{:>+9.5f}", self.B_h, '{:.5f}', \
                uncertainties, 'B_h', deg=True))
            out_str += "  Vh_x   = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_h_x, '{:.5f}', \
                uncertainties, 'v_h_x'))
            out_str += "  Vh_y   = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_h_y, '{:.5f}', \
                uncertainties, 'v_h_y'))
            out_str += "  Vh_z   = {:s} km/s\n".format(valueFormat("{:>9.5f}", self.v_h_z, '{:.5f}', \
                uncertainties, 'v_h_z'))
            out_str += "Orbit:\n"
            out_str += "  La Sun = {:s} deg\n".format(valueFormat("{:>10.6f}", self.la_sun, '{:.6f}', \
                uncertainties, 'la_sun', deg=True))
            out_str += "  a      = {:s} AU\n".format(valueFormat("{:>10.6f}", self.a, '{:.6f}', \
                uncertainties, 'a'))
            out_str += "  e      = {:s}\n".format(valueFormat("{:>10.6f}", self.e, '{:.6f}', \
                uncertainties, 'e'))
            out_str += "  i      = {:s} deg\n".format(valueFormat("{:>10.6f}", self.i, '{:.6f}', \
                uncertainties, 'i', deg=True))
            out_str += "  peri   = {:s} deg\n".format(valueFormat("{:>10.6f}", self.peri, '{:.6f}', \
                uncertainties, 'peri', deg=True))
            out_str += "  node   = {:s} deg\n".format(valueFormat("{:>10.6f}", self.node, '{:.6f}', \
                uncertainties, 'node', deg=True))
            out_str += "  Pi     = {:s} deg\n".format(valueFormat("{:>10.6f}", self.pi, '{:.6f}', \
                uncertainties, 'pi', deg=True))
            if hasattr(self, 'b'):
                out_str += "  b      = {:s} deg\n".format(valueFormat("{:>10.6f}", self.b, '{:.6f}', \
                uncertainties, 'b', deg=True))
            out_str += "  q      = {:s} AU\n".format(valueFormat("{:>10.6f}", self.q, '{:.6f}', \
                uncertainties, 'q'))
            out_str += "  f      = {:s} deg\n".format(valueFormat("{:>10.6f}", self.true_anomaly, '{:.6f}', \
                uncertainties, 'true_anomaly', deg=True))
            out_str += "  M      = {:s} deg\n".format(valueFormat("{:>10.6f}", self.mean_anomaly, '{:.6f}', \
                uncertainties, 'mean_anomaly', deg=True))
            out_str += "  Q      = {:s} AU\n".format(valueFormat("{:>10.6f}", self.Q, '{:.6f}', \
                uncertainties, 'Q'))
            out_str += "  n      = {:s} deg/day\n".format(valueFormat("{:>10.6f}", self.n, '{:.6f}', \
                uncertainties, 'n', deg=True))
            out_str += "  T      = {:s} years\n".format(valueFormat("{:>10.6f}", self.T, '{:.6f}', \
                uncertainties, 'T'))

            if self.last_perihelion is not None:
                out_str += "  Last perihelion JD = {:s}\n".format(valueFormat("{:.4f}", \
                    self.last_perihelion, "{:.4f}", uncertainties, 'last_perihelion', \
                    callable_val=datetime2JD), callable_ci=None)
                out_str += "  Last perihelion dt = {:s}\n".format(valueFormat("{:s}", \
                    self.last_perihelion, "{:.4f} days", uncertainties, 'last_perihelion', \
                    callable_val=lambda x: datetime.datetime.strftime(x, "%Y-%m-%d %H:%M:%S"), \
                    callable_ci=lambda x: datetime.datetime.strftime(jd2Date(x, dt_obj=True), \
                        "%Y-%m-%d %H:%M:%S")))
            else:
                out_str += "  Last perihelion JD = NaN \n"
                out_str += "  Last perihelion dt = NaN \n"

            out_str += "  Tj     = {:s}\n".format(valueFormat("{:>10.6f}", self.Tj, '{:.6f}', \
                uncertainties, 'Tj'))

            out_str += "Shower association:\n"

            # Perform shower association
            shower_obj = associateShower(self.la_sun, self.L_g, self.B_g,
                                         self.v_g)
            if shower_obj is None:
                shower_no = -1
                shower_code = '...'
            else:
                shower_no = shower_obj.IAU_no
                shower_code = shower_obj.IAU_code

            out_str += "  IAU No.  = {:>4d}\n".format(shower_no)
            out_str += "  IAU code = {:>4s}\n".format(shower_code)

        return out_str
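The valueFormat helper used throughout this printout is defined elsewhere in the module. The sketch below shows only its assumed semantics (format the value, optionally rescaling or converting radians to degrees, then append the uncertainty when one is available); treat it as illustrative, not as the actual implementation:

import numpy as np

def valueFormat(value_format, value, std_format, std_object, name, multi=1.0, deg=False, \
    callable_val=None, callable_ci=None):
    """ Sketch (assumed semantics): format a value and append its uncertainty, if available. """

    if deg:
        multi *= np.degrees(1.0)

    # Apply an optional transform (e.g. datetime -> JD), otherwise rescale the value
    if callable_val is not None:
        val = callable_val(value)
    else:
        val = value*multi

    out = value_format.format(val)

    # Append the uncertainty if one was estimated
    if (std_object is not None) and hasattr(std_object, name):

        ci = getattr(std_object, name)

        # Optionally transform the uncertainty as well, otherwise rescale it
        if callable_ci is not None:
            ci = callable_ci(ci)
        else:
            ci = ci*multi

        out += " +/- " + std_format.format(ci)

    return out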
Example #10
def projectNarrowPicks(dir_path, met, traj, traj_uncert, metal_mags,
                       frag_info):
    """ Projects picks done in the narrow-field to the given trajectory. """

    # Adjust initial velocity
    frag_v_init = traj.v_init + frag_info.v_init_adjust

    # List for computed values to be stored in a file
    computed_values = []

    # Generate the file name prefix from the time (take from trajectory)
    file_name_prefix = traj.file_name

    # List that holds datetimes of fragmentations, used for the light curve plot
    fragmentations_datetime = []

    # Go through picks from all sites
    for site_no in met.picks:

        # Extract site exact plate
        exact = met.exact_plates[site_no]

        # Extract site picks
        picks = np.array(met.picks[site_no])

        # Skip the site if there are no picks
        if not len(picks):
            continue

        print()
        print('Processing site:', site_no)

        # Find unique fragments
        fragments = np.unique(picks[:, 1])

        # If the fragmentation dictionary is empty, generate one
        if frag_info.frag_dict is None:
            frag_info.frag_dict = {
                float(i): i + 1
                for i in range(len(fragments))
            }

        # A list with results of finding the closest point on the trajectory
        cpa_list = []

        # Go through all fragments and calculate the coordinates of the closest points on the trajectory and
        # the line of sight
        for frag in fragments:

            # Take only those picks from current fragment
            frag_picks = picks[picks[:, 1] == frag]

            # Sort by frame
            frag_picks = frag_picks[np.argsort(frag_picks[:, 0])]

            # Extract Unix timestamp
            ts = frag_picks[:, 11]
            tu = frag_picks[:, 12]

            # Extract theta, phi
            theta = np.radians(frag_picks[:, 4])
            phi = np.radians(frag_picks[:, 5])

            # Calculate azimuth +E of N
            azim = (np.pi / 2.0 - phi) % (2 * np.pi)

            # Calculate elevation
            elev = np.pi / 2.0 - theta

            # Calculate Julian date from Unix timestamp
            jd_data = np.array([unixTime2JD(s, u) for s, u in zip(ts, tu)])

            # Convert azim/elev to RA/Dec
            ra, dec = altAz2RADec_vect(azim, elev, jd_data, exact.lat,
                                       exact.lon)

            # Convert RA/Dec to ECI direction vector
            x_eci, y_eci, z_eci = raDec2ECI(ra, dec)

            # Convert station geocoords to ECEF coordinates
            x_stat_vect, y_stat_vect, z_stat_vect = geo2Cartesian_vect(exact.lat, exact.lon, exact.elev, \
                jd_data)

            # Find closest points of approach for all measurements
            for jd, x, y, z, x_stat, y_stat, z_stat in np.c_[jd_data, x_eci, y_eci, z_eci, x_stat_vect, \
                y_stat_vect, z_stat_vect]:

                # Find the closest point of approach of every narrow LoS to the wide trajectory
                obs_cpa, rad_cpa, d = findClosestPoints(np.array([x_stat, y_stat, z_stat]), \
                    np.array([x, y, z]), traj.state_vect_mini, traj.radiant_eci_mini)

                # Calculate the height of each fragment for the given time
                rad_lat, rad_lon, height = cartesian2Geo(jd, *rad_cpa)

                cpa_list.append(
                    [frag, jd, obs_cpa, rad_cpa, d, rad_lat, rad_lon, height])

        # Find the coordinates of the first point in time on the trajectory and the first JD
        first_jd_indx = np.argmin([entry[1] for entry in cpa_list])
        jd_ref = cpa_list[first_jd_indx][1]
        rad_cpa_ref = cpa_list[first_jd_indx][3]

        print(jd_ref)

        # Set the beginning time to the beginning of the widefield trajectory
        ref_beg_time = (traj.jdt_ref - jd_ref) * 86400

        length_list = []
        decel_list = []

        # Go through all fragments and calculate the length from the reference point
        for frag in fragments:

            # Select only the data points of the current fragment
            cpa_data = [entry for entry in cpa_list if entry[0] == frag]

            # Lengths of the current fragment
            length_frag = []

            # Go through all projected points on the trajectory
            for entry in cpa_data:

                jd = entry[1]
                rad_cpa = entry[3]
                rad_lat = entry[5]
                rad_lon = entry[6]
                height = entry[7]

                # Calculate the distance from the first point on the trajectory and the given point
                dist = vectMag(rad_cpa - rad_cpa_ref)

                # Calculate the time in seconds
                time_sec = (jd - jd_ref) * 24 * 3600

                length_frag.append([time_sec, dist, rad_lat, rad_lon, height])
                length_list.append(
                    [frag, time_sec, dist, rad_lat, rad_lon, height])

            ### Fit the deceleration model to the length ###
            ##################################################################################################

            length_frag = np.array(length_frag)

            # Extract JDs and lengths into individual arrays
            time_data, length_data, lat_data, lon_data, height_data = length_frag.T

            if frag_info.fit_full_exp_model:

                # Fit the full exp deceleration model

                # First guess of the lag parameters
                p0 = [
                    frag_v_init, 0, 0, traj.jacchia_fit[0], traj.jacchia_fit[1]
                ]

                # Length residuals function
                def _lenRes(params, time_data, length_data):
                    return np.sum(
                        (length_data -
                         exponentialDeceleration(time_data, *params))**2)

                # Fit an exponential to the data
                res = scipy.optimize.basinhopping(_lenRes, p0, \
                    minimizer_kwargs={"method": "BFGS", 'args':(time_data, length_data)}, \
                    niter=1000)
                decel_fit = res.x

            else:

                # Fit only the deceleration parameters

                # First guess of the lag parameters
                p0 = [0, 0, traj.jacchia_fit[0], traj.jacchia_fit[1]]

                # Length residuals function
                def _lenRes(params, time_data, length_data, v_init):
                    return np.sum((length_data - exponentialDeceleration(
                        time_data, v_init, *params))**2)

                # Fit an exponential to the data
                res = scipy.optimize.basinhopping(_lenRes, p0, \
                    minimizer_kwargs={"method": "Nelder-Mead", 'args':(time_data, length_data, frag_v_init)}, \
                    niter=100)
                decel_fit = res.x

                # Add the velocity to the deceleration fit
                decel_fit = np.append(np.array([frag_v_init]), decel_fit)

            decel_list.append(decel_fit)

            print('---------------')
            print('Fragment', frag_info.frag_dict[frag], 'fit:')
            print(decel_fit)

            # plt.plot(time_data, length_data, label='Observed')
            # plt.plot(time_data, exponentialDeceleration(time_data, *decel_fit), label='fit')
            # plt.legend()
            # plt.xlabel('Time (s)')
            # plt.ylabel('Length (m)')
            # plt.title('Fragment {:d} fit'.format(frag_info.frag_dict[frag]))
            # plt.show()

            # # Plot the residuals
            # plt.plot(time_data, length_data - exponentialDeceleration(time_data, *decel_fit))
            # plt.xlabel('Time (s)')
            # plt.ylabel('Length O - C (m)')
            # plt.title('Fragment {:d} fit residuals'.format(frag_info.frag_dict[frag]))
            # plt.show()

            ##################################################################################################

        # Generate a unique color for every fragment
        colors = plt.cm.rainbow(np.linspace(0, 1, len(fragments)))

        # Create a dictionary for every fragment-color pair
        colors_frags = {frag: color for frag, color in zip(fragments, colors)}

        # Make sure lags start at 0
        offset_vel_max = 0

        # Plot the positions of fragments from the beginning to the end
        # Calculate and plot the lag of all fragments
        for frag, decel_fit in zip(fragments, decel_list):

            # Select only the data points of the current fragment
            length_frag = [entry for entry in length_list if entry[0] == frag]

            # Find the last time of the fragment appearance
            last_time = max([entry[1] for entry in length_frag])

            # Extract the observed data
            _, time_data, length_data, lat_data, lon_data, height_data = np.array(
                length_frag).T

            # Plot the positions of fragments from the first time to the end, using fitted parameters
            # The lag is calculated by subtracting an "average" velocity length from the observed length
            time_array = np.linspace(ref_beg_time, last_time, 1000)
            plt.plot(exponentialDeceleration(time_array, *decel_fit) - exponentialDeceleration(time_array, \
                frag_v_init, 0, offset_vel_max, 0, 0), time_array, linestyle='--', color=colors_frags[frag], \
                linewidth=0.75)

            # Plot the observed data
            fake_lag = length_data - exponentialDeceleration(
                time_data, frag_v_init, 0, offset_vel_max, 0, 0)
            plt.plot(fake_lag,
                     time_data,
                     color=colors_frags[frag],
                     linewidth=0.75)

            # Plot the fragment number at the end of each lag
            plt.text(fake_lag[-1] - 10, time_data[-1] + 0.02, str(frag_info.frag_dict[frag]), color=colors_frags[frag], \
                size=7, va='center', ha='right')

            # Check if the fragment has a fragmentation point and plot it
            if site_no in frag_info.fragmentation_points:
                if frag_info.frag_dict[frag] in frag_info.fragmentation_points[
                        site_no]:

                    # Get the lag of the fragmentation point
                    frag_point_time, fragments_list = frag_info.fragmentation_points[
                        site_no][frag_info.frag_dict[frag]]
                    frag_point_lag = exponentialDeceleration(frag_point_time, *decel_fit) \
                        - exponentialDeceleration(frag_point_time, frag_v_init, 0, offset_vel_max, 0, 0)

                    fragments_list = list(map(str, fragments_list))

                    # Save the fragmentation time in the list for light curve plot
                    fragmentations_datetime.append([jd2Date(jd_ref + frag_point_time/86400, dt_obj=True), \
                        fragments_list])

                    # Plot the fragmentation point
                    plt.scatter(frag_point_lag, frag_point_time, s=20, zorder=4, color=colors_frags[frag], \
                        edgecolor='k', linewidth=0.5, label='Fragmentation: ' + ",".join(fragments_list))

        # Plot reference time
        plt.title('Reference time: ' + str(jd2Date(jd_ref, dt_obj=True)))

        plt.gca().invert_yaxis()
        plt.grid(color='0.9')

        plt.xlabel('Lag (m)')
        plt.ylabel('Time (s)')

        plt.ylim(ymax=ref_beg_time)

        plt.legend()

        plt.savefig(os.path.join(dir_path, file_name_prefix \
            + '_fragments_deceleration_site_{:s}.png'.format(str(site_no))), dpi=300)

        plt.show()

        time_min = np.inf
        time_max = -np.inf
        ht_min = np.inf
        ht_max = -np.inf

        ### PLOT DYNAMIC PRESSURE FOR EVERY FRAGMENT
        for frag, decel_fit in zip(fragments, decel_list):

            # Select only the data points of the current fragment
            length_frag = [entry for entry in length_list if entry[0] == frag]

            # Extract the observed data
            _, time_data, length_data, lat_data, lon_data, height_data = np.array(
                length_frag).T

            # Fit a linear dependence of time vs. height
            line_fit, _ = scipy.optimize.curve_fit(lineFunc, time_data,
                                                   height_data)

            # Get the time and height limits
            time_min = min(time_min, min(time_data))
            time_max = max(time_max, max(time_data))
            ht_min = min(ht_min, min(height_data))
            ht_max = max(ht_max, max(height_data))

            ### CALCULATE OBSERVED DYN PRESSURE

            # Get the velocity at every point in time
            velocities = exponentialDecelerationVel(time_data, *decel_fit)

            # Calculate the dynamic pressure
            dyn_pressure = dynamicPressure(lat_data, lon_data, height_data,
                                           jd_ref, velocities)

            ###

            # Plot Observed height vs. dynamic pressure
            plt.plot(dyn_pressure / 10**3,
                     height_data / 1000,
                     color=colors_frags[frag],
                     zorder=3,
                     linewidth=0.75)

            # Plot the fragment number at the end of each lag
            plt.text(dyn_pressure[-1]/10**3, height_data[-1]/1000 - 0.02, str(frag_info.frag_dict[frag]), \
                color=colors_frags[frag], size=7, va='top', zorder=3)

            ### CALCULATE MODELLED DYN PRESSURE

            time_array = np.linspace(ref_beg_time, max(time_data), 1000)

            # Calculate the modelled height
            height_array = lineFunc(time_array, *line_fit)

            # Get the time and height limits
            time_min = min(time_min, min(time_array))
            time_max = max(time_max, max(time_array))
            ht_min = min(ht_min, min(height_array))
            ht_max = max(ht_max, max(height_array))

            # Get the atmospheric densities at every height
            atm_dens_model = getAtmDensity_vect(np.zeros_like(time_array) + np.mean(lat_data), \
                np.zeros_like(time_array) + np.mean(lon_data), height_array, jd_ref)

            # Get the velocity at every point in time
            velocities_model = exponentialDecelerationVel(
                time_array, *decel_fit)

            # Calculate the dynamic pressure
            dyn_pressure_model = atm_dens_model * DRAG_COEFF * velocities_model**2

            ###

            # Plot Modelled height vs. dynamic pressure
            plt.plot(dyn_pressure_model/10**3, height_array/1000, color=colors_frags[frag], zorder=3, \
                linewidth=0.75, linestyle='--')

            # Check if the fragment has a fragmentation point and plot it
            if site_no in frag_info.fragmentation_points:
                if frag_info.frag_dict[frag] in frag_info.fragmentation_points[
                        site_no]:

                    # Get the lag of the fragmentation point
                    frag_point_time, fragments_list = frag_info.fragmentation_points[
                        site_no][frag_info.frag_dict[frag]]

                    # Get the fragmentation height
                    frag_point_height = lineFunc(frag_point_time, *line_fit)

                    # Calculate the velocity at fragmentation
                    frag_point_velocity = exponentialDecelerationVel(
                        frag_point_time, *decel_fit)

                    # Calculate the atm. density at the fragmentation point
                    frag_point_atm_dens = getAtmDensity(np.mean(lat_data), np.mean(lon_data), frag_point_height, \
                        jd_ref)

                    # Calculate the dynamic pressure at fragmentation in kPa
                    frag_point_dyn_pressure = frag_point_atm_dens * DRAG_COEFF * frag_point_velocity**2
                    frag_point_dyn_pressure /= 10**3

                    # Compute height in km
                    frag_point_height_km = frag_point_height / 1000

                    fragments_list = map(str, fragments_list)

                    # Plot the fragmentation point
                    plt.scatter(frag_point_dyn_pressure, frag_point_height_km, s=20, zorder=5, \
                        color=colors_frags[frag], edgecolor='k', linewidth=0.5, \
                        label='Fragmentation: ' + ",".join(fragments_list))

                    ### Plot the errorbar

                    # Compute the lower velocity estimate
                    stddev_multiplier = 2.0

                    # Check if the uncertainty exists
                    if traj_uncert.v_init is None:
                        v_init_uncert = 0
                    else:
                        v_init_uncert = traj_uncert.v_init

                    # Compute the range of velocities
                    lower_vel = frag_point_velocity - stddev_multiplier * v_init_uncert
                    higher_vel = frag_point_velocity + stddev_multiplier * v_init_uncert

                    # Assume the atmosphere density can vary +/- 25% (Gunther's analysis)
                    lower_atm_dens = 0.75 * frag_point_atm_dens
                    higher_atm_dens = 1.25 * frag_point_atm_dens

                    # Compute lower and higher range for dyn pressure in kPa
                    lower_frag_point_dyn_pressure = (
                        lower_atm_dens * DRAG_COEFF * lower_vel**2) / 10**3
                    higher_frag_point_dyn_pressure = (
                        higher_atm_dens * DRAG_COEFF * higher_vel**2) / 10**3

                    # Compute errors
                    lower_error = abs(frag_point_dyn_pressure -
                                      lower_frag_point_dyn_pressure)
                    higher_error = abs(frag_point_dyn_pressure -
                                       higher_frag_point_dyn_pressure)

                    print(frag_point_dyn_pressure, frag_point_height_km, [
                        lower_frag_point_dyn_pressure,
                        higher_frag_point_dyn_pressure
                    ])

                    # Plot the errorbar
                    plt.errorbar(frag_point_dyn_pressure, frag_point_height_km, \
                        xerr=[[lower_error], [higher_error]], fmt='--', capsize=5, zorder=4, \
                        color=colors_frags[frag], label='+/- 25% $\\rho_{atm}$, 2$\\sigma_v$ ')

                    # Save the computed fragmentation values to list
                    # Site, Reference JD, Relative time, Fragment ID, Height, Dyn pressure, Dyn pressure lower \
                    #   bound, Dyn pressure upper bound
                    computed_values.append([site_no, jd_ref, frag_point_time, frag_info.frag_dict[frag], \
                        frag_point_height_km, frag_point_dyn_pressure, lower_frag_point_dyn_pressure, \
                        higher_frag_point_dyn_pressure])

                    ######

        # Plot reference time
        plt.title('Reference time: ' + str(jd2Date(jd_ref, dt_obj=True)))

        plt.xlabel('Dynamic pressure (kPa)')
        plt.ylabel('Height (km)')

        plt.ylim([ht_min / 1000, ht_max / 1000])

        # Remove repeating labels and plot the legend
        handles, labels = plt.gca().get_legend_handles_labels()
        by_label = OrderedDict(zip(labels, handles))
        plt.legend(by_label.values(), by_label.keys())

        plt.grid(color='0.9')

        # Create a secondary y-axis showing the time in seconds
        ax2 = plt.gca().twinx()
        ax2.set_ylim([time_max, time_min])
        ax2.set_ylabel('Time (s)')

        plt.savefig(os.path.join(dir_path, file_name_prefix \
            + '_fragments_dyn_pressures_site_{:s}.png'.format(str(site_no))), dpi=300)

        plt.show()

        ### PLOT DYNAMIC MASSES FOR ALL FRAGMENTS
        for frag, decel_fit in zip(fragments, decel_list):

            # Select only the data points of the current fragment
            length_frag = [entry for entry in length_list if entry[0] == frag]

            # Extract the observed data
            _, time_data, length_data, lat_data, lon_data, height_data = np.array(
                length_frag).T

            # Fit a linear dependence of time vs. height
            line_fit, _ = scipy.optimize.curve_fit(lineFunc, time_data,
                                                   height_data)

            ### CALCULATE OBSERVED DYN MASS

            # Get the velocity at every point in time
            velocities = exponentialDecelerationVel(time_data, *decel_fit)

            decelerations = np.abs(
                exponentialDecelerationDecel(time_data, *decel_fit))

            # Calculate the dynamic mass
            dyn_mass = dynamicMass(frag_info.bulk_density, lat_data, lon_data, height_data, jd_ref, \
                velocities, decelerations)

            ###

            # Plot Observed height vs. dynamic mass
            plt.plot(dyn_mass * 1000,
                     height_data / 1000,
                     color=colors_frags[frag],
                     zorder=3,
                     linewidth=0.75)

            # Plot the fragment number at the end of each lag
            plt.text(dyn_mass[-1]*1000, height_data[-1]/1000 - 0.02, str(frag_info.frag_dict[frag]), \
                color=colors_frags[frag], size=7, va='top', zorder=3)

            ### CALCULATE MODELLED DYN MASS

            time_array = np.linspace(ref_beg_time, max(time_data), 1000)

            # Calculate the modelled height
            height_array = lineFunc(time_array, *line_fit)

            # Get the velocity at every point in time
            velocities_model = exponentialDecelerationVel(
                time_array, *decel_fit)

            # Get the deceleration
            decelerations_model = np.abs(
                exponentialDecelerationDecel(time_array, *decel_fit))

            # Calculate the modelled dynamic mass
            dyn_mass_model = dynamicMass(frag_info.bulk_density,
                np.zeros_like(time_array) + np.mean(lat_data),
                np.zeros_like(time_array) + np.mean(lon_data), height_array, jd_ref, \
                velocities_model, decelerations_model)

            ###

            # Plot Modelled height vs. dynamic mass
            plt.plot(dyn_mass_model*1000, height_array/1000, color=colors_frags[frag], zorder=3, \
                linewidth=0.75, linestyle='--', \
                label='Frag {:d} initial dyn mass = {:.1e} g'.format(frag_info.frag_dict[frag], \
                    1000*dyn_mass_model[0]))

        # Plot reference time
        plt.title('Reference time: ' + str(jd2Date(jd_ref, dt_obj=True)) \
            + ', $\\rho_m = ${:d} $kg/m^3$'.format(frag_info.bulk_density))

        plt.xlabel('Dynamic mass (g)')
        plt.ylabel('Height (km)')

        plt.ylim([ht_min / 1000, ht_max / 1000])

        # Remove repeating labels and plot the legend
        handles, labels = plt.gca().get_legend_handles_labels()
        by_label = OrderedDict(zip(labels, handles))
        plt.legend(by_label.values(), by_label.keys())

        plt.grid(color='0.9')

        # Create a secondary y-axis showing the time in seconds
        ax2 = plt.gca().twinx()
        ax2.set_ylim([time_max, time_min])
        ax2.set_ylabel('Time (s)')

        plt.savefig(os.path.join(dir_path, file_name_prefix \
            + '_fragments_dyn_mass_site_{:s}.png'.format(str(site_no))), dpi=300)

        plt.show()

    # Plot the light curve if the METAL .met file was given
    if (metal_mags is not None):

        # Make sure there are lightcurves in the data
        if len(metal_mags):

            lc_min = np.inf
            lc_max = -np.inf

            # Plot the lightcurves
            for site_entry in metal_mags:

                site_id, time, mags = site_entry

                # Track the minimum and maximum magnitude
                lc_min = np.min([lc_min, np.min(mags)])
                lc_max = np.max([lc_max, np.max(mags)])

                plt.plot(time,
                         mags,
                         marker='+',
                         label='Site: ' + str(site_id),
                         zorder=4,
                         linewidth=1)

            # Plot times of fragmentation
            for frag_dt, fragments_list in fragmentations_datetime:

                # Plot the lines of fragmentation
                y_arr = np.linspace(lc_min, lc_max, 10)
                x_arr = [frag_dt] * len(y_arr)

                plt.plot(x_arr, y_arr, linestyle='--', zorder=4, \
                    label='Fragmentation: ' + ",".join(fragments_list))

            plt.xlabel('Time (UTC)')
            plt.ylabel('Absolute magnitude (@100km)')

            plt.grid()

            plt.gca().invert_yaxis()

            plt.legend()

            ### Format the X axis datetimes
            import matplotlib

            def formatDT(x, pos=None):

                x = matplotlib.dates.num2date(x)

                # Add date to the first tick
                if pos == 0:
                    fmt = '%D %H:%M:%S.%f'
                else:
                    fmt = '%H:%M:%S.%f'

                label = x.strftime(fmt)[:-3]
                label = label.rstrip("0")
                label = label.rstrip(".")

                return label

            from matplotlib.ticker import FuncFormatter

            plt.gca().xaxis.set_major_formatter(FuncFormatter(formatDT))
            plt.gca().xaxis.set_minor_formatter(FuncFormatter(formatDT))

            ###

            plt.tight_layout()

            # Save the figure
            plt.savefig(os.path.join(dir_path, file_name_prefix + '_fragments_light_curve_comparison.png'), \
                dpi=300)

            plt.show()

    # Save the computed values to file
    with open(
            os.path.join(dir_path, file_name_prefix +
                         "_fragments_dyn_pressure_info.txt"), 'w') as f:

        # Site, Reference JD, Relative time, Fragment ID, Height, Dyn pressure, Dyn pressure lower \
        #   bound, Dyn pressure upper bound

        # Write the header
        f.write(
            "# Site,               Ref JD,  Rel time, Frag ID, Ht (km),  DP (kPa),   DP low,  DP high\n"
        )

        # Write computed values for every fragment
        for entry in computed_values:
            f.write(
                " {:>5s}, {:20.12f}, {:+8.6f}, {:7d}, {:7.3f}, {:9.2f}, {:8.2f}, {:8.2f}\n"
                .format(*entry))
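The dynamic pressure plotted above is q = Gamma*rho*v^2, with Gamma the drag coefficient (DRAG_COEFF), rho the atmosphere density and v the fragment velocity. A small worked example with illustrative numbers, substituting a hard-coded density for getAtmDensity:

# Hypothetical fragment at ~40 km height moving at 20 km/s
DRAG_COEFF = 1.0    # assumed value for this sketch; the module-level constant is used above
atm_dens = 4.0e-3   # kg/m^3, illustrative density around 40 km
velocity = 20000.0  # m/s

dyn_pressure = atm_dens*DRAG_COEFF*velocity**2   # Pa

print("q = {:.0f} kPa".format(dyn_pressure/10**3))   # -> q = 1600 kPa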
Example #11
def writeEvFile(dir_path, file_name, jdt_ref, station_id, lat, lon, ele, time_data, theta_data, phi_data, 
    mag_data=None):
    """ Write a UWO style ev_* file.

    Arguments:
        dir_path: [str] Path to the directory where the file will be saved.
        file_name: [str] Name of the file.
        jdt_ref: [float] Julian date for which the time in time_data is 0.
        station_id: [str] Name of the station
        lat: [float] Latitude +N of the station (radians).
        lon: [float] Longitude +E of the station, (radians).
        ele: [float] Height above sea level (meters).
        time_data: [list of floats] A list of times of observations in seconds, where t = 0s is at jdt_ref.
        theta_data: [list of floats] A list of zenith angles of observations (radians).
        phi_data: [list of floats] A list of azimuths (+N of due E) of observations (radians).

    Keyword arguments:
        mag_data: [list of floats] A list of magnitudes. If not given, zeros are written.

    """

    # Convert Julian date to date string
    year, month, day, hour, minute, second, millisecond = jd2Date(jdt_ref)
    date_str = "{:4d}{:02d}{:02d} {:02d}:{:02d}:{:02d}.{:03d}".format(year, month, day, hour, minute, second, \
        int(millisecond))

    # Convert JD to unix time
    unix_time = jd2UnixTime(jdt_ref)


    # Check if the magnitude data was given
    if mag_data is None:
        mag_data = np.zeros_like(time_data)


    with open(os.path.join(dir_path, file_name), 'w') as f:

        f.write("#\n")
        f.write("#   version : WMPL\n")
        f.write("#    num_fr : {:d}\n".format(len(time_data)))
        f.write("#    num_tr : 1\n")
        f.write("#      time : {:s} UTC\n".format(date_str))
        f.write("#      unix : {:f}\n".format(unix_time))
        f.write("#       ntp : LOCK 83613 1068718 130\n")
        f.write("#       seq : 0\n")
        f.write("#       mul : 0 [A]\n")
        f.write("#      site : {:s}\n".format(station_id))
        f.write("#    latlon : {:9.6f} {:+10.6f} {:.1f}\n".format(np.degrees(lat), np.degrees(lon), ele))
        f.write("#      text : WMPL generated\n")
        f.write("#    stream : KT\n")
        f.write("#     plate : none\n")
        f.write("#      geom : 0 0\n")
        f.write("#    filter : 0\n")
        f.write("#\n")
        #f.write("#  fr    time    sum     seq       cx       cy     th        phi      lsp     mag  flag   bak    max\n")
        f.write("#  fr    time    sum     seq       cx       cy     th        phi      lsp     mag  flag\n")


        for fr, (t, theta, phi, mag) in enumerate(zip(time_data, theta_data, phi_data, mag_data)):

            sum_ = 0
            seq = 0
            cx = 0.0
            cy = 0.0
            lsp = 0.0
            flag = "0000"
            bak = 0.0
            max_ = 0.0

            f.write("{:5d} ".format(fr))
            f.write("{:7.3f} ".format(t))
            
            f.write("{:6d} ".format(sum_))
            f.write("{:7d} ".format(seq))
            
            f.write("{:8.3f} ".format(cx))
            f.write("{:8.3f} ".format(cy))
            
            f.write("{:9.5f} ".format(np.degrees(theta)))
            f.write("{:10.5f} ".format(np.degrees(phi)))

            f.write("{:7.3f} ".format(lsp))
            f.write("{:6.2f} ".format(mag))

            f.write("{:5s} ".format(flag))

            #f.write("{:6.2} ".format(bak))
            #f.write("{:6.2} ".format(max_))

            f.write("\n")
Example #12
def solveTrajectoryEv(ev_file_list, solver='original', velmodel=3, **kwargs):
        """ Runs the trajectory solver on UWO style ev file. 
    
        Arguments:
            ev_file_list: [list] A list of paths to ev files.


        Keyword arguments:
            solver: [str] Trajectory solver to use:
                - 'original' (default) - "in-house" trajectory solver implemented in Python
                - 'gural' - Pete Gural's PSO solver
            velmodel: [int] Velocity propagation model for the Gural solver
                0 = constant   v(t) = vinf
                1 = linear     v(t) = vinf - |acc1| * t
                2 = quadratic  v(t) = vinf - |acc1| * t + acc2 * t^2
                3 = exponent   v(t) = vinf - |acc1| * |acc2| * exp( |acc2| * t ) (default)


        Return:
            traj: [Trajectory instance] Solved trajectory
        """


        # Check that there are at least two stations present
        if len(ev_file_list) < 2:
            print('ERROR! The list of ev files does not contain multistation data!')

            return False


        # Load the ev file
        station_data_list = []
        for ev_file_path in ev_file_list:
            
            # Store the ev file contents into a StationData object
            sd = readEvFile(*os.path.split(ev_file_path))

            # Skip bad ev files
            if sd is None:
                print("Skipping {:s}, bad ev file!".format(ev_file_path))
                continue

            station_data_list.append(sd)


        # Check that there are at least two good stations present
        if len(station_data_list) < 2:
            print('ERROR! The list of ev files does not contain at least 2 good ev files!')

            return False


        # Normalize all times to the earliest reference Julian date
        jdt_ref = min([sd_temp.jd_ref for sd_temp in station_data_list])
        for sd in station_data_list:
            for i in range(len(sd.time_data)):
                sd.time_data[i] += (sd.jd_ref - jdt_ref)*86400
            
            sd.jd_ref = jdt_ref


        for sd in station_data_list:
            print(sd)


        # Get the base path of these ev files
        root_path = os.path.dirname(ev_file_list[0])

        # Create a new output directory
        dir_path = os.path.join(root_path, jd2Date(jdt_ref, dt_obj=True).strftime("traj_%Y%m%d_%H%M%S.%f"))
        mkdirP(dir_path)


        if solver == 'original':

            # Init the new trajectory solver object
            traj = Trajectory(jdt_ref, output_dir=dir_path, meastype=4, **kwargs)

        elif solver.startswith('gural'):

            # Extract the velocity model if one is given
            try:
                velmodel = int(solver[-1])

            except: 
                # Default to the exponential model
                velmodel = 3

            # Select extra keyword arguments that are present only for the gural solver
            gural_keys = ['max_toffset', 'nummonte', 'meastype', 'verbose', 'show_plots']
            gural_kwargs = {key: kwargs[key] for key in gural_keys if key in kwargs}

            # Init the new Gural trajectory solver object
            traj = GuralTrajectory(len(station_data_list), jdt_ref, velmodel, verbose=1, \
                output_dir=dir_path, meastype=4, **gural_kwargs)


        # Infill trajectories from each site
        for sd in station_data_list:

            # MC solver
            if solver == 'original':

                traj.infillTrajectory(sd.phi_data, sd.theta_data, sd.time_data, sd.lat, sd.lon, sd.height, \
                    station_id=sd.station_id, magnitudes=sd.mag_data)
            
            # Gural solver
            else:
                traj.infillTrajectory(sd.phi_data, sd.theta_data, sd.time_data, sd.lat, sd.lon, sd.height)


        print('Filling done!')


        # Solve the trajectory
        traj = traj.run()


        # Copy the ev files into the output directory
        for ev_file_path in ev_file_list:
            shutil.copy2(ev_file_path, os.path.join(dir_path, os.path.basename(ev_file_path)))



        return traj
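A hedged usage sketch (the directory path is hypothetical, and max_toffset is assumed to be a valid solver keyword argument, as suggested by the Gural branch above):

import glob
import os

# Hypothetical usage: solve all ev files found in one event directory
ev_file_list = sorted(glob.glob(os.path.join("/path/to/event", "ev_*.txt")))
traj = solveTrajectoryEv(ev_file_list, solver='original', max_toffset=5.0)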
Example #13
def writeMiligInputFile(jdt_ref,
                        meteor_list,
                        file_path,
                        convergation_fact=1.0):
    """ Write the MILIG input file. 

    Arguments:
        jdt_ref: [float] reference Julian date.
        meteor_list: [list] A list of StationData objects
        file_path: [str] Path to the MILIG input file which will be written.

    Keyword arguments:
        convergation_fact: [float] Convergence control factor. Iteration is stopped when the increments of 
            all parameters are smaller than a fixed value. This factor scales those fixed values so that the 
            tolerance can be increased or decreased. By default, evcorr sets this to 0.01 (stricter 
            tolerances) and METAL uses 1.0 (default tolerances).

    Return:
        None
    """

    # Take the first station's longitude for the GST calculation
    lon = meteor_list[0].lon

    # Calculate the Greenwich Sidereal Time
    _, gst = jd2LST(jdt_ref, np.degrees(lon))

    datetime_obj = jd2Date(jdt_ref, dt_obj=True)

    with open(file_path, 'w') as f:

        datetime_str = datetime_obj.strftime("%Y%m%d%H%M%S.%f")[:16]

        # Write the first line with the date, GST and Convergation control factor
        f.write(datetime_str +
                '{:10.3f}{:10.3f}\n'.format(gst, convergation_fact))

        # Go through every meteor
        for meteor in meteor_list:

            # Write station ID and meteor coordinates. The weight of the station is set to 1
            f.write("{:3d}{:+10.5f}{:10.6f}{:5.3f}{:5.2f}\n".format(
                int(meteor.station_id), np.degrees(meteor.lon),
                np.degrees(meteor.lat), meteor.height / 1000.0, 1.0))

            # Go through every point in the meteor
            for i, (azim, zangle, t) in enumerate(zip(meteor.azim_data, meteor.zangle_data, \
                meteor.time_data)):

                last_pick = 0

                # If this is the last point, last_pick is 9
                if i == len(meteor.time_data) - 1:
                    last_pick = 9

                # Write individual meteor points. If the 4th column is 1, the point will be ignored.
                if t < 0:
                    time_format = "{:+8.5f}"
                else:
                    time_format = "{:8.6f}"
                f.write(("{:9.5f}{:8.5}{:3d}{:3d}" + time_format + "\n").format(np.degrees(azim), \
                    np.degrees(zangle), last_pick, 0, t))

        # Flag indicating that the meteor data ends here
        f.write('-1\n')

        # Initial approximations
        f.write(' 0.0 0.0 0.0 0.0 0.0 0.0 0.0\n')

        # Optional parameters
        f.write('RFIX\n \n')
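A hedged usage sketch, assuming jdt_ref and a meteor_list of StationData-like objects carrying the lat, lon, height, azim_data, zangle_data and time_data attributes used above:

# Hypothetical usage: write a MILIG input file with evcorr-style (stricter) tolerances
writeMiligInputFile(jdt_ref, meteor_list, "/tmp/milig_input.txt", convergation_fact=0.01)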
Example #14
def writeOrbitSummaryFile(dir_path, traj_list, P_0m=1210):
    """ Given a list of trajectory files, generate CSV file with the orbit summary. """
    def _uncer(traj,
               str_format,
               std_name,
               multi=1.0,
               deg=False,
               max_val=None,
               max_val_format="{:7.1e}"):
        """ Internal function. Returns the formatted uncertanty, if the uncertanty is given. If not,
            it returns nothing. 

        Arguments:
            traj: [Trajectory instance]
            str_format: [str] String format for the uncertainty.
            std_name: [str] Name of the uncertainty attribute, e.g. if it is 'x', then the uncertainty is 
                stored in uncertainties.x.
    
        Keyword arguments:
            multi: [float] Uncertainty multiplier. 1.0 by default. This is used to scale the uncertainty to
                different units (e.g. from m/s to km/s).
            deg: [bool] Convert radians to degrees if True. False by default.
            max_val: [float] Largest value for which the given format is used. If the value is larger than
                that, max_val_format is used instead.
            max_val_format: [str] Format used when the value exceeds max_val.
            """

        if deg:
            multi *= np.degrees(1.0)

        if traj.uncertainties is not None:
            if hasattr(traj.uncertainties, std_name):

                # Get the value
                val = getattr(traj.uncertainties, std_name) * multi

                # If the value is too big, use scientific notation
                if max_val is not None:
                    if val > max_val:
                        str_format = max_val_format

                return str_format.format(val)

        return "None"

    # Sort trajectories by Julian date
    traj_list = sorted(traj_list, key=lambda x: x.jdt_ref)

    delimiter = "; "

    out_str = ""
    out_str += "# Summary generated on {:s} UTC\n\r".format(
        str(datetime.datetime.utcnow()))

    header = [
        "   Beginning      ", "       Beginning          ", "  IAU", " IAU",
        "  Sol lon ", "  App LST ", "  RAgeo  ", "  +/-  ", "  DECgeo ",
        "  +/-  ", " LAMgeo  ", "  +/-  ", "  BETgeo ", "  +/-  ", "   Vgeo  ",
        "   +/- ", " LAMhel  ", "  +/-  ", "  BEThel ", "  +/-  ", "   Vhel  ",
        "   +/- ", "      a    ", "  +/-  ", "     e    ", "  +/-  ",
        "     i    ", "  +/-  ", "   peri   ", "   +/-  ", "   node   ",
        "   +/-  ", "    Pi    ", "  +/-  ", "     q    ", "  +/-  ",
        "     f    ", "  +/-  ", "     M    ", "  +/-  ", "      Q    ",
        "  +/-  ", "     n    ", "  +/-  ", "     T    ", "  +/-  ",
        "TisserandJ", "  +/-  ", "  RAapp  ", "  +/-  ", "  DECapp ",
        "  +/-  ", " Azim +E ", "  +/-  ", "   Elev  ", "  +/-  ", "  Vinit  ",
        "   +/- ", "   Vavg  ", "   +/- ", "   LatBeg   ", "  +/-  ",
        "   LonBeg   ", "  +/-  ", "  HtBeg ", "  +/-  ", "   LatEnd   ",
        "  +/-  ", "   LonEnd   ", "  +/-  ", "  HtEnd ", "  +/-  ",
        "Duration", " Peak ", " Peak Ht", " Mass kg", "  Qc ", "MedianFitErr",
        "Beg in", "End in", " Num", "     Participating    "
    ]
    head_2 = [
        "  Julian date     ", "        UTC Time          ", "   No", "code",
        "    deg   ", "    deg   ", "   deg   ", " sigma ", "   deg   ",
        " sigma ", "   deg   ", " sigma ", "    deg  ", " sigma ", "   km/s  ",
        "  sigma", "   deg   ", " sigma ", "    deg  ", " sigma ", "   km/s  ",
        "  sigma", "     AU    ", " sigma ", "          ", " sigma ",
        "   deg    ", " sigma ", "    deg   ", "  sigma ", "    deg   ",
        "  sigma ", "   deg    ", " sigma ", "    AU    ", " sigma ",
        "   deg    ", " sigma ", "    deg   ", " sigma ", "     AU    ",
        " sigma ", "  deg/day ", " sigma ", "   years  ", " sigma ",
        "          ", " sigma ", "   deg   ", " sigma ", "   deg   ",
        " sigma ", "of N  deg", " sigma ", "    deg  ", " sigma ", "   km/s  ",
        "  sigma", "   km/s  ", "  sigma", "   +N deg   ", " sigma ",
        "   +E deg   ", " sigma ", "    km  ", " sigma ", "   +N deg   ",
        " sigma ", "   +E deg   ", " sigma ", "    km  ", " sigma ",
        "  sec   ", "AbsMag", "    km  ", "tau=0.7%", " deg ", "   arcsec   ",
        "  FOV ", "  FOV ", "stat", "        stations      "
    ]
    out_str += "# {:s}\n\r".format(delimiter.join(header))
    out_str += "# {:s}\n\r".format(delimiter.join(head_2))

    # Add a horizontal line
    out_str += "# {:s}\n\r".format("; ".join(
        ["-" * len(entry) for entry in header]))

    # Write lines of data
    for traj in traj_list:

        line_info = []

        line_info.append("{:20.12f}".format(traj.jdt_ref))
        line_info.append("{:26s}".format(
            str(jd2Date(traj.jdt_ref, dt_obj=True))))

        # Perform shower association
        shower_obj = associateShowerTraj(traj)
        if shower_obj is None:
            shower_no = -1
            shower_code = '...'
        else:
            shower_no = shower_obj.IAU_no
            shower_code = shower_obj.IAU_code

        line_info.append("{:>5d}".format(shower_no))
        line_info.append("{:>4s}".format(shower_code))

        # Geocentric radiant (equatorial and ecliptic)
        line_info.append("{:>10.6f}".format(np.degrees(traj.orbit.la_sun)))
        line_info.append("{:>10.6f}".format(np.degrees(traj.orbit.lst_ref)))
        line_info.append("{:>9.5f}".format(np.degrees(traj.orbit.ra_g)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'ra_g', deg=True, max_val=100.0)))
        line_info.append("{:>+9.5f}".format(np.degrees(traj.orbit.dec_g)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'dec_g', deg=True, max_val=100.0)))
        line_info.append("{:>9.5f}".format(np.degrees(traj.orbit.L_g)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'L_g', deg=True, max_val=100.0)))
        line_info.append("{:>+9.5f}".format(np.degrees(traj.orbit.B_g)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'B_g', deg=True, max_val=100.0)))
        line_info.append("{:>9.5f}".format(traj.orbit.v_g / 1000))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'v_g', multi=1.0 / 1000)))

        # Ecliptic heliocentric radiant
        line_info.append("{:>9.5f}".format(np.degrees(traj.orbit.L_h)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'L_h', deg=True, max_val=100.0)))
        line_info.append("{:>+9.5f}".format(np.degrees(traj.orbit.B_h)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'B_h', deg=True, max_val=100.0)))
        line_info.append("{:>9.5f}".format(traj.orbit.v_h / 1000))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'v_h', multi=1.0 / 1000)))

        # Orbital elements
        if abs(traj.orbit.a) < 1000:
            line_info.append("{:>11.6f}".format(traj.orbit.a))
        else:
            line_info.append("{:>11.2e}".format(traj.orbit.a))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'a', max_val=100.0)))
        line_info.append("{:>10.6f}".format(traj.orbit.e))
        line_info.append("{:>7s}".format(_uncer(traj, '{:.4f}', 'e')))
        line_info.append("{:>10.6f}".format(np.degrees(traj.orbit.i)))
        line_info.append("{:>7s}".format(_uncer(traj, '{:.4f}', 'i',
                                                deg=True)))
        line_info.append("{:>10.6f}".format(np.degrees(traj.orbit.peri)))
        line_info.append("{:>8s}".format(
            _uncer(traj, '{:.4f}', 'peri', deg=True)))
        line_info.append("{:>10.6f}".format(np.degrees(traj.orbit.node)))
        line_info.append("{:>8s}".format(
            _uncer(traj, '{:.4f}', 'node', deg=True)))
        line_info.append("{:>10.6f}".format(np.degrees(traj.orbit.pi)))
        line_info.append("{:>7s}".format(_uncer(traj, '{:.4f}', 'pi',
                                                deg=True)))
        line_info.append("{:>10.6f}".format(traj.orbit.q))
        line_info.append("{:>7s}".format(_uncer(traj, '{:.4f}', 'q')))
        line_info.append("{:>10.6f}".format(np.degrees(
            traj.orbit.true_anomaly)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'true_anomaly', deg=True)))
        line_info.append("{:>10.6f}".format(np.degrees(
            traj.orbit.mean_anomaly)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'mean_anomaly', deg=True)))
        if abs(traj.orbit.Q) < 1000:
            line_info.append("{:>11.6f}".format(traj.orbit.Q))
        else:
            line_info.append("{:>11.4e}".format(traj.orbit.Q))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'Q', max_val=100.0)))
        line_info.append("{:>10.6f}".format(np.degrees(traj.orbit.n)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'n', deg=True, max_val=100.0)))
        if traj.orbit.T < 1000:
            line_info.append("{:>10.6f}".format(traj.orbit.T))
        else:
            line_info.append("{:>10.4e}".format(traj.orbit.T))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'T', max_val=100.0)))
        line_info.append("{:>10.6f}".format(traj.orbit.Tj))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'Tj', max_val=100.0)))

        # Apparent radiant
        line_info.append("{:>9.5f}".format(np.degrees(traj.orbit.ra_norot)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'ra_norot', deg=True, max_val=100.0)))
        line_info.append("{:>+9.5f}".format(np.degrees(traj.orbit.dec_norot)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'dec_norot', deg=True, max_val=100.0)))
        line_info.append("{:>9.5f}".format(
            np.degrees(traj.orbit.azimuth_apparent_norot)))
        line_info.append("{:>7s}".format(
            _uncer(traj,
                   '{:7.4f}',
                   'azimuth_apparent',
                   deg=True,
                   max_val=100.0)))
        line_info.append("{:>9.5f}".format(
            np.degrees(traj.orbit.elevation_apparent_norot)))
        line_info.append("{:>7s}".format(
            _uncer(traj,
                   '{:7.4f}',
                   'elevation_apparent',
                   deg=True,
                   max_val=100.0)))
        line_info.append("{:>9.5f}".format(traj.orbit.v_init_norot / 1000))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'v_init', multi=1.0 / 1000)))
        line_info.append("{:>9.5f}".format(traj.orbit.v_avg_norot / 1000))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:7.4f}', 'v_avg', multi=1.0 / 1000)))

        # Begin/end point
        line_info.append("{:>12.6f}".format(np.degrees(traj.rbeg_lat)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'rbeg_lat', deg=True)))
        line_info.append("{:>12.6f}".format(np.degrees(traj.rbeg_lon)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'rbeg_lon', deg=True)))
        line_info.append("{:>8.4f}".format(traj.rbeg_ele / 1000))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.2f}', 'rbeg_ele', multi=1.0 / 1000)))
        line_info.append("{:>12.6f}".format(np.degrees(traj.rend_lat)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'rend_lat', deg=True)))
        line_info.append("{:>12.6f}".format(np.degrees(traj.rend_lon)))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.4f}', 'rend_lon', deg=True)))
        line_info.append("{:>8.4f}".format(traj.rend_ele / 1000))
        line_info.append("{:>7s}".format(
            _uncer(traj, '{:.2f}', 'rend_ele', multi=1.0 / 1000)))

        # Compute the duration
        duration = max([np.max(obs.time_data[obs.ignore_list == 0]) for obs in traj.observations \
            if obs.ignore_station == False])

        # Compute the peak magnitude and the peak height
        peak_mags = [np.min(obs.absolute_magnitudes[obs.ignore_list == 0]) for obs in traj.observations \
            if obs.ignore_station == False]
        peak_mag = np.min(peak_mags)
        peak_ht = [obs.model_ht[np.argmin(obs.absolute_magnitudes[obs.ignore_list == 0])] for obs in traj.observations \
            if obs.ignore_station == False][np.argmin(peak_mags)]

        ### Compute the mass

        time_mag_arr = []
        avg_t_diff_max = 0
        for obs in traj.observations:

            # Skip ignored stations
            if obs.ignore_station:
                continue

            # If there are no magnitudes for this site, skip it
            if obs.absolute_magnitudes is None:
                continue

            # Compute average time difference
            avg_t_diff_max = max(
                avg_t_diff_max,
                np.median(obs.time_data[1:] - obs.time_data[:-1]))

            for t, mag in zip(obs.time_data, obs.absolute_magnitudes):
                if (mag is not None) and (not np.isnan(mag)):
                    time_mag_arr.append([t, mag])

        # Compute the mass
        time_mag_arr = np.array(sorted(time_mag_arr, key=lambda x: x[0]))
        time_arr, mag_arr = time_mag_arr.T

        # Average out the magnitudes
        time_arr, mag_arr = averageClosePoints(time_arr, mag_arr,
                                               avg_t_diff_max)

        # Compute the photometry mass
        mass = calcMass(np.array(time_arr),
                        np.array(mag_arr),
                        traj.orbit.v_avg_norot,
                        P_0m=P_0m)

        ###

        # Meteor parameters (duration, peak magnitude, integrated intensity, Q angle)
        line_info.append("{:8.2f}".format(duration))
        line_info.append("{:+6.2f}".format(peak_mag))
        line_info.append("{:>8.4f}".format(peak_ht / 1000))
        line_info.append("{:8.2e}".format(mass))

        # Convergence angle
        line_info.append("{:5.2f}".format(
            np.degrees(traj.best_conv_inter.conv_angle)))

        # Median fit error in arcsec
        line_info.append("{:12.2f}".format(3600*np.degrees(np.median([obs.ang_res_std for obs \
            in traj.observations if not obs.ignore_station]))))

        # Meteor begins inside the FOV
        fov_beg = None
        fov_beg_list = [obs.fov_beg for obs in traj.observations if (obs.ignore_station == False) \
            and hasattr(obs, "fov_beg")]
        if len(fov_beg_list) > 0:
            fov_beg = np.any(fov_beg_list)

        line_info.append("{:>6s}".format(str(fov_beg)))

        # Meteor ends inside the FOV
        fov_end = None
        fov_end_list = [obs.fov_end for obs in traj.observations if (obs.ignore_station == False) \
            and hasattr(obs, "fov_end")]
        if len(fov_end_list) > 0:
            fov_end = np.any(fov_end_list)

        line_info.append("{:>6s}".format(str(fov_end)))

        # Participating stations
        participating_stations = sorted([obs.station_id for obs in traj.observations \
            if obs.ignore_station == False])
        line_info.append("{:>4d}".format(len(participating_stations)))
        line_info.append("{:s}".format(",".join(participating_stations)))

        out_str += delimiter.join(line_info) + "\n\r"

    # Save the file to a trajectory summary
    traj_summary_path = os.path.join(dir_path, TRAJ_SUMMARY_FILE)
    with open(traj_summary_path, 'w') as f:
        f.write(out_str)

    print("Trajectory summary saved to:", traj_summary_path)
Example #15
def solveTrajectoryGeneric(jdt_ref, meteor_list, dir_path, solver='original', **kwargs):
    """ Feed the list of meteors in the trajectory solver and run it. 
    
    Arguments:
        jdt_ref: [float] Reference Julian date for all objects in meteor_list.
        meteor_list: [list] A list of MeteorObservation objects.
        dir_path: [str] Path to the data directory.

    Keyword arguments:
        solver: [str] Solver choice:
            - "original" is the Monte Carlo solver
            - "gural" is the Gural solver (through C++ bindings)
        **kwargs: Keyword arguments for the trajectory solver.

    """

    # Create name of output directory
    output_dir = os.path.join(dir_path, jd2Date(jdt_ref, dt_obj=True).strftime("%Y%m%d-%H%M%S.%f"))


    # Init the trajectory solver
    if solver == 'original':
        traj = Trajectory(jdt_ref, output_dir=output_dir, meastype=1, **kwargs)

    elif solver.lower().startswith('gural'):
        velmodel = solver.lower().strip('gural')
        if len(velmodel) == 1:
            velmodel = int(velmodel)
        else:
            velmodel = 0

        traj = GuralTrajectory(len(meteor_list), jdt_ref, velmodel=velmodel, meastype=1, verbose=1, 
            output_dir=output_dir)

    else:
        print('No such solver:', solver)
        return 


    # Add meteor observations to the solver
    for meteor in meteor_list:

        if solver == 'original':

            comment = ''
            if hasattr(meteor, "ff_name"):
                comment = meteor.ff_name

            traj.infillTrajectory(meteor.ra_data, meteor.dec_data, meteor.time_data, meteor.latitude, 
                meteor.longitude, meteor.height, station_id=meteor.station_id, \
                magnitudes=meteor.mag_data, comment=comment)

        elif solver.lower().startswith('gural'):

            # Extract the velocity model if one is given in the solver name
            try:
                velmodel = int(solver[-1])

            except ValueError:
                # Default to the exponential model
                velmodel = 3

            traj.infillTrajectory(meteor.ra_data, meteor.dec_data, meteor.time_data, meteor.latitude, 
                meteor.longitude, meteor.height)


    # Solve the trajectory
    traj = traj.run()

    return traj
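
# A standalone sketch of how the solver string selects the Gural velocity
# model above: a single trailing digit picks the model, a bare 'gural'
# defaults to model 0.
for s in ['gural', 'gural1', 'gural3']:
    velmodel = s.lower().strip('gural')
    velmodel = int(velmodel) if len(velmodel) == 1 else 0
    print(s, '->', velmodel)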
Example #16
    def loadUnpairedObservations(self, processing_list, dt_range=None):
        """ Load unpaired meteor observations, i.e. observations that are not a part of any trajectory. """

        # Go through folders for processing
        unpaired_met_obs_list = []
        prev_station = None
        station_count = 1
        for station_code, rel_proc_path, proc_path, night_dt in processing_list:

            # Check that the night datetime is within the given range of times, if the range is given
            if (dt_range is not None) and (night_dt is not None):
                dt_beg, dt_end = dt_range

                # Skip all folders which are outside the limits
                if (night_dt < dt_beg) or (night_dt > dt_end):
                    continue

            ftpdetectinfo_name = None
            platepar_recalibrated_name = None

            # Skip files, only take directories
            if os.path.isfile(proc_path):
                continue

            print()
            print("Processing station:", station_code)

            # Find FTPdetectinfo and platepar files
            for name in os.listdir(proc_path):

                # Find FTPdetectinfo
                if name.startswith("FTPdetectinfo") and name.endswith('.txt') and \
                    (not "backup" in name) and (not "uncalibrated" in name):
                    ftpdetectinfo_name = name
                    continue

                if name == "platepars_all_recalibrated.json":

                    try:
                        # Try loading the recalibrated platepars
                        with open(os.path.join(proc_path, name)) as f:
                            platepars_recalibrated_dict = json.load(f)
                            platepar_recalibrated_name = name
                            continue

                    except (OSError, ValueError):
                        pass

            # Skip these observations if no data files were found inside
            if (ftpdetectinfo_name is None) or (platepar_recalibrated_name is
                                                None):
                print("  Skipping {:s} due to missing data files...".format(
                    rel_proc_path))

                # Add the folder to the list of processed folders
                self.db.addProcessedDir(station_code, rel_proc_path)

                continue

            if station_code != prev_station:
                station_count += 1
                prev_station = station_code

                # Save the database to mark those with missing data files (only every 50th station,
                # to speed things up)
                if station_count % 50 == 0:
                    self.saveDatabase()

            # Load platepars
            with open(os.path.join(proc_path,
                                   platepar_recalibrated_name)) as f:
                platepars_recalibrated_dict = json.load(f)

            # If all files exist, init the meteor container object
            cams_met_obs_list = self.initMeteorObs(station_code, os.path.join(proc_path, \
                ftpdetectinfo_name), platepars_recalibrated_dict)

            # Format the observation object to the one required by the trajectory correlator
            added_count = 0
            for cams_met_obs in cams_met_obs_list:

                # Get the platepar
                if cams_met_obs.ff_name in platepars_recalibrated_dict:
                    pp_dict = platepars_recalibrated_dict[cams_met_obs.ff_name]
                else:
                    continue

                pp = PlateparDummy(**pp_dict)

                # Skip observations which weren't recalibrated
                if hasattr(pp, "auto_recalibrated"):
                    if not pp.auto_recalibrated:
                        print("    Skipping {:s}, not recalibrated!".format(
                            cams_met_obs.ff_name))
                        continue

                # Init meteor data
                meteor_data = []
                for entry in zip(cams_met_obs.frames, cams_met_obs.time_data, cams_met_obs.x_data,\
                    cams_met_obs.y_data, cams_met_obs.azim_data, cams_met_obs.elev_data, \
                    cams_met_obs.ra_data, cams_met_obs.dec_data, cams_met_obs.mag_data):

                    frame, time_rel, x, y, azim, alt, ra, dec, mag = entry

                    met_point = MeteorPointRMS(frame, time_rel, x, y, np.degrees(ra), np.degrees(dec), \
                        np.degrees(azim), np.degrees(alt), mag)

                    meteor_data.append(met_point)

                # Init the new meteor observation object
                met_obs = MeteorObsRMS(station_code, jd2Date(cams_met_obs.jdt_ref, dt_obj=True), pp, \
                    meteor_data, rel_proc_path, ff_name=cams_met_obs.ff_name)

                # Skip bad observations
                if met_obs.bad_data:
                    continue

                # Add only unpaired observations
                if not self.db.checkObsIfPaired(met_obs):

                    # print(" ", station_code, met_obs.reference_dt, rel_proc_path)
                    added_count += 1

                    unpaired_met_obs_list.append(met_obs)

            print("  Added {:d} observations!".format(added_count))

        print()
        print("  Finished loading unpaired observations!")
        self.saveDatabase()

        return unpaired_met_obs_list
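
# A standalone sketch (hypothetical datetimes) of the night-folder time window
# filter applied at the top of the loop above.
import datetime
dt_beg = datetime.datetime(2021, 3, 1, 18, 0, 0)
dt_end = datetime.datetime(2021, 3, 2, 6, 0, 0)
night_dt = datetime.datetime(2021, 3, 1, 22, 30, 0)
if (night_dt < dt_beg) or (night_dt > dt_end):
    print("Skipping the folder...")
else:
    print("Processing the folder...")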
Example #17
def getAtmDensity(lat, lon, height, jd):
    """ For the given heights, returns the atmospheric density from NRLMSISE-00 model. 
    
    More info: https://github.com/magnific0/nrlmsise-00/blob/master/nrlmsise-00.h

    Arguments:
        lat: [float] Latitude in radians.
        lon: [float] Longitude in radians.
        height: [float] Height in meters.
        jd: [float] Julian date.

    Return:
        [float] Atmosphere density in kg/m^3.

    """

    # Init the input array
    inp = nrlmsise_input()

    # Convert the given Julian date to datetime
    dt = jd2Date(jd, dt_obj=True)

    # Get the day of year
    doy = dt.timetuple().tm_yday

    # Get the second in day
    midnight = dt.replace(hour=0, minute=0, second=0, microsecond=0)
    sec = (dt - midnight).seconds

    # Calculate the local sidereal time (degrees)
    lst, _ = jd2LST(jd, np.degrees(lon))

    ### INPUT PARAMETERS ###
    ##########################################################################################################
    # Set year (no effect)
    inp.year = 0

    # Day of year
    inp.doy = doy

    # Seconds in a day
    inp.sec = sec

    # Altitude in kilometers
    inp.alt = height / 1000.0

    # Geodetic latitude (deg)
    inp.g_lat = np.degrees(lat)

    # Geodetic longitude (deg)
    inp.g_long = np.degrees(lon)

    # Local apparent solar time (hours), approximated here by the local sidereal time in hours
    inp.lst = lst / 15

    # f107, f107A, and ap effects are neither large nor well established below 80 km and these parameters
    # should be set to 150., 150., and 4. respectively.

    # 81 day average of 10.7 cm radio flux (centered on DOY)
    inp.f107A = 150

    # Daily 10.7 cm radio flux for previous day
    inp.f107 = 150

    # Magnetic index (daily)
    inp.ap = 4

    ##########################################################################################################

    # Init the flags array
    flags = nrlmsise_flags()

    # Set output in kilograms and meters
    flags.switches[0] = 1

    # Set all switches to ON
    for i in range(1, 24):
        flags.switches[i] = 1

    # Array containing the following magnetic values:
    #   0 : daily AP
    #   1 : 3 hr AP index for current time
    #   2 : 3 hr AP index for 3 hrs before current time
    #   3 : 3 hr AP index for 6 hrs before current time
    #   4 : 3 hr AP index for 9 hrs before current time
    #   5 : Average of eight 3 hr AP indices from 12 to 33 hrs prior to current time
    #   6 : Average of eight 3 hr AP indices from 36 to 57 hrs prior to current time
    aph = ap_array()

    # Set all AP indices to 100
    for i in range(7):
        aph.a[i] = 100

    # Init the output array
    # OUTPUT VARIABLES:
    #     d[0] - HE NUMBER DENSITY(CM-3)
    #     d[1] - O NUMBER DENSITY(CM-3)
    #     d[2] - N2 NUMBER DENSITY(CM-3)
    #     d[3] - O2 NUMBER DENSITY(CM-3)
    #     d[4] - AR NUMBER DENSITY(CM-3)
    #     d[5] - TOTAL MASS DENSITY(GM/CM3) [includes d[8] in td7d]
    #     d[6] - H NUMBER DENSITY(CM-3)
    #     d[7] - N NUMBER DENSITY(CM-3)
    #     d[8] - Anomalous oxygen NUMBER DENSITY(CM-3)
    #     t[0] - EXOSPHERIC TEMPERATURE
    #     t[1] - TEMPERATURE AT ALT
    out = nrlmsise_output()

    # Evaluate the atmosphere with the given parameters
    gtd7(inp, flags, out)

    # Get the total mass density
    atm_density = out.d[5]

    return atm_density
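
# A minimal usage sketch with a hypothetical site and Julian date: evaluate
# the NRLMSISE-00 density at a height of 90 km above 45 N, 15 E.
rho = getAtmDensity(np.radians(45.0), np.radians(15.0), 90000.0, 2458765.5)
print("Density at 90 km: {:.3e} kg/m^3".format(rho))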
Example #18
def solveTrajectoryRMS(json_list, dir_path, solver='original', **kwargs):
    """ Feed the list of meteors in the trajectory solver. """

    # Normalize the observations to the same reference Julian date and precess them from J2000 to the
    # epoch of date
    jdt_ref, meteor_list = initMeteorObjects(json_list)

    # Create name of output directory
    output_dir = os.path.join(
        dir_path,
        jd2Date(jdt_ref, dt_obj=True).strftime("%Y%m%d-%H%M%S.%f"))

    # Init the trajectory solver
    if solver == 'original':
        traj = Trajectory(jdt_ref, output_dir=output_dir, meastype=1, **kwargs)

    elif solver.lower().startswith('gural'):
        velmodel = solver.lower().strip('gural')
        if len(velmodel) == 1:
            velmodel = int(velmodel)
        else:
            velmodel = 0

        traj = GuralTrajectory(len(meteor_list),
                               jdt_ref,
                               velmodel=velmodel,
                               meastype=1,
                               verbose=1,
                               output_dir=output_dir)

    else:
        print('No such solver:', solver)
        return

    # Add meteor observations to the solver
    for meteor in meteor_list:

        if solver == 'original':

            traj.infillTrajectory(meteor.ra_data, meteor.dec_data, meteor.time_data, meteor.latitude,
                meteor.longitude, meteor.height, station_id=meteor.station_id, \
                magnitudes=meteor.mag_data)

        elif solver.lower().startswith('gural'):

            # Extract the velocity model if one is given in the solver name
            try:
                velmodel = int(solver[-1])

            except ValueError:
                # Default to the exponential model
                velmodel = 3

            traj.infillTrajectory(meteor.ra_data, meteor.dec_data,
                                  meteor.time_data, meteor.latitude,
                                  meteor.longitude, meteor.height)

    # Solve the trajectory
    traj = traj.run()

    return traj
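
# A minimal usage sketch (hypothetical inputs): json_list would hold the
# per-station RMS observation JSONs, and any extra keyword arguments are
# forwarded to the solver.
# traj = solveTrajectoryRMS(json_list, "/data/night1", solver='original')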
Example #19
    def __init__(self, dir_path_mir, traj_pickle_file):

        # Name of input file for meteor parameters
        meteor_inputs_file = config.met_sim_input_file

        # Load input meteor data
        met, consts = loadInputs(meteor_inputs_file)

        # Load the pickled trajectory
        self.traj = loadPickle(dir_path_mir, traj_pickle_file)

        self.results_list = []
        self.full_cost_list = []

        # Go through all observations
        for station_ind, obs in enumerate(self.traj.observations):

            # Name of the results file
            results_file = jd2Date(self.traj.jdt_ref, dt_obj=True).strftime('%Y%m%d_%H%M%S') + "_" \
                + str(self.traj.observations[station_ind].station_id) + "_simulations.npy"

            results_file = os.path.join(dir_path_mir, results_file)

            # Add the results file to the results list
            self.results_list.append(results_file)

            # Take the parameters of the observation with the highest beginning height
            obs_time = self.traj.observations[station_ind].time_data
            obs_length = self.traj.observations[station_ind].length

            # Fit only the first 25% of the observed trajectory
            len_part = int(0.25 * len(obs_time))

            # If the first 25% has fewer than 4 points, then take the first 4 points
            if len_part < 4:
                len_part = 4

            # Cut the observations to the first part of the trajectory
            obs_time = obs_time[:len_part]
            obs_length = obs_length[:len_part]

            # Calculate observed velocities
            velocities, time_diffs = calcVelocity(obs_time, obs_length)
            print(velocities)

            # Calculate the RMS deviation of the velocities from the initial velocity
            vel_rms = np.sqrt(np.mean((velocities[1:] - self.traj.v_init)**2))

            print('Vel RMS:', vel_rms)

            # Calculate the along track differences
            along_track_diffs = (velocities[1:] -
                                 self.traj.v_init) * time_diffs[1:]

            # Calculate the full 3D residuals
            full_residuals = np.sqrt(along_track_diffs**2 \
                + self.traj.observations[station_ind].v_residuals[:len_part][1:]**2 \
                + self.traj.observations[station_ind].h_residuals[:len_part][1:]**2)

            # Calculate the average 3D deviation from the estimated trajectory
            full_cost = np.sum(np.abs(
                np.array(full_residuals))) / len(full_residuals)

            self.full_cost_list.append(full_cost)

        # Load solutions from a file
        self.loadSimulations()

        # Initialize the plot framework
        self.initGrid()

        # Initialize main plots
        self.dens_min_init, self.dens_max_init = self.updatePlots(init=True)

        self.dens_min = self.dens_min_init
        self.dens_max = self.dens_max_init

        ### SLIDERS

        # Sliders for density
        self.sl_ind_dev_1 = Slider(self.ax_sl_11,
                                   'Min',
                                   self.dens_min,
                                   self.dens_max,
                                   valinit=self.dens_min)
        self.sl_ind_dev_2 = Slider(self.ax_sl_12,
                                   'Max',
                                   self.dens_min,
                                   self.dens_max,
                                   valinit=self.dens_max,
                                   slidermin=self.sl_ind_dev_1)
        self.ax_sl_12.set_xlabel('Density')

        # Turn on slider updating
        self.sl_ind_dev_1.on_changed(self.updateSliders)
        self.sl_ind_dev_2.on_changed(self.updateSliders)

        ######

        plt.show()
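
# A standalone sketch (made-up velocities in m/s) of the velocity RMS measure
# computed above: the deviation of the point-to-point velocities from v_init.
velocities = np.array([19800.0, 20150.0, 20010.0, 19950.0])
v_init = 20000.0
vel_rms = np.sqrt(np.mean((velocities[1:] - v_init)**2))
print('Vel RMS:', vel_rms)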
Example #20
    # meteor2 = meteor_list[2:4]

    
    # Assume all entries in the FTPdetectinfo path should be used for one meteor
    meteor_proc_list = [meteor_list]


    for meteor in meteor_proc_list:

        for met in meteor:
            print('--------------------------')
            print(met)


        # Run the trajectory solver
        traj = solveTrajectoryCAMS(meteor, os.path.join(dir_path, jd2Date(meteor[0].jdt_ref, \
            dt_obj=True).strftime("%Y%m%d-%H%M%S.%f")), solver=cml_args.solver, max_toffset=max_toffset, \
            monte_carlo=(not cml_args.disablemc), mc_runs=cml_args.mcruns, \
            geometric_uncert=cml_args.uncertgeom, gravity_correction=(not cml_args.disablegravity), 
            plot_all_spatial_residuals=cml_args.plotallspatial, plot_file_type=cml_args.imgformat, \
            show_plots=(not cml_args.hideplots), v_init_part=velpart, v_init_ht=vinitht)


    # ### PERFORM PHOTOMETRY

    # import matplotlib
    # import matplotlib.pyplot as plt

    # # Override default DPI for saving from the interactive window
    # matplotlib.rcParams['savefig.dpi'] = 300

    # # Compute absolute magnitudes
Example #21
    def loadUnprocessedObservations(self, processing_list, dt_range=None):
        """ Load unprocessed meteor observations. """

        # Go through folders for processing
        met_obs_list = []
        for station_code, rel_proc_path, proc_path, night_dt in processing_list:

            # Check that the night datetime is within the given range of times, if the range is given
            if (dt_range is not None) and (night_dt is not None):
                dt_beg, dt_end = dt_range

                # Skip all folders which are outside the limits
                if (night_dt < dt_beg) or (night_dt > dt_end):
                    continue

            ftpdetectinfo_name = None
            platepar_recalibrated_name = None

            # Find FTPdetectinfo and platepar files
            for name in os.listdir(proc_path):

                # Find FTPdetectinfo
                if name.startswith("FTPdetectinfo") and name.endswith('.txt') and \
                    (not "backup" in name) and (not "uncalibrated" in name):
                    ftpdetectinfo_name = name
                    continue

                if name == "platepars_all_recalibrated.json":
                    platepar_recalibrated_name = name
                    continue

            # Skip these observations if no data files were found inside
            if (ftpdetectinfo_name is None) or (platepar_recalibrated_name is
                                                None):
                print("Skipping {:s} due to missing data files...".format(
                    rel_proc_path))

                # Add the folder to the list of processed folders
                self.db.addProcessedDir(station_code, rel_proc_path)

                continue

            # Save database to mark those with missing data files
            self.db.save()

            # Load platepars
            with open(os.path.join(proc_path,
                                   platepar_recalibrated_name)) as f:
                platepars_recalibrated_dict = json.load(f)

            # If all files exist, init the meteor container object
            cams_met_obs_list = self.initMeteorObs(station_code, os.path.join(proc_path, \
                ftpdetectinfo_name), platepars_recalibrated_dict)

            # Format the observation object to the one required by the trajectory correlator
            for cams_met_obs in cams_met_obs_list:

                # Get the platepar
                pp_dict = platepars_recalibrated_dict[cams_met_obs.ff_name]
                pp = PlateparDummy(**pp_dict)

                # Init meteor data
                meteor_data = []
                for entry in zip(cams_met_obs.frames, cams_met_obs.time_data, cams_met_obs.x_data,\
                    cams_met_obs.y_data, cams_met_obs.azim_data, cams_met_obs.elev_data, \
                    cams_met_obs.ra_data, cams_met_obs.dec_data, cams_met_obs.mag_data):

                    frame, time_rel, x, y, azim, alt, ra, dec, mag = entry

                    met_point = MeteorPointRMS(frame, time_rel, x, y, np.degrees(ra), np.degrees(dec), \
                        np.degrees(azim), np.degrees(alt), mag)

                    meteor_data.append(met_point)

                # Init the new meteor observation object
                met_obs = MeteorObsRMS(station_code, jd2Date(cams_met_obs.jdt_ref, dt_obj=True), pp, \
                    meteor_data, rel_proc_path)

                print(station_code, met_obs.reference_dt, rel_proc_path)

                met_obs_list.append(met_obs)

        return met_obs_list
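
# A standalone sketch of the FTPdetectinfo file name filter used above, run
# on hypothetical directory entries.
names = ["FTPdetectinfo_XX0001_20210301.txt",
         "FTPdetectinfo_XX0001_20210301_backup.txt",
         "FTPdetectinfo_XX0001_20210301_uncalibrated.txt",
         "platepars_all_recalibrated.json"]
for name in names:
    if name.startswith("FTPdetectinfo") and name.endswith('.txt') \
        and ("backup" not in name) and ("uncalibrated" not in name):
        print("Found FTPdetectinfo:", name)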
Example #22
    # Calculate the arrival times as the time in seconds from the earliest JD
    jd_ref = min(jd_list)
    ref_indx = np.argmin(jd_list)

    try:
        _, _, _, lat0, lon0, elev0, ref_time, pick_time, station_no = station_list[
            ref_indx]
        lat0, lon0, elev0 = setup.lat_centre, setup.lon_centre, 0

    except ValueError:
        print(
            "ERROR: data_picks.csv files created previous to Jan. 8, 2019 are lacking a channel tag added. Redownloading the waveform files will likely fix this"
        )

    # Date for weather
    ref_time = jd2Date(jd_ref)
    setup.ref_time = datetime.datetime(*(map(int, ref_time)))

    # Find search area in lat/lon (weather area)

    # Init the constants
    consts = Constants()

    # Convert switches to booleans
    # setup.mode =            setup.mode.lower()
    # setup.debug =           (setup.debug.lower() == 'true')
    # setup.enable_winds =    (setup.enable_winds.lower() == 'true')
    # setup.get_data =        (setup.get_data.lower() == 'true')
    # setup.perturb =         (setup.perturb.lower() == 'true')

    sounding = parseWeather(setup, consts)
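
# A standalone sketch (hypothetical Julian date) mirroring the jd2Date tuple
# to datetime conversion used above.
ref_time = jd2Date(2458765.5)
ref_dt = datetime.datetime(*map(int, ref_time))
print(ref_dt)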
Example #23
def fetchJpgsAndMp4s(traj, outdir):
    archdir = os.getenv('ARCHDIR')
    if len(archdir) < 5:
        archdir = '/home/ec2-user/ukmon-shared/archive'

    print('getting camera details file')
    cinfo = cdet.SiteInfo()

    for obs in traj.observations:
        statid = obs.station_id
        fldr = cinfo.getFolder(statid)
        print(statid, fldr)
        evtdate = jd2Date(obs.jdt_ref, dt_obj=True)

        print('station {} event {} '.format(statid,
                                            evtdate.strftime('%Y%m%d-%H%M%S')))
        # If the event is after midnight, the folder will have the previous day's date
        if evtdate.hour < 12:
            evtdate += timedelta(days=-1)
        yr = evtdate.year
        ym = evtdate.year * 100 + evtdate.month
        ymd = ym * 100 + evtdate.day

        print('getting jpgs and mp4s')
        thispth = '{:s}/{:04d}/{:06d}/{:08d}/'.format(fldr, yr, ym, ymd)
        srcpath = os.path.join(archdir, thispth)
        print(thispth)
        flist = glob.glob1(srcpath, 'FF*.jpg')
        srcfil = None
        for fil in flist:
            spls = fil.split('_')
            fdt = datetime.strptime(spls[2] + '_' + spls[3], '%Y%m%d_%H%M%S')
            tdiff = (evtdate - fdt).seconds
            if tdiff > 0 and tdiff < 11:
                srcfil = fil
                break
        if srcfil is not None:
            srcfil = srcpath + srcfil
            shutil.copy2(srcfil, outdir)
            file_name, _ = os.path.splitext(srcfil)
            srcfil = file_name + '.mp4'
            try:
                shutil.copy2(srcfil, outdir)
            except FileNotFoundError:
                pass
        else:
            print('no jpgs in {}'.format(srcpath))

        print('R90 CSV, KML and FTPDetect file')
        flist = os.listdir(srcpath)
        for fil in flist:
            file_name, file_ext = os.path.splitext(fil)
            if ('FTPdetectinfo' in fil) and (file_ext == '.txt') and (
                    '_original' not in file_name
            ) and ('_uncal' not in file_name) and ('_backup' not in file_name):
                srcfil = srcpath + fil
                shutil.copy2(srcfil, outdir)
            elif file_ext == '.csv':
                srcfil = srcpath + fil
                shutil.copy2(srcfil, outdir)
            elif file_ext == '.kml':
                srcfil = srcpath + fil
                shutil.copy2(srcfil, outdir)
                kmldir = os.path.join(archdir, 'kmls')
                shutil.copy2(srcfil, kmldir)

    return
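
# A standalone sketch (hypothetical event time) of the folder-date rule above:
# events after midnight are filed under the previous day's date.
evtdate = datetime(2021, 3, 1, 2, 30, 0)
if evtdate.hour < 12:
    evtdate += timedelta(days=-1)
print(evtdate.strftime('%Y%m%d'))  # -> 20210228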
Example #24
def calcOrbit(radiant_eci, v_init, v_avg, eci_ref, jd_ref, stations_fixed=False, reference_init=True, \
    rotation_correction=False):
    """ Calculate the meteor's orbit from the given meteor trajectory. The orbit of the meteoroid is defined 
        relative to the centre of the Sun (heliocentric).

    Arguments:
        radiant_eci: [3 element ndarray] Radiant vector in ECI coordinates (meters).
        v_init: [float] Initial velocity (m/s).
        v_avg: [float] Average velocity of a meteor (m/s).
        eci_ref: [float] Reference ECI coordinates of the meteor trajectory in the epoch of date (meters).
            They can be calculated with the geo2Cartesian function. Ceplecha (1987) assumes this to be the
            average point on the trajectory, while Jenniskens et al. (2011) assume it to be the first point
            on the trajectory, as that point is not influenced by deceleration.
            NOTE: If the stations are not fixed, the reference ECI coordinates should be the ones
            of the initial point on the trajectory, NOT of the average point!
        jd_ref: [float] Reference Julian date of the meteor trajectory. Ceplecha (1987) takes this as the
            average time of the trajectory, while Jenniskens et al. (2011) take it as the time of the first
            point on the trajectory.
    
    Keyword arguments:
        stations_fixed: [bool] If True, the correction for Earth's rotation will be performed on the radiant,
            but not the velocity. This should be True ONLY in two cases:
                - if the ECEF coordinate system was used for trajectory estimation
                - if the ECI coordinate system was used for trajectory estimation, BUT the stations were not
                    moved in time, but were kept fixed at one point, regardless of the trajectory estimation
                    method.
            It is necessary to perform this correction for the intersecting planes method, but not for
            the lines of sight method ONLY when the stations are not fixed. Of course, if one is using the 
            lines of sight method with fixed stations, one should perform this correction!
        reference_init: [bool] If True (default), the initial point on the trajectory is given as the reference
            one, i.e. the reference ECI coordinates are the ECI coordinates of the initial point on the
            trajectory, where the meteor has the velocity v_init. If False, then the reference point is the
            average point on the trajectory, and the average velocity will be used to do the corrections.
        rotation_correction: [bool] If True, the correction of the initial velocity for Earth's rotation
            will be performed. False by default. This should ONLY be True if the coordinate system for
            trajectory estimation was ECEF, i.e. it did not rotate with the Earth. In all other cases it
            should be False, even if fixed station coordinates were used in the ECI coordinate system!

    Return:
        orb: [Orbit object] Object containing the calculated orbit.

    """

    ### Correct the velocity vector for the Earth's rotation if the stations are fixed ###
    ##########################################################################################################

    eci_x, eci_y, eci_z = eci_ref

    # Calculate the geocentric latitude (the latitude which considers the Earth as an ellipsoid) of the
    # reference trajectory point
    lat_geocentric = np.arctan2(eci_z, np.sqrt(eci_x**2 + eci_y**2))

    # Calculate the dynamical JD
    jd_dyn = jd2DynamicalTimeJD(jd_ref)

    # Calculate the geographical coordinates of the reference trajectory ECI position
    lat_ref, lon_ref, ht_ref = cartesian2Geo(jd_ref, *eci_ref)

    # Initialize a new orbit structure and assign calculated parameters
    orb = Orbit()

    # Calculate the velocity of the Earth rotation at the position of the reference trajectory point (m/s)
    v_e = 2 * np.pi * vectMag(eci_ref) * np.cos(lat_geocentric) / 86164.09053

    # Calculate the equatorial coordinates of east from the reference position on the trajectory
    azimuth_east = np.pi / 2
    altitude_east = 0
    ra_east, dec_east = altAz2RADec(azimuth_east, altitude_east, jd_ref,
                                    lat_ref, lon_ref)

    # Compute velocity components of the state vector
    if reference_init:

        # If the initial velocity was the reference velocity, use it for the correction
        v_ref_vect = v_init * radiant_eci

    else:
        # Calculate reference velocity vector using the average point on the trajectory and the average
        # velocity
        v_ref_vect = v_avg * radiant_eci

    # Apply the Earth rotation correction if the station coordinates are fixed (a MUST for the
    # intersecting planes method!)
    if stations_fixed:

        ### Set fixed stations radiant info ###

        # If the stations are fixed, then the input state vector is already fixed to the ground
        orb.ra_norot, orb.dec_norot = eci2RaDec(radiant_eci)

        # Apparent azimuth and altitude (no rotation)
        orb.azimuth_apparent_norot, orb.elevation_apparent_norot = raDec2AltAz(orb.ra_norot, orb.dec_norot, \
            jd_ref, lat_ref, lon_ref)

        # Estimated average velocity (no rotation)
        orb.v_avg_norot = v_avg

        # Estimated initial velocity (no rotation)
        orb.v_init_norot = v_init

        ### ###

        v_ref_corr = np.zeros(3)

        # Calculate the corrected reference velocity vector/radiant
        v_ref_corr[0] = v_ref_vect[0] - v_e * np.cos(ra_east)
        v_ref_corr[1] = v_ref_vect[1] - v_e * np.sin(ra_east)
        v_ref_corr[2] = v_ref_vect[2]

    else:

        # MOVING STATIONS
        # Velocity vector will remain unchanged if the stations were moving
        if reference_init:
            v_ref_corr = v_init * radiant_eci

        else:
            v_ref_corr = v_avg * radiant_eci

        ### ###
        # If the rotation correction does not have to be applied, meaning that the rotation is already
        # included, compute a version of the radiant and the velocity without Earth's rotation
        # (REPORTING PURPOSES ONLY, THESE VALUES ARE NOT USED IN THE CALCULATION)

        v_ref_nocorr = np.zeros(3)

        # Calculate the derotated reference velocity vector/radiant
        v_ref_nocorr[0] = v_ref_vect[0] + v_e * np.cos(ra_east)
        v_ref_nocorr[1] = v_ref_vect[1] + v_e * np.sin(ra_east)
        v_ref_nocorr[2] = v_ref_vect[2]

        # Compute the radiant without Earth's rotation included
        orb.ra_norot, orb.dec_norot = eci2RaDec(vectNorm(v_ref_nocorr))
        orb.azimuth_apparent_norot, orb.elevation_apparent_norot = raDec2AltAz(orb.ra_norot, orb.dec_norot, \
            jd_ref, lat_ref, lon_ref)
        orb.v_init_norot = vectMag(v_ref_nocorr)
        orb.v_avg_norot = orb.v_init_norot - v_init + v_avg

        ### ###

    ##########################################################################################################

    ### Correct velocity for Earth's gravity ###
    ##########################################################################################################

    # If the reference velocity is the initial velocity
    if reference_init:

        # Use the corrected velocity for Earth's rotation (when ECEF coordinates are used)
        if rotation_correction:
            v_init_corr = vectMag(v_ref_corr)

        else:
            # IMPORTANT NOTE: The correction in this case is only done on the radiant (even if the stations
            # were fixed, but NOT on the initial velocity!). Thus, correction from Ceplecha 1987,
            # equation (35) is not needed. If the initial velocity is determined from time vs. length and in
            # ECI coordinates, whose coordinates rotate with the Earth, the moving stations play no role in
            # biasing the velocity.
            v_init_corr = v_init

    else:

        if rotation_correction:

            # Calculate the corrected initial velocity if the reference velocity is the average velocity
            v_init_corr = vectMag(v_ref_corr) + v_init - v_avg

        else:
            v_init_corr = v_init

    # Calculate apparent RA and Dec from radiant state vector
    orb.ra, orb.dec = eci2RaDec(radiant_eci)
    orb.v_init = v_init
    orb.v_avg = v_avg

    # Calculate the apparent azimuth and altitude (geodetic latitude, because ra/dec are calculated from ECI,
    #   which is calculated from WGS84 coordinates)
    orb.azimuth_apparent, orb.elevation_apparent = raDec2AltAz(
        orb.ra, orb.dec, jd_ref, lat_ref, lon_ref)

    orb.jd_ref = jd_ref
    orb.lon_ref = lon_ref
    orb.lat_ref = lat_ref
    orb.ht_ref = ht_ref
    orb.lat_geocentric = lat_geocentric

    # Assume that the velocity in infinity is the same as the initial velocity (after rotation correction, if
    # it was needed)
    orb.v_inf = v_init_corr

    # Make sure the velocity of the meteor is larger than the escape velocity
    if v_init_corr**2 > (2 * 6.67408 * 5.9722) * 1e13 / vectMag(eci_ref):

        # Calculate the geocentric velocity (sqrt of the squared initial velocity minus the square of the
        # Earth escape velocity at the height of the trajectory), units are m/s.
        # Square of the escape velocity is: 2GM/r, where G is the 2014 CODATA-recommended value of
        # 6.67408e-11 m^3/(kg s^2), and the mass of the Earth is M = 5.9722e24 kg
        v_g = np.sqrt(v_init_corr**2 -
                      (2 * 6.67408 * 5.9722) * 1e13 / vectMag(eci_ref))

        # Calculate the radiant corrected for Earth's rotation (ONLY if the stations were fixed, otherwise it
        #   is the same as the apparent radiant)
        ra_corr, dec_corr = eci2RaDec(vectNorm(v_ref_corr))

        # Calculate the Local Sidereal Time of the reference trajectory position
        lst_ref = np.radians(jd2LST(jd_ref, np.degrees(lon_ref))[0])

        # Calculate the apparent zenith angle
        zc = np.arccos(np.sin(dec_corr)*np.sin(lat_geocentric) \
            + np.cos(dec_corr)*np.cos(lat_geocentric)*np.cos(lst_ref - ra_corr))

        # Calculate the zenith attraction correction
        delta_zc = 2 * np.arctan2(
            (v_init_corr - v_g) * np.tan(zc / 2), v_init_corr + v_g)

        # Zenith distance of the geocentric radiant
        zg = zc + np.abs(delta_zc)

        ##########################################################################################################

        ### Calculate the geocentric radiant ###
        ##########################################################################################################

        # Get the azimuth from the corrected RA and Dec
        azimuth_corr, _ = raDec2AltAz(ra_corr, dec_corr, jd_ref,
                                      lat_geocentric, lon_ref)

        # Calculate the geocentric radiant
        ra_g, dec_g = altAz2RADec(azimuth_corr, np.pi / 2 - zg, jd_ref,
                                  lat_geocentric, lon_ref)

        ### Precess ECI coordinates to J2000 ###

        # Convert rectangular to spherical coordinates
        re, delta_e, alpha_e = cartesianToSpherical(*eci_ref)

        # Precess coordinates to J2000
        alpha_ej, delta_ej = equatorialCoordPrecession(jd_ref, J2000_JD.days,
                                                       alpha_e, delta_e)

        # Convert coordinates back to rectangular
        eci_ref = sphericalToCartesian(re, delta_ej, alpha_ej)
        eci_ref = np.array(eci_ref)

        ######

        # Precess the geocentric radiant to J2000
        ra_g, dec_g = equatorialCoordPrecession(jd_ref, J2000_JD.days, ra_g,
                                                dec_g)

        # Calculate the ecliptic latitude and longitude of the geocentric radiant (J2000 epoch)
        L_g, B_g = raDec2Ecliptic(J2000_JD.days, ra_g, dec_g)

        # Load the JPL ephemerids data
        jpl_ephem_data = SPK.open(config.jpl_ephem_file)

        # Get the position of the Earth (km) and its velocity (km/s) at the given Julian date (J2000 epoch)
        # The position is given in the ecliptic coordinates, origin of the coordinate system is in the centre
        # of the Sun
        earth_pos, earth_vel = calcEarthRectangularCoordJPL(
            jd_dyn, jpl_ephem_data, sun_centre_origin=True)

        # print('Earth position:')
        # print(earth_pos)
        # print('Earth velocity:')
        # print(earth_vel)

        # Convert the Earth's position to rectangular equatorial coordinates (FK5)
        earth_pos_eq = rotateVector(earth_pos, np.array([1, 0, 0]),
                                    J2000_OBLIQUITY)

        # print('Earth position (FK5):')
        # print(earth_pos_eq)

        # print('Meteor ECI:')
        # print(eci_ref)

        # Add the position of the meteor's trajectory to the position of the Earth to calculate the
        # equatorial coordinates of the meteor (in kilometers)
        meteor_pos = earth_pos_eq + eci_ref / 1000

        # print('Meteor position (FK5):')
        # print(meteor_pos)

        # Convert the position of the trajectory from FK5 to heliocentric ecliptic coordinates
        meteor_pos = rotateVector(meteor_pos, np.array([1, 0, 0]),
                                  -J2000_OBLIQUITY)

        # print('Meteor position:')
        # print(meteor_pos)

        ##########################################################################################################

        # Calculate components of the heliocentric velocity of the meteor (km/s)
        v_h = np.array(earth_vel) + np.array(
            eclipticToRectangularVelocityVect(L_g, B_g, v_g / 1000))

        # Calculate the heliocentric velocity in km/s
        v_h_mag = vectMag(v_h)

        # Calculate the corrected heliocentric ecliptic coordinates of the meteoroid using the method of
        # Sato and Watanabe (2014).
        L_h, B_h, met_v_h = correctedEclipticCoord(L_g, B_g, v_g / 1000,
                                                   earth_vel)

        # Calculate the solar longitude
        la_sun = jd2SolLonJPL(jd_dyn)

        # Calculations below done using Dave Clark's Master thesis equations

        # Specific orbital energy
        epsilon = (vectMag(v_h)**2) / 2 - SUN_MU / vectMag(meteor_pos)

        # Semi-major axis in AU
        a = -SUN_MU / (2 * epsilon * AU)

        # Calculate mean motion in rad/day
        n = np.sqrt(G * SUN_MASS / ((np.abs(a) * AU * 1000.0)**3)) * 86400.0

        # Calculate the orbital period in years
        T = 2 * np.pi * np.sqrt(
            ((a * AU)**3) / SUN_MU) / (86400 * SIDEREAL_YEAR)

        # Calculate the orbit angular momentum
        h_vect = np.cross(meteor_pos, v_h)

        # Calculate inclination
        incl = np.arccos(h_vect[2] / vectMag(h_vect))

        # Calculate eccentricity
        e_vect = np.cross(v_h, h_vect) / SUN_MU - vectNorm(meteor_pos)
        eccentricity = vectMag(e_vect)

        # Calculate perihelion distance (source: Jenniskens et al., 2011, CAMS overview paper)
        if eccentricity == 1:
            q = (vectMag(meteor_pos) +
                 np.dot(e_vect, meteor_pos)) / (1 + vectMag(e_vect))
        else:
            q = a * (1.0 - eccentricity)

        # Calculate the aphelion distance
        Q = a * (1.0 + eccentricity)

        # Normal vector to the XY reference frame
        k_vect = np.array([0, 0, 1])

        # Vector from the Sun pointing to the ascending node
        n_vect = np.cross(k_vect, h_vect)

        # Calculate node
        if vectMag(n_vect) == 0:
            node = 0
        else:
            node = np.arctan2(n_vect[1], n_vect[0])

        node = node % (2 * np.pi)

        # Calculate argument of perihelion
        if vectMag(n_vect) != 0:
            peri = np.arccos(
                np.dot(n_vect, e_vect) / (vectMag(n_vect) * vectMag(e_vect)))

            if e_vect[2] < 0:
                peri = 2 * np.pi - peri

        else:
            peri = np.arccos(e_vect[0] / vectMag(e_vect))

        peri = peri % (2 * np.pi)

        # Calculate the longitude of perihelion
        pi = (node + peri) % (2 * np.pi)

        ### Calculate true anomaly
        true_anomaly = np.arccos(
            np.dot(e_vect, meteor_pos) /
            (vectMag(e_vect) * vectMag(meteor_pos)))
        if np.dot(meteor_pos, v_h) < 0:
            true_anomaly = 2 * np.pi - true_anomaly

        true_anomaly = true_anomaly % (2 * np.pi)

        ###

        # Calculate eccentric anomaly
        eccentric_anomaly = np.arctan2(np.sqrt(1 - eccentricity**2)*np.sin(true_anomaly), eccentricity \
            + np.cos(true_anomaly))

        # Calculate mean anomaly
        mean_anomaly = eccentric_anomaly - eccentricity * np.sin(
            eccentric_anomaly)
        mean_anomaly = mean_anomaly % (2 * np.pi)

        # Calculate the time in days since the last perihelion passage of the meteoroid
        dt_perihelion = (mean_anomaly * a**(3.0 / 2)) / 0.01720209895

        if not np.isnan(dt_perihelion):

            # Calculate the date and time of the last perihelion passage
            last_perihelion = jd2Date(jd_dyn - dt_perihelion, dt_obj=True)

        else:
            last_perihelion = None

        # Calculate Tisserand's parameter with respect to Jupiter
        Tj = 2 * np.sqrt(
            (1 - eccentricity**2) * a / 5.204267) * np.cos(incl) + 5.204267 / a

        # Assign calculated parameters
        orb.lst_ref = lst_ref
        orb.jd_dyn = jd_dyn
        orb.v_g = v_g
        orb.ra_g = ra_g
        orb.dec_g = dec_g

        orb.meteor_pos = meteor_pos
        orb.L_g = L_g
        orb.B_g = B_g

        orb.v_h_x, orb.v_h_y, orb.v_h_z = met_v_h
        orb.L_h = L_h
        orb.B_h = B_h

        orb.zc = zc
        orb.zg = zg

        orb.v_h = v_h_mag * 1000

        orb.la_sun = la_sun

        orb.a = a
        orb.e = eccentricity
        orb.i = incl
        orb.peri = peri
        orb.node = node
        orb.pi = pi
        orb.q = q
        orb.Q = Q
        orb.true_anomaly = true_anomaly
        orb.eccentric_anomaly = eccentric_anomaly
        orb.mean_anomaly = mean_anomaly
        orb.last_perihelion = last_perihelion
        orb.n = n
        orb.T = T

        orb.Tj = Tj

    return orb
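
# A standalone sketch (made-up velocities in m/s) of the zenith attraction
# correction applied above, for an apparent zenith distance of 40 degrees.
v_init_corr, v_g = 25000.0, 21000.0
zc = np.radians(40.0)
delta_zc = 2*np.arctan2((v_init_corr - v_g)*np.tan(zc/2), v_init_corr + v_g)
zg = zc + np.abs(delta_zc)
print('Geocentric zenith distance: {:.2f} deg'.format(np.degrees(zg)))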
Example #25
        # begin_heights = [obs.rbeg_ele for obs in traj.observations]
        # station_ind = begin_heights.index(max(begin_heights))

        # TEST FOR REPRODUCING OLD RESULTS!!!
        # station_ind = 0

        obs_height = traj.observations[station_ind].model_ht
        obs_length = traj.observations[station_ind].length
        obs_time = traj.observations[station_ind].time_data

        # End height of the observed meteor
        end_ht = np.min(obs_height)


        # Name of the results file
        results_file = jd2Date(traj.jdt_ref, dt_obj=True).strftime('%Y%m%d_%H%M%S') + "_" + str(traj.observations[station_ind].station_id) + "_simulations.npy"
        results_file = os.path.join(dir_path_mir, results_file)

        # Add the results file to the results list
        results_list.append(results_file)


        # Fit only the first 25% of the observed trajectory
        len_part = int(0.25*len(obs_time))

        # If the first 25% has fewer than 4 points, then take the first 4 points
        if len_part < 4:
            len_part = 4

        obs_height = obs_height[:len_part]
        obs_length = obs_length[:len_part]
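
# A standalone sketch of the "first 25%, but at least 4 points" cut used
# above, on a made-up time array.
obs_time_demo = np.linspace(0, 1.0, 10)
len_part_demo = max(int(0.25*len(obs_time_demo)), 4)
print(obs_time_demo[:len_part_demo])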