Example #1
    def loadFullTraj(self, traj_reduced):
        """ Load the full trajectory object. 
    
        Arguments:
            traj_reduced: [TrajectoryReduced object]

        Return:
            traj: [Trajectory object] or [None] if file not found
        """

        # Generate the path to the output directory
        output_dir = self.generateTrajOutputDirectoryPath(traj_reduced)

        # Get the file name
        file_name = os.path.basename(traj_reduced.traj_file_path)

        # Try loading a full trajectory
        try:
            traj = loadPickle(output_dir, file_name)

            # Check if the traj object has fixed time offsets
            if not hasattr(traj, 'fixed_time_offsets'):
                traj.fixed_time_offsets = {}

            return traj

        except FileNotFoundError:
            print("File {:s} not found!".format(traj_reduced.traj_file_path))

            return None
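All of these examples call a loadPickle(dir_path, file_name) helper. As a rough sketch (an assumption, not the library's actual implementation, which may add e.g. Python 2/3 compatibility handling), such a helper could look like this:

import os
import pickle

def loadPickle(dir_path, file_name):
    """ Unpickle and return the object stored in dir_path/file_name. """
    with open(os.path.join(dir_path, file_name), 'rb') as f:
        return pickle.load(f)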
Example #2
def dumpAsEvFiles(dir_path, file_name):
    """ Dump the given pickle file as UWO-style ev_* file. """

    # Load the pickled trajectory
    traj = loadPickle(dir_path, file_name)

    # Dump the results as a UWO-style ev file

    year, month, day, hour, minute, second, _ = jd2Date(traj.jdt_ref)

    for i, obs in enumerate(traj.observations):

        # Construct file name
        date_str = "{:4d}{:02d}{:02d}_{:02d}{:02d}{:02d}A_{:s}".format(year, month, day, hour, minute, second, \
            obs.station_id)

        ev_file_name = 'ev_' + date_str + '.txt'

        # Convert azimuth and altitude to theta/phi
        theta_data = np.pi / 2.0 - obs.elev_data
        phi_data = (np.pi / 2.0 - obs.azim_data) % (2 * np.pi)

        # Write the ev_* file
        writeEvFile(dir_path, ev_file_name, traj.jdt_ref, str(i), obs.lat,
                    obs.lon, obs.ele, obs.time_data, theta_data, phi_data)
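The theta/phi convention above takes theta as the zenith angle and rotates the azimuth origin by 90 degrees. A standalone sanity check of the conversion and its inverse (illustration only, not part of the original code):

import numpy as np

azim, elev = np.radians(120.0), np.radians(45.0)

theta = np.pi/2.0 - elev                # zenith angle
phi = (np.pi/2.0 - azim) % (2*np.pi)    # rotated azimuth

# Inverting the conversion recovers the original angles
assert np.isclose(np.degrees(np.pi/2.0 - theta), 45.0)
assert np.isclose(np.degrees((np.pi/2.0 - phi) % (2*np.pi)), 120.0)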
Example #3
def saveProcessedList(data_path, results_list, param_class_name,
                      min_frames_visible):
    """ Save a list of pickle files which passes postprocessing criteria to disk.

    Arguments:
        data_path: [str] Path to directory with simulation pickle files.
        results_list: [list] A list of pickle files which pass the filters, plus randomly drawn parameters 
            such as the limiting magnitude. If a pickle file didn't pass the filters, its entry is None.
        param_class_name: [str] Name of the parameter class used for postprocessing.
        min_frames_visible: [int] Minimum number of frames above the limiting magnitude.

    """

    # Reject all None's from the results
    good_list = [entry for entry in results_list if entry is not None]

    # Load one simulation to get simulation parameters
    sim = loadPickle(data_path, good_list[0][0])

    # Compute the average minimum time the meteor needs to be visible
    min_time_visible = min_frames_visible/sim.params.fps \
        + (sim.params.len_delay_min + sim.params.len_delay_max)/2

    # Use the simulation parameters class name if none was given (it is also written in the header below)
    if param_class_name is None:
        param_class_name = sim.params.__class__.__name__

    # Save the list of good files to disk
    simulation_results_file = "{:s}_lm{:+04.1f}_mintime{:.3f}s_good_files.txt".format(param_class_name, \
        (sim.params.lim_mag_faintest + sim.params.lim_mag_brightest)/2, min_time_visible)

    # If the file exists, append to it
    append = False
    if os.path.isfile(os.path.join(data_path, simulation_results_file)):
        file_mode = 'a'
        append = True
    else:
        file_mode = 'w'

    with open(os.path.join(data_path, simulation_results_file), file_mode) as f:

        # Write the header when the file is created
        if not append:

            ### Write header ###

            # Write name of class used for postprocessing
            f.write("# param_class_name = {:s}\n".format(param_class_name))

            # Write column labels
            f.write("# File name, lim mag, lim mag length, length delay (s)\n")

            ### ###

        # Write entries
        for file_name, random_params in good_list:
            f.write("{:s}, {:.8f}, {:.8f}, {:.8f}\n".format(
                file_name, *random_params))

    print("{:d} entries saved to {:s}".format(len(good_list),
                                              simulation_resuts_file))
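A hypothetical reader for the list written above, assuming only the format saveProcessedList produces ('#'-prefixed header lines and four comma-separated columns):

def loadProcessedList(list_path):
    """ Read back a good-files list written by saveProcessedList. """
    entries = []
    with open(list_path) as f:
        for line in f:
            # Skip header and empty lines
            if line.startswith('#') or not line.strip():
                continue

            file_name, lim_mag, lim_mag_len, len_delay = [s.strip() for s in line.split(',')]
            entries.append((file_name, float(lim_mag), float(lim_mag_len), float(len_delay)))

    return entries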
Example #4
def generateExtraFiles(outdir):
    """ Load the trajectory pickle in outdir and generate the additional output files for it. """

    picklefile = glob.glob1(outdir, '*.pickle')[0]
    traj = loadPickle(outdir, picklefile)
    traj.save_results = True

    createAdditionalOutput(traj, outdir)
    fetchJpgsAndMp4s(traj, outdir)
    return
Example #5
def dataFunction(file_path, param_class_name, postprocess_params):

    # Load the pickle file
    sim = loadPickle(*os.path.split(file_path))

    # Extract model inputs and outputs
    return extractSimData(sim,
                          param_class_name=param_class_name,
                          postprocess_params=postprocess_params)
Example #6
def validateSimulation(dir_path, file_name, param_class_name,
                       min_frames_visible):

    # Load the pickle file
    sim = loadPickle(dir_path, file_name)

    # Extract simulation data
    res = extractSimData(sim, min_frames_visible=min_frames_visible, check_only=True, \
        param_class_name=param_class_name)

    # If the simulation didn't satisfy the filters, skip it
    if res is None:
        return None

    print("Good:", file_name)

    return os.path.join(dir_path, file_name), res
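A hypothetical driver for the validator above, which keeps only the simulations that pass the filters:

import os

def validateAll(dir_path, param_class_name, min_frames_visible):
    results = []
    for file_name in sorted(os.listdir(dir_path)):
        if file_name.endswith('.pickle'):
            results.append(validateSimulation(dir_path, file_name, param_class_name, \
                min_frames_visible))

    # Reject simulations which did not pass the filters
    return [res for res in results if res is not None]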
Example #7
def get_temp_plots(uuid):
    # Register this UUID in the files lock while its plots are being generated
    temp_lock.append(uuid)

    file_path = os.path.join(app.config.get('TEMP_DIR'), uuid)

    if not is_safe_path(app.config.get('TEMP_DIR'), file_path):
        return abort(404)

    pickle_filename = "trajectory.pickle"
    try:
        traj = loadPickle(file_path, pickle_filename)
    except Exception:
        # Release the lock before aborting
        temp_lock.remove(uuid)
        return abort(404)

    # end lock
    temp_lock.remove(uuid)

    frag_pickle_dict = traj.savePlots(None, None, show_plots=False, ret_figs=True)
    frag_pickle_dict_json = {}
    for name, value in frag_pickle_dict.items():
        fig = pickle.loads(value)

        if len(fig.axes) == 2:
            fig.axes[1].patch.set_alpha(0.0)  # necessary for mpld3 to work with twinx()
            mpld3.plugins.clear(fig)  # disable zooming, moving ... due to double axis mpld3 problem

        if name == "orbit":
            plt.axis([-0.3, 0.3, -0.3, 0.3])  # zoom in
            fig.axes[0].view_init(elev=90, azim=90)  # top down view

        frag_pickle_dict_json[name] = mpld3.fig_to_dict(fig)

        # Refresh figure
        global plt_clearing_lock
        while plt_clearing_lock: pass
        plt_clearing_lock = True
        plt.clf()
        plt.close()
        plt_clearing_lock = False

    frag_pickle_dict_json_fixed = json.dumps(frag_pickle_dict_json, cls=NumpyEncoder)
    frag_pickle_dict_json_fixed = frag_pickle_dict_json_fixed.replace(', "visible": false',
                                                                      "")  # fixes https://github.com/mpld3/mpld3/issues/370
    frag_pickle_dict_json_fixed = frag_pickle_dict_json_fixed.replace(', "visible": true', "")
    return frag_pickle_dict_json_fixed
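The busy-wait on the plt_clearing_lock flag above works, but spins the CPU while waiting. A sketch of the same guard using a real lock (a suggested alternative, not what the original code does):

import threading
import matplotlib.pyplot as plt

plt_clearing_lock = threading.Lock()

# Inside the plotting loop, instead of the while/flag pattern:
with plt_clearing_lock:
    plt.clf()
    plt.close()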
Example #8
    def __init__(self, traj_file_path, json_dict=None, traj_obj=None):
        """ Reduced representation of the Trajectory object which helps to save memory. 

        Arguments:
            traj_file_path: [str] Full path to the trajectory object.

        Keyword arguments:
            json_dict: [dict] Load values from a dictionary instead of a traj pickle file. None by default,
                in which case a traj pickle file will be loaded.
            traj_obj: [Trajectory instance] Load values from a full Trajectory object, instead from the disk.
                None by default.
        """

        # Load values from a trajectory pickle file or a given Trajectory object
        if json_dict is None:

            if traj_obj is None:

                # Full path to the trajectory object (saved so it can be loaded later if needed)
                self.traj_file_path = traj_file_path

                # Load the trajectory object
                traj = loadPickle(*os.path.split(traj_file_path))

            else:

                # Take values from the given trajectory object
                traj = traj_obj
                self.traj_file_path = os.path.join(
                    traj.output_dir, traj.file_name + "_trajectory.pickle")

            # Reference Julian date (beginning of the meteor)
            self.jdt_ref = traj.jdt_ref

            # ECI coordinates of the state vector computed through minimization
            if traj.state_vect_mini is None:
                self.state_vect_mini = None
            else:
                self.state_vect_mini = traj.state_vect_mini.tolist()

            # Apparent radiant vector computed through minimization
            if traj.radiant_eci_mini is None:
                self.radiant_eci_mini = None
            else:
                self.radiant_eci_mini = traj.radiant_eci_mini.tolist()

            # Initial and average velocity
            self.v_init = traj.v_init
            self.v_avg = traj.v_avg

            # Coordinates of the first point (observed)
            self.rbeg_lat = traj.rbeg_lat
            self.rbeg_lon = traj.rbeg_lon
            self.rbeg_ele = traj.rbeg_ele
            self.rbeg_jd = traj.rbeg_jd

            # Coordinates of the last point (observed)
            self.rend_lat = traj.rend_lat
            self.rend_lon = traj.rend_lon
            self.rend_ele = traj.rend_ele
            self.rend_jd = traj.rend_jd

            # Stations participating in the solution
            self.participating_stations = sorted([obs.station_id for obs in traj.observations \
                if obs.ignore_station == False])

            # Ignored stations
            self.ignored_stations = sorted([obs.station_id for obs in traj.observations \
                if obs.ignore_station == True])

        # Load values from a dictionary
        else:
            self.__dict__ = json_dict
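Because the reduced object holds only JSON-friendly attributes (floats, strings, lists), it round-trips cleanly through its __dict__. A hypothetical sketch, assuming traj_reduced is an existing TrajectoryReduced instance:

import json

# Serialize the reduced trajectory to a JSON string
json_str = json.dumps(traj_reduced.__dict__)

# Restore it without touching the original pickle file
traj_reduced_restored = TrajectoryReduced(None, json_dict=json.loads(json_str))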
Example #9
    arg_parser.add_argument('-s', '--statfixed', \
        help="Shoud be used if the stations were fixed during trajectory estimation (e.g. with MILIG).", \
        action="store_true")

    arg_parser.add_argument('-m', '--milig', \
        help="MILIG input mode, i.e. the trajectory was estimated with fixed stations and reference average position on the trajectory. This replaces calling both options --refavg and --statfixed.", \
        action="store_true")

    # Parse the command line arguments
    cml_args = arg_parser.parse_args()

    ############################

    # Load the pickle file, if given
    if cml_args.pickle_file is not None:
        traj = loadPickle(*os.path.split(cml_args.pickle_file))

    else:
        traj = None

    parameter_missing_message = "To compute the orbit without the existing trajectory file, {:s} must also be provided!"

    if cml_args.ra is not None:
        ra = np.radians(cml_args.ra)
    elif traj is not None:
        ra = traj.orbit.ra
    else:
        print(parameter_missing_message.format('RA'))
        sys.exit()

    if cml_args.dec is not None:
Example #10
    arg_parser = argparse.ArgumentParser(
        description="Fit the alpha-beta model to the trajectory.")

    arg_parser.add_argument('traj_path', nargs="?", metavar='TRAJ_PATH', type=str, \
        help="Path to the trajectory pickle file.")

    # Parse the command line arguments
    cml_args = arg_parser.parse_args()

    #########################

    # If the trajectory pickle was given, load the orbital elements from it
    if cml_args.traj_path is not None:

        # Load the trajectory pickle
        traj = loadPickle(*os.path.split(cml_args.traj_path))

        # Construct an input data array
        ht_data = []
        vel_data = []
        for obs in traj.observations:

            if obs.ignore_station:
                continue

            filter_mask = (obs.ignore_list == 0) & (obs.velocities != 0)

            ht_data += obs.model_ht[filter_mask].tolist()
            vel_data += obs.velocities[filter_mask].tolist()

        ht_data = np.array(ht_data)
Example #11
    ##########################################################################################################

    # Go through all systems and showers
    for shower_dir in shower_dir_list:

        sim_meteor_list = []

        # Load simulated meteors from pickle files
        for file_name in sorted(os.listdir(shower_dir)):

            if 'sim_met.pickle' in file_name:

                print('Loading pickle file:', file_name)

                sim_met = loadPickle(shower_dir, file_name)

                sim_meteor_list.append(sim_met)

        # Solve generated trajectories
        for met_no, sim_met in enumerate(sim_meteor_list):

            # Directory where trajectory results will be saved
            output_dir = os.path.join(
                shower_dir, "{:03d} - {:.6f}".format(met_no, sim_met.jdt_ref))

            # Prepare everything for saving data to disk
            sim_met.initOutput(output_dir)

            # Save info about the simulated meteor (THIS CAN BE DISABLED WHEN YOU ONLY WANT TO UPDATE SOLUTIONS)
            if not update_only:
Example #12
def collectTrajPickles(dir_path, traj_type='original', unique=False):
    """ Recursively collect all trajectory .pickle files in the given directory and load them to memory. 
    
    Arguments:
        dir_path: [str] Path to the directory.

    Keyword arguments:
        traj_type: [str] Type of the pickle file to load. 'original' by default.
            - 'sim_met' - simulated meteors
            - 'mc' - Monte Carlo trajectory
            - 'gural' - Gural trajectory
            - <anything else> - any other .pickle format will be loaded

        unique: [bool] Return only unique file names; if multiple files share the same name, return
            the one in the directory with the minimum depth.

    Return:
        [list] A list of loaded objects.

    """
    def _checkUniquenessAndDepth(lst, index):
        """ Checks if the file name with the given index is unique, and if not, if it has the smallest depth. """

        ref_name, ref_depth = lst[index]

        min_depth = np.inf

        for entry in lst:
            file_name, depth = entry

            if (ref_name == file_name):
                min_depth = min(min_depth, depth)

        # The name is kept only if the given depth is the minimum depth
        return min_depth == ref_depth

    # Get all files in the given directory structure
    dir_files = listDirRecursive(dir_path)

    # Select only pickle files
    pickle_files = [
        file_path for file_path in dir_files
        if '.pickle' in os.path.split(file_path)[1]
    ]

    # Select SimMet pickle files
    if traj_type == 'sim_met':
        pickle_files = [
            pickle_f for pickle_f in pickle_files
            if '_sim_met.pickle' in pickle_f
        ]

    # Select only Monte Carlo pickle files
    elif traj_type == 'mc':
        pickle_files = [
            pickle_f for pickle_f in pickle_files
            if '_mc_trajectory.pickle' in pickle_f
        ]

    # Select MILIG trajectories
    elif traj_type == 'milig':
        pickle_files = [
            pickle_f for pickle_f in pickle_files
            if '_milig.pickle' in pickle_f
        ]

    # Select intersecting planes trajectories
    elif traj_type == 'planes':
        pickle_files = [
            pickle_f for pickle_f in pickle_files
            if '_planes.pickle' in pickle_f
        ]

    # Select gural trajectory
    elif 'gural' in traj_type:
        pickle_files = [pickle_f for pickle_f in pickle_files if '_{:s}_trajectory.pickle'.format(traj_type) \
            in pickle_f]

    # Select non-Monte Carlo pickle files
    else:
        pickle_files = [pickle_f for pickle_f in pickle_files if ('trajectory.pickle' in pickle_f) \
            and not ('_mc' in pickle_f) and not ('_gural' in pickle_f)]

    # Get only unique file names. If there are duplicates, take those with the smallest directory depth,
    #   and if the depth is the same, take the first one alphabetically
    if unique:

        pickle_files = sorted(pickle_files)

        # Extract file names and their depths
        name_depth_list = []
        for file_name in pickle_files:

            # Split by the directory
            s = file_name.split(os.sep)

            # Store the name with the depth
            name_depth_list.append([s[-1], len(s)])

        pickle_files_unique = []

        # Find unique file names with the smallest directory depth. If depths are equal, the first file
        #   will be chosen
        added_names = []
        for i, (pickle_file,
                entry) in enumerate(zip(pickle_files, name_depth_list)):

            file_name, depth = entry

            # Check if the file name is unique and it has the smallest depth, and add it to the final list if it is
            if _checkUniquenessAndDepth(name_depth_list,
                                        i) and (file_name not in added_names):
                pickle_files_unique.append(pickle_file)
                added_names.append(file_name)

    # Load pickle files to memory
    pickle_trajs = [
        loadPickle(*os.path.split(pickle_f)) for pickle_f in pickle_files
    ]

    return pickle_trajs
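A possible invocation of the collector (the directory path is hypothetical):

# Recursively load all unique Monte Carlo solutions under the given directory
mc_trajs = collectTrajPickles('/data/trajectories', traj_type='mc', unique=True)
print('Loaded {:d} Monte Carlo trajectories'.format(len(mc_trajs)))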
Example #13
    ### ###

    # Get a list of paths of all trajectory pickle files
    traj_list = []
    for entry in os.walk(cml_args.dir_path):

        dir_path, _, file_names = entry

        # Go through all files
        for file_name in file_names:

            # Check if the file is a pickle file
            if file_name.endswith("_trajectory.pickle"):

                # Load the pickle file
                traj = loadPickle(dir_path, file_name)

                # Skip those with no orbit solution
                if traj.orbit.ra_g is None:
                    continue

                ### MINIMUM POINTS
                ### Reject all trajectories with a small number of used points ###
                points_count = [len(obs.time_data[obs.ignore_list == 0]) for obs in traj.observations \
                    if obs.ignore_station == False]

                if not points_count:
                    continue

                max_points = max(points_count)
Example #14
def sampleTrajectory(dir_path, file_name, beg_ht, end_ht, sample_step):
    """ Given the trajectory, beginning, end and step in km, this function will interpolate the 
        fireball height vs. distance and return the coordinates of sampled positions and compute the azimuth
        and elevation for every point.
    
    Arguments:


    Return:
    """

    # Load the trajectory file
    traj = loadPickle(dir_path, file_name)

    # Set begin and end heights, if not given
    if beg_ht < 0:
        beg_ht = traj.rbeg_ele / 1000

    if end_ht < 0:
        end_ht = traj.rend_ele / 1000

    # Convert heights to meters
    beg_ht *= 1000
    end_ht *= 1000
    sample_step *= 1000

    # Generate heights for sampling
    height_array = np.flipud(
        np.arange(end_ht, beg_ht + sample_step, sample_step))

    ### Fit time vs. height

    time_data = []
    height_data = []

    for obs in traj.observations:

        time_data += obs.time_data.tolist()
        height_data += obs.model_ht.tolist()

        # Plot the station data
        plt.scatter(obs.time_data,
                    obs.model_ht / 1000,
                    label=obs.station_id,
                    marker='x',
                    zorder=3)

    height_data = np.array(height_data)
    time_data = np.array(time_data)

    # Sort the arrays by decreasing time
    arr_sort_indices = np.argsort(time_data)[::-1]
    height_data = height_data[arr_sort_indices]
    time_data = time_data[arr_sort_indices]

    # Plot the non-smoothed time vs. height
    #plt.scatter(time_data, height_data/1000, label='Data')

    # Apply Savitzky-Golay to smooth out the height change
    height_data = scipy.signal.savgol_filter(height_data, 21, 5)

    plt.scatter(time_data,
                height_data / 1000,
                label='Savitzky-Golay filtered',
                marker='+',
                zorder=3)

    # Sort the arrays by increasing heights (needed for interpolation)
    arr_sort_indices = np.argsort(height_data)
    height_data = height_data[arr_sort_indices]
    time_data = time_data[arr_sort_indices]

    # Interpolate height vs. time
    ht_vs_time_interp = scipy.interpolate.PchipInterpolator(
        height_data, time_data)

    # Plot the interpolation
    ht_arr = np.linspace(np.min(height_data), np.max(height_data), 1000)
    time_arr = ht_vs_time_interp(ht_arr)

    plt.plot(time_arr, ht_arr / 1000, label='Interpolation', zorder=3)

    plt.legend()

    plt.xlabel('Time (s)')
    plt.ylabel('Height (km)')

    plt.grid()

    plt.show()

    ###

    # Take the distance from the Earth's centre to the ground below the state vector as the reference radius
    ref_radius = vectMag(traj.state_vect_mini) - np.max(height_data)

    # Compute distance from the centre of the Earth to each height
    radius_array = ref_radius + height_array

    print('Beginning coordinates (observed):')
    print('    Lat: {:.6f}'.format(np.degrees(traj.rbeg_lat)))
    print('    Lon: {:.6f}'.format(np.degrees(traj.rbeg_lon)))
    print('    Elev: {:.1f}'.format(traj.rbeg_ele))
    print()
    print("Ground-fixed azimuth and altitude:")
    print(
        ' Time(s), Sample ht (m),  Lat (deg),   Lon (deg), Height (m), Azim (deg), Elev (deg)'
    )

    # Go through every distance from the Earth centre and compute the geo coordinates at the given distance,
    #   as well as the point-to-point azimuth and elevation
    prev_eci = None
    for ht, radius in zip(height_array, radius_array):

        # If the height is lower than the end height, extrapolate using a fixed velocity of 3 km/s
        if ht < traj.rend_ele:
            t_est = ht_vs_time_interp(
                traj.rend_ele) + abs(ht - traj.rend_ele) / 3000
            time_marker = "*"

        else:

            # Estimate the fireball time at the given height using interpolated values
            t_est = ht_vs_time_interp(ht)
            time_marker = " "

        # Compute the intersection between the trajectory line and the sphere of radius at the given height
        intersections = lineAndSphereIntersections(np.array([0, 0, 0]), radius,
                                                   traj.state_vect_mini,
                                                   traj.radiant_eci_mini)

        # Choose the intersection that is closer to the state vector
        inter_min_dist_indx = np.argmin(
            [vectMag(inter - traj.state_vect_mini) for inter in intersections])
        height_eci = intersections[inter_min_dist_indx]

        # Compute the Julian date at the given height
        jd = traj.jdt_ref + t_est / 86400.0

        # Compute geographical coordinates
        lat, lon, ele_geo = cartesian2Geo(jd, *height_eci)

        # Compute azimuth and elevation
        if prev_eci is not None:

            # Compute the vector pointing from the previous point to the current point
            direction_vect = vectNorm(prev_eci - height_eci)

            ### Compute the ground-fixed alt/az

            eci_x, eci_y, eci_z = height_eci

            # Calculate the geocentric latitude (latitude which considers the Earth as an ellipsoid) of the
            # reference trajectory point
            lat_geocentric = np.arctan2(eci_z, np.sqrt(eci_x**2 + eci_y**2))

            # Calculate the velocity of the Earth rotation at the position of the reference trajectory point (m/s)
            v_e = 2 * np.pi * vectMag(height_eci) * np.cos(
                lat_geocentric) / 86164.09053

            # Calculate the equatorial coordinates of east from the reference position on the trajectory
            azimuth_east = np.pi / 2
            altitude_east = 0
            ra_east, dec_east = altAz2RADec(azimuth_east, altitude_east, jd,
                                            lat, lon)

            # The reference velocity vector has the average velocity and the given direction
            # Note that ideally this would be the instantaneous velocity
            v_ref_vect = traj.orbit.v_avg_norot * direction_vect

            v_ref_nocorr = np.zeros(3)

            # Calculate the derotated reference velocity vector/radiant
            v_ref_nocorr[0] = v_ref_vect[0] + v_e * np.cos(ra_east)
            v_ref_nocorr[1] = v_ref_vect[1] + v_e * np.sin(ra_east)
            v_ref_nocorr[2] = v_ref_vect[2]

            # Compute the radiant without Earth's rotation included
            ra_norot, dec_norot = eci2RaDec(vectNorm(v_ref_nocorr))
            azim_norot, elev_norot = raDec2AltAz(ra_norot, dec_norot, jd, lat,
                                                 lon)

            ###

        else:
            azim_norot = -np.inf
            elev_norot = -np.inf

        prev_eci = np.copy(height_eci)

        print(
            "{:s}{:7.3f}, {:13.1f}, {:10.6f}, {:11.6f}, {:10.1f}, {:10.6f}, {:10.6f}"
            .format(time_marker, t_est, ht, np.degrees(lat), np.degrees(lon),
                    ele_geo, np.degrees(azim_norot), np.degrees(elev_norot)))

    print(
        'The star * denotes heights extrapolated after the end of the fireball, with the fixed velocity of 3 km/s.'
    )

    print('End coordinates (observed):')
    print('    Lat: {:.6f}'.format(np.degrees(traj.rend_lat)))
    print('    Lon: {:.6f}'.format(np.degrees(traj.rend_lon)))
    print('    Elev: {:.1f}'.format(traj.rend_ele))
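A possible call of the sampler (paths and file name are hypothetical); as handled above, negative begin/end heights fall back to the observed trajectory limits:

# Sample the trajectory every 500 m between the observed begin and end heights
sampleTrajectory('/data/20200801_solution', '20200801_trajectory.pickle', -1, -1, 0.5)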
Example #15
    def __init__(self, dir_path_mir, traj_pickle_file):

        # Name of input file for meteor parameters
        meteor_inputs_file = config.met_sim_input_file

        # Load input meteor data
        met, consts = loadInputs(meteor_inputs_file)

        # Load the pickled trajectory
        self.traj = loadPickle(dir_path_mir, traj_pickle_file)

        self.results_list = []
        self.full_cost_list = []

        # Go through all observations
        for station_ind, obs in enumerate(self.traj.observations):

            # Name of the results file
            results_file = jd2Date(self.traj.jdt_ref, dt_obj=True).strftime('%Y%m%d_%H%M%S') + "_" \
                + str(self.traj.observations[station_ind].station_id) + "_simulations.npy"

            results_file = os.path.join(dir_path_mir, results_file)

            # Add the results file to the results list
            self.results_list.append(results_file)

            # Take the parameters of the observation with the highest beginning height
            obs_time = self.traj.observations[station_ind].time_data
            obs_length = self.traj.observations[station_ind].length

            # Fit only the first 25% of the observed trajectory
            len_part = int(0.25 * len(obs_time))

            # If the first 25% has less than 4 points, then take the first 4 points
            if len_part < 4:
                len_part = 4

            # Cut the observations to the first part of the trajectory
            obs_time = obs_time[:len_part]
            obs_length = obs_length[:len_part]

            # Calculate observed velocities
            velocities, time_diffs = calcVelocity(obs_time, obs_length)
            print(velocities)

            # Calculate the RMS of velocities
            vel_rms = np.sqrt(np.mean((velocities[1:] - self.traj.v_init)**2))

            print('Vel RMS:', vel_rms)

            # Calculate the along track differences
            along_track_diffs = (velocities[1:] -
                                 self.traj.v_init) * time_diffs[1:]

            # Calculate the full 3D residuals
            full_residuals = np.sqrt(along_track_diffs**2 \
                + self.traj.observations[station_ind].v_residuals[:len_part][1:]**2 \
                + self.traj.observations[station_ind].h_residuals[:len_part][1:]**2)

            # Calculate the average 3D deviation from the estimated trajectory
            full_cost = np.sum(np.abs(
                np.array(full_residuals))) / len(full_residuals)

            self.full_cost_list.append(full_cost)

        # Load solutions from a file
        self.loadSimulations()

        # Initialize the plot framework
        self.initGrid()

        # Initialize main plots
        self.dens_min_init, self.dens_max_init = self.updatePlots(init=True)

        self.dens_min = self.dens_min_init
        self.dens_max = self.dens_max_init

        ### SLIDERS

        # Sliders for density
        self.sl_ind_dev_1 = Slider(self.ax_sl_11,
                                   'Min',
                                   self.dens_min,
                                   self.dens_max,
                                   valinit=self.dens_min)
        self.sl_ind_dev_2 = Slider(self.ax_sl_12,
                                   'Max',
                                   self.dens_min,
                                   self.dens_max,
                                   valinit=self.dens_max,
                                   slidermin=self.sl_ind_dev_1)
        self.ax_sl_12.set_xlabel('Density')

        # Turn on slider updating
        self.sl_ind_dev_1.on_changed(self.updateSliders)
        self.sl_ind_dev_2.on_changed(self.updateSliders)

        ######

        plt.show()
Example #16
    def __init__(self, name, dir_path, met_file, traj_file, traj_uncert_file=None, metal_met_file=None, \
        frag_dict=None, fragmentation_points=None, fit_full_exp_model=False, v_init_adjust=0,
        bulk_density=3500):
        """ Structure storing input data. 
    
        Arguments:
            name: [str] Name of the event.
            dir_path: [str] Full path to the directory with data.
            met_file: [str] Name of the mirfit .met file in the dir_path directory.
            traj_file: [str] Name of the .pickle trajectory file in the dir_path directory.

        Keyword arguments:
            traj_uncert_file: [str] Path to the MC uncertainties file in the dir_path directory. None by
                default, in which case an attempt is made to find the file in the same directory as
                the traj_file.
            metal_met_file: [str] Name of the METAL .met file in the dir_path directory. 'state.met' is used 
                by default.
            frag_dict: [dict] Dictionary that maps the mirfit fragment IDs to desired numbers or names.
                None by default, in which case the fragments will be named from 1 to N.
            fragmentation_points: [dict] Determines where the fragmentation points are and in which points
                the dynamic pressure will be computed. Format:
                    {site_ID: {main fragment: [time of fragmentation, [indices of daughter fragments]],
                               main_fragment 2: ...},
                    #   ...}
            fit_full_exp_model: [bool] If True, the full exponential deceleration model will the fit, 
                including the velocity. If False, the initial velocity will be taken from the given
                trajectory and a simpler (more robust) model will be fitted to the lag.
            v_init_adjust: [float] Sometimes to get a good exponential fit, the initial velocity has to be
                adjusted on the order of +/- 100 m/s. The default value is 0 m/s.
            bulk_density: [float] Bulk density of the meteoroid in kg/m3 used for dynamic mass computation. 
                3500 kg/m3 by default.

        """

        self.name = name
        self.dir_path = dir_path
        self.met_file = met_file
        self.traj_file = traj_file

        if traj_uncert_file is None:

            # Try finding the trajectory uncertainty file
            self.traj_uncert_file = self.traj_file.replace(
                '_mc_trajectory', '_mc_uncertainties')

            # If the file cannot be found, don't use the uncertainties
            if not os.path.isfile(
                    os.path.join(self.dir_path, self.traj_uncert_file)):
                self.traj_uncert_file = None

            # Try the file name variant with the historical typo
            if self.traj_uncert_file is None:

                # Try finding the trajectory uncertainty file (misspelled variant)
                self.traj_uncert_file = self.traj_file.replace(
                    '_mc_trajectory', '_mc_uncertanties')

                # If the file cannot be found, don't use the uncertainties
                if not os.path.isfile(
                        os.path.join(self.dir_path, self.traj_uncert_file)):
                    self.traj_uncert_file = None

            if self.traj_uncert_file is None:
                print('The trajectory uncertainties file cannot be found:',
                      traj_uncert_file)

        else:
            self.traj_uncert_file = traj_uncert_file

        if metal_met_file is None:
            self.metal_met_file = 'state.met'
        else:
            self.metal_met_file = metal_met_file

        # If the file cannot be found, don't use anything
        if not os.path.isfile(os.path.join(self.dir_path,
                                           self.metal_met_file)):
            print('The METAL .met file cannot be found:', self.metal_met_file)
            self.metal_met_file = None

        # Init the fragmentation info container
        self.frag_info = FragmentationInfo(frag_dict, fragmentation_points, v_init_adjust=v_init_adjust, \
            fit_full_exp_model=fit_full_exp_model, bulk_density=bulk_density)

        # Load the Mirfit .met file
        self.met = loadMet(self.dir_path, self.met_file)

        # Load the trajectory
        self.traj = loadPickle(self.dir_path, self.traj_file)

        # Load trajectory uncertainties
        if self.traj_uncert_file is not None:
            self.traj_uncert = loadPickle(dir_path, self.traj_uncert_file)
        else:
            self.traj_uncert = None

        # Load magnitudes from the METAL .met file
        if self.metal_met_file is not None:
            self.metal_mags = loadMetalMags(self.dir_path, self.metal_met_file)
        else:
            self.metal_mags = None
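The duplicated lookup above guards against a historical misspelling of the uncertainties file name. A compact hypothetical equivalent, written as a standalone helper:

import os

def findUncertaintiesFile(dir_path, traj_file):
    """ Return the name of the MC uncertainties file next to traj_file, or None if not found. """
    # Try the correct suffix first, then the historical misspelling
    for suffix in ('_mc_uncertainties', '_mc_uncertanties'):
        candidate = traj_file.replace('_mc_trajectory', suffix)
        if os.path.isfile(os.path.join(dir_path, candidate)):
            return candidate
    return None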
Example #17
def solveTrajectoryPickle(dir_path,
                          file_name,
                          only_plot=False,
                          solver='original',
                          **kwargs):
    """ Rerun the trajectory solver on the given trajectory pickle file. """

    # Load the pickled trajectory
    traj_p = loadPickle(dir_path, file_name)

    # Run the PyLIG trajectory solver
    if solver == 'original':

        # Given the max time offset from the pickle file and input, use the larger one of the two
        max_toffset = traj_p.max_toffset
        if "max_toffset" in kwargs:

            if (kwargs["max_toffset"] is not None) and (traj_p.max_toffset
                                                        is not None):

                max_toffset = max(traj_p.max_toffset, kwargs["max_toffset"])

            # Remove the max time offset from the list of keyword arguments
            kwargs.pop("max_toffset", None)

        # Preserve the trajectory ID
        if hasattr(traj_p, "traj_id"):
            traj_id = traj_p.traj_id
        else:
            traj_id = None

        # Reinitialize the trajectory solver
        meastype = 2
        traj = Trajectory(traj_p.jdt_ref, output_dir=dir_path, max_toffset=max_toffset, \
            meastype=meastype, traj_id=traj_id, **kwargs)

        # Fill the observations
        for obs in traj_p.observations:
            traj.infillWithObs(obs, meastype=meastype)

    elif solver == 'gural':

        # Init the Gural solver
        traj = GuralTrajectory(len(traj_p.observations), traj_p.jdt_ref, velmodel=3, \
            max_toffset=traj_p.max_toffset, meastype=2, output_dir=dir_path, verbose=True)

        # Fill the observations
        for obs in traj_p.observations:

            traj.infillTrajectory(obs.azim_data, obs.elev_data, obs.time_data,
                                  obs.lat, obs.lon, obs.ele)

    else:
        print('Unrecognized solver:', solver)
        return None

    if only_plot:

        # Set saving results
        traj_p.save_results = True

        # Override plotting options with given options
        traj_p.plot_all_spatial_residuals = kwargs[
            "plot_all_spatial_residuals"]
        traj_p.plot_file_type = kwargs["plot_file_type"]

        # Show the plots
        traj_p.savePlots(dir_path,
                         traj_p.file_name,
                         show_plots=kwargs["show_plots"])

    # Recompute the trajectory
    else:

        # Run the trajectory solver
        traj.run()

    return traj
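A possible rerun of an existing solution (paths are hypothetical); any extra keyword arguments are forwarded to the Trajectory constructor:

# Re-solve with a widened maximum time offset
traj = solveTrajectoryPickle('/data/20200801_solution', '20200801_trajectory.pickle', \
    solver='original', max_toffset=10.0)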
Example #18
    # Output directory
    out_dir = os.path.join(dir_path, 'marked_frames')

    # .met file containing narrow-field picks
    met_file = 'state_fragment_picks.met'

    # .vid file
    vid_file = os.path.join('cut_20170721_070418_01T', 'ev_20170721_070420A_01T.vid')

    # Trajectory file
    traj_file = os.path.join('Monte Carlo', '20170721_070419_mc_trajectory.pickle')


    ##########################################################################################################

    # Load the MET file
    met = loadMet(dir_path, met_file)

    # Load the vid file
    vid = readVid(dir_path, vid_file)

    # Load the trajectory file
    traj = loadPickle(dir_path, traj_file)

    # ID of the site used for loading proper picks from the met object
    site_id = '1'

    # Generate images with marked fragments on them
    markFragments(out_dir, vid, met, site_id, traj=traj, crop=[175, None, None, 340])
Example #19
    arg_parser.add_argument('-n', '--nbins', metavar="NUM_BINS", nargs=1, \
        help='Number of bins for the histogram.', type=int)

    # Parse the command line arguments
    cml_args = arg_parser.parse_args()

    ############################


    dir_path_mc, mc_unc_file = os.path.split(cml_args.mc_uncertainties_path)


    ### Load trajectory pickles

    # Load uncertainties
    traj_unc = loadPickle(dir_path_mc, mc_unc_file)


    # Extract file name core
    traj_file_name_core = mc_unc_file.replace('_mc_uncertainties.pickle', '')

    # Load geometrical trajectory
    dir_path_parent = os.path.abspath(os.path.join(dir_path_mc, os.pardir))
    traj = loadPickle(dir_path_parent, traj_file_name_core + '_trajectory.pickle')

    # Load MC trajectory
    traj_best = loadPickle(dir_path_mc, traj_file_name_core + '_mc_trajectory.pickle')


    ###
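The three pickles loaded above share one file name core and follow a common layout (the names below are hypothetical):

20200801_012345_trajectory.pickle                      (geometrical solution, parent directory)
Monte Carlo/20200801_012345_mc_trajectory.pickle       (best Monte Carlo solution)
Monte Carlo/20200801_012345_mc_uncertainties.pickle    (Monte Carlo uncertainties)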
Example #20
    dir_path_metal = "/home/dvida/Dropbox/UWO Master's/Projects/MetalPrepare/20161007_052346_met"

    metal_file = 'state.met'

    ######


    # Name of input file for meteor parameters
    meteor_inputs_file = config.met_sim_input_file

    # Load input meteor data
    met, consts = loadInputs(meteor_inputs_file)


    # Load the pickled trajectory
    traj = loadPickle(dir_path_mir, traj_pickle_file)

    # Load parameters from the trajectory object
    v_init = traj.v_init
    v_avg = traj.orbit.v_avg
    zc = traj.orbit.zc



    ### Calculate the photometric mass of the meteoroid (kg) estimated from widefield data

    # Load apparent magnitudes from the METAL reduction
    time_mags = loadMetalMags(dir_path_metal, metal_file)

    masses = []