def test_acquisition():
  """Run the peregrine acquisition stage end-to-end on the sample file and
  compare the freshly produced results against the stored reference set."""
  # Fake the command line so main() runs acquisition but skips the
  # tracking and navigation stages.
  fake_argv = ['peregrine', SAMPLES, '-t', '-n']
  with patch.object(sys, 'argv', fake_argv):
    try:
      peregrine.run.main()
    except SystemExit:
      # Expected: main() exits because the track and nav results files are
      # not present and we supplied command line args to skip those stages.
      pass
  # The new acquisition results must match the saved reference results.
  fresh = load_acq_results(NEW_ACQ_RES)
  reference = load_acq_results(OLD_ACQ_RES)
  assert fresh == reference
  # Clean up the generated file so reruns start from a known state.
  os.remove(NEW_ACQ_RES)
def check_acq_results(filename, prn, doppler, code_phase): acq_results = acq.load_acq_results( get_acq_result_file_name(filename)) acq_results = sorted(acq_results, lambda x, y: -1 if x.snr > y.snr else 1) assert len(acq_results) != 0 result = acq_results[0] print "result = ", result assert (result.prn + 1) == prn # check doppler phase estimation doppler_diff = abs(abs(result.doppler) - abs(doppler)) print "doppler_diff = ", doppler_diff assert doppler_diff < 100.0 # check code phase estimation code_phase_diff = abs(abs(result.code_phase) - abs(code_phase)) print "code_phase_diff = ", code_phase_diff assert code_phase_diff < 1.0
def main():
  """Command-line entry point: acquire, track and navigate a GPS sample file.

  Pipeline stages (each skippable via a flag, in which case previously saved
  results are loaded from files derived from the input file name):
    1. acquisition -> <file>.acq_results
    2. tracking    -> <file>.track_results
    3. navigation  (results are neither saved nor returned in this revision)
  Exits with status 1 when saved results cannot be opened or when no
  satellite is acquired.
  """
  default_logging_config()

  # Initialize constants, settings
  settings = initSettings()

  parser = argparse.ArgumentParser()
  parser.add_argument("file",
                      help="the sample data file to process")
  parser.add_argument("-a", "--skip-acquisition",
                      help="use previously saved acquisition results",
                      action="store_true")
  parser.add_argument("-t", "--skip-tracking",
                      help="use previously saved tracking results",
                      action="store_true")
  parser.add_argument("-n", "--skip-navigation",
                      help="use previously saved navigation results",
                      action="store_true")
  args = parser.parse_args()
  settings.fileName = args.file

  # Samples spanning one full code period at the configured sampling rate.
  samplesPerCode = int(round(settings.samplingFreq /
                             (settings.codeFreqBasis / settings.codeLength)))

  # Do acquisition
  acq_results_file = args.file + ".acq_results"
  if args.skip_acquisition:
    logging.info("Skipping acquisition, loading saved acquisition results.")
    try:
      acq_results = load_acq_results(acq_results_file)
    except IOError:
      logging.critical("Couldn't open acquisition results file '%s'.",
                       acq_results_file)
      sys.exit(1)
  else:
    # Get 11ms of acquisition samples for fine frequency estimation
    acq_samples = load_samples(args.file, 11*samplesPerCode,
                               settings.skipNumberOfBytes)
    acq = Acquisition(acq_samples, settings.samplingFreq, settings.IF,
                      samplesPerCode)
    acq_results = acq.acquisition()
    # Saving is best-effort: a failure is logged but does not abort the
    # run, since the in-memory results remain usable.
    try:
      save_acq_results(acq_results_file, acq_results)
      logging.debug("Saving acquisition results as '%s'" % acq_results_file)
    except IOError:
      logging.error("Couldn't save acquisition results file '%s'.",
                    acq_results_file)

  # Filter out non-acquired satellites.
  acq_results = [ar for ar in acq_results if ar.status == 'A']
  if len(acq_results) == 0:
    logging.critical("No satellites acquired!")
    sys.exit(1)
  # Strongest satellites first.
  acq_results.sort(key=attrgetter('snr'), reverse=True)

  # Track the acquired satellites
  track_results_file = args.file + ".track_results"
  if args.skip_tracking:
    logging.info("Skipping tracking, loading saved tracking results.")
    try:
      with open(track_results_file, 'rb') as f:
        track_results = pickle.load(f)
    except IOError:
      logging.critical("Couldn't open tracking results file '%s'.",
                       track_results_file)
      sys.exit(1)
  else:
    # NOTE(review): 37100 looks like a hard-coded number of milliseconds of
    # signal to load for tracking -- confirm; a settings-driven value would
    # be preferable.
    signal = load_samples(args.file, int(settings.samplingFreq*1e-3*37100))
    track_results = track(signal, acq_results, settings)
    try:
      with open(track_results_file, 'wb') as f:
        pickle.dump(track_results, f)
      logging.debug("Saving tracking results as '%s'" % track_results_file)
    except IOError:
      logging.error("Couldn't save tracking results file '%s'.",
                    track_results_file)

  # Do navigation
  if not args.skip_navigation:
    # NOTE(review): the navigation solutions are computed but neither saved
    # nor returned here.
    navSolutions = navigation(track_results, settings)
def main():
  """Command-line entry point: acquire, track and navigate a GPS sample file.

  Pipeline stages (each skippable via a flag, in which case previously saved
  results are loaded from files derived from the input file name):
    1. acquisition -> <file>.acq_results
    2. tracking    -> <file>.track_results  (cPickle)
    3. navigation  -> <file>.nav_results    (cPickle)
  Exits with status 1 when saved results cannot be opened or when no
  satellite is acquired.
  """
  default_logging_config()

  # Initialize constants, settings
  settings = initSettings()

  parser = argparse.ArgumentParser()
  parser.add_argument("file",
                      help="the sample data file to process")
  parser.add_argument("-a", "--skip-acquisition",
                      help="use previously saved acquisition results",
                      action="store_true")
  parser.add_argument("-t", "--skip-tracking",
                      help="use previously saved tracking results",
                      action="store_true")
  parser.add_argument("-n", "--skip-navigation",
                      help="use previously saved navigation results",
                      action="store_true")
  args = parser.parse_args()
  settings.fileName = args.file

  # Samples spanning one full code period at the configured sampling rate.
  samplesPerCode = int(
      round(settings.samplingFreq /
            (settings.codeFreqBasis / settings.codeLength)))

  # Do acquisition
  acq_results_file = args.file + ".acq_results"
  if args.skip_acquisition:
    logging.info(
        "Skipping acquisition, loading saved acquisition results.")
    try:
      acq_results = load_acq_results(acq_results_file)
    except IOError:
      logging.critical("Couldn't open acquisition results file '%s'.",
                       acq_results_file)
      sys.exit(1)
  else:
    # Get 11ms of acquisition samples for fine frequency estimation
    acq_samples = load_samples(args.file, 11 * samplesPerCode,
                               settings.skipNumberOfBytes)
    acq = Acquisition(acq_samples, settings.samplingFreq, settings.IF,
                      samplesPerCode)
    acq_results = acq.acquisition()
    # Saving is best-effort: a failure is logged but does not abort the
    # run, since the in-memory results remain usable.
    try:
      save_acq_results(acq_results_file, acq_results)
      logging.debug("Saving acquisition results as '%s'" % acq_results_file)
    except IOError:
      logging.error("Couldn't save acquisition results file '%s'.",
                    acq_results_file)

  # Filter out non-acquired satellites.
  acq_results = [ar for ar in acq_results if ar.status == 'A']
  if len(acq_results) == 0:
    logging.critical("No satellites acquired!")
    sys.exit(1)
  # Strongest satellites first.
  acq_results.sort(key=attrgetter('snr'), reverse=True)

  # Track the acquired satellites
  track_results_file = args.file + ".track_results"
  if args.skip_tracking:
    logging.info("Skipping tracking, loading saved tracking results.")
    try:
      with open(track_results_file, 'rb') as f:
        track_results = cPickle.load(f)
    except IOError:
      logging.critical("Couldn't open tracking results file '%s'.",
                       track_results_file)
      sys.exit(1)
  else:
    # Load msToProcess milliseconds of signal (sampling_freq samples/s).
    signal = load_samples(
        args.file,
        int(settings.samplingFreq * 1e-3 * settings.msToProcess))
    track_results = track(signal, acq_results, settings)
    try:
      with open(track_results_file, 'wb') as f:
        cPickle.dump(track_results, f, protocol=cPickle.HIGHEST_PROTOCOL)
      logging.debug("Saving tracking results as '%s'" % track_results_file)
    except IOError:
      logging.error("Couldn't save tracking results file '%s'.",
                    track_results_file)

  # Do navigation
  nav_results_file = args.file + ".nav_results"
  if not args.skip_navigation:
    nav_solns = navigation(track_results, settings)
    # Flatten the (solution, time) pairs into picklable tuples of
    # (time, LLH position, NED velocity).
    nav_results = []
    for s, t in nav_solns:
      nav_results += [(t, s.pos_llh, s.vel_ned)]
    with open(nav_results_file, 'wb') as f:
      cPickle.dump(nav_results, f, protocol=cPickle.HIGHEST_PROTOCOL)
    logging.debug("Saving navigation results as '%s'" % nav_results_file)
def main():
  """Command-line entry point: acquire, track and navigate a GPS sample file.

  Uses the batch-processing Tracker: samples are read in chunks and all
  tracking channels are advanced over each chunk.  Stage outputs:
    1. acquisition -> <file>.acq_results
    2. tracking    -> per-channel output files managed by the Tracker
    3. navigation  -> <file>.nav_results (cPickle), plus a combined
                      tracking dump text file
  Returns 0 early for --no-run, returns None after printing help when no
  input file is given, and exits with status 1 on missing saved results or
  when no satellite is acquired.
  """
  default_logging_config()

  parser = argparse.ArgumentParser()
  parser.add_argument("-a", "--skip-acquisition",
                      help="use previously saved acquisition results",
                      action="store_true")
  parser.add_argument("-t", "--skip-tracking",
                      help="use previously saved tracking results",
                      action="store_true")
  parser.add_argument("-n", "--skip-navigation",
                      help="use previously saved navigation results",
                      action="store_true")
  # Shared peregrine options (file, profile, ms_to_process, ...) come from
  # the common argument populator.
  populate_peregrine_cmd_line_arguments(parser)

  args = parser.parse_args()

  if args.no_run:
    return 0

  if args.file is None:
    parser.print_help()
    return

  # Map the profile name to its frequency-plan dictionary.
  if args.profile == 'peregrine' or args.profile == 'custom_rate':
    freq_profile = defaults.freq_profile_peregrine
  elif args.profile == 'low_rate':
    freq_profile = defaults.freq_profile_low_rate
  elif args.profile == 'normal_rate':
    freq_profile = defaults.freq_profile_normal_rate
  elif args.profile == 'high_rate':
    freq_profile = defaults.freq_profile_high_rate
  else:
    raise NotImplementedError()

  # Optional stage-2 (post pull-in) tracking-loop configuration.
  if args.l1ca_profile:
    profile = defaults.l1ca_stage_profiles[args.l1ca_profile]
    stage2_coherent_ms = profile[1]['coherent_ms']
    stage2_params = profile[1]['loop_filter_params']
  else:
    stage2_coherent_ms = None
    stage2_params = None

  if args.pipelining is not None:
    tracker_options = {'mode': 'pipelining', 'k': args.pipelining}
  else:
    tracker_options = None

  ms_to_process = int(args.ms_to_process)

  # --skip-ms (milliseconds) takes precedence over --skip-samples.
  skip_samples = 0
  if args.skip_samples is not None:
    skip_samples = args.skip_samples

  if args.skip_ms is not None:
    skip_samples = int(args.skip_ms * freq_profile['sampling_freq'] / 1e3)

  # Shared sample-buffer state passed to load_samples()/Tracker:
  # per-signal IF, total sample count (-1 = unknown yet) and the current
  # read offset.
  samples = {gps.L1CA: {'IF': freq_profile['GPS_L1_IF']},
             gps.L2C: {'IF': freq_profile['GPS_L2_IF']},
             'samples_total': -1,
             'sample_index': skip_samples}

  # Do acquisition
  acq_results_file = args.file + ".acq_results"
  if args.skip_acquisition:
    logging.info("Skipping acquisition, loading saved acquisition results.")
    try:
      acq_results = load_acq_results(acq_results_file)
    except IOError:
      logging.critical("Couldn't open acquisition results file '%s'.",
                       acq_results_file)
      sys.exit(1)
  else:
    for signal in [gps.L1CA]:

      # Samples spanning one full L1 C/A code period.
      samplesPerCode = int(round(freq_profile['sampling_freq'] /
                                 (gps.l1ca_chip_rate / gps.l1ca_code_length)))

      # Get 11ms of acquisition samples for fine frequency estimation
      load_samples(samples=samples,
                   num_samples=11 * samplesPerCode,
                   filename=args.file,
                   file_format=args.file_format)

      acq = Acquisition(signal,
                        samples[signal]['samples'],
                        freq_profile['sampling_freq'],
                        freq_profile['GPS_L1_IF'],
                        gps.l1ca_code_period * freq_profile['sampling_freq'],
                        gps.l1ca_code_length)
      # only one signal - L1CA is expected to be acquired at the moment
      # TODO: add handling of acquisition results from GLONASS once GLONASS
      # acquisition is supported.
      acq_results = acq.acquisition(progress_bar_output=args.progress_bar)

    print "Acquisition is over!"

    # Saving is best-effort: a failure is logged but does not abort the
    # run, since the in-memory results remain usable.
    try:
      save_acq_results(acq_results_file, acq_results)
      logging.debug("Saving acquisition results as '%s'" % acq_results_file)
    except IOError:
      logging.error("Couldn't save acquisition results file '%s'.",
                    acq_results_file)

  # Filter out non-acquired satellites.
  acq_results = [ar for ar in acq_results if ar.status == 'A']
  if len(acq_results) == 0:
    logging.critical("No satellites acquired!")
    sys.exit(1)
  # Strongest satellites first.
  acq_results.sort(key=attrgetter('snr'), reverse=True)

  # Track the acquired satellites
  if not args.skip_tracking:

    # Remove tracking output files from the previous session.
    removeTrackingOutputFiles(args.file)

    load_samples(samples=samples,
                 filename=args.file,
                 file_format=args.file_format)
    if ms_to_process < 0:
      # Negative ms_to_process means "process the whole file".
      ms_to_process = int(
          1e3 * samples['samples_total'] / freq_profile['sampling_freq'])

    # Create the tracker object, which also create one tracking
    # channel per each acquisition result in 'acq_results' list.
    tracker = tracking.Tracker(samples=samples,
                               channels=acq_results,
                               ms_to_track=ms_to_process,
                               sampling_freq=freq_profile[
                                   'sampling_freq'],  # [Hz]
                               stage2_coherent_ms=stage2_coherent_ms,
                               stage2_loop_filter_params=stage2_params,
                               tracker_options=tracker_options,
                               output_file=args.file,
                               progress_bar_output=args.progress_bar,
                               check_l2c_mask=args.check_l2c_mask)

    # The tracking channels are designed to support batch processing.
    # In the batch processing mode the data samples are provided in
    # batches (chunks) of 'defaults.processing_block_size' bytes size.
    # The loop below runs all tracking channels for each batch as it
    # reads it from the samples file.
    tracker.start()
    condition = True
    while condition:
      # Each tracking channel remembers its own data samples offset within
      # 'samples' such that when new batch of data is provided, it
      # starts precisely, where it finished at the previous batch
      # processing round.
      # 'sample_index' is set to the smallest offset within 'samples'
      # array across all tracking channels.
      sample_index = tracker.run_channels(samples)
      if sample_index == samples['sample_index']:
        # No channel advanced: all data is consumed, stop the loop.
        condition = False
      else:
        samples['sample_index'] = sample_index
        load_samples(samples=samples,
                     filename=args.file,
                     file_format=args.file_format)
    fn_results = tracker.stop()

    logging.debug("Saving tracking results as '%s'" % fn_results)

  # Do navigation
  if not args.skip_navigation:
    combinedResultObject = TrackingResults(args.file)

    # Dump combined output into a text file
    with open(createTrackingDumpOutputFileName(args.file), "wt") as f:
      logging.debug("Creating combined tracking file %s", f.name)
      combinedResultObject.dump(f)

    samplingFreqHz = freq_profile['sampling_freq']
    nav_solns = navigation(combinedResultObject, samplingFreqHz)
    # Flatten the (solution, time) pairs into picklable tuples of
    # (time, LLH position, NED velocity).
    nav_results = []
    for s, t in nav_solns:
      nav_results += [(t, s.pos_llh, s.vel_ned)]
    if len(nav_results):
      print "First nav solution: t=%s lat=%.5f lon=%.5f h=%.1f vel_ned=(%.2f, %.2f, %.2f)" % (
          nav_results[0][0],
          np.degrees(nav_results[0][1][0]),
          np.degrees(nav_results[0][1][1]),
          nav_results[0][1][2],
          nav_results[0][2][0], nav_results[0][2][1], nav_results[0][2][2])
      nav_results_file = args.file + ".nav_results"
      with open(nav_results_file, 'wb') as f:
        cPickle.dump(nav_results, f, protocol=cPickle.HIGHEST_PROTOCOL)
      print "and %d more are cPickled in '%s'." % (len(nav_results) - 1,
                                                   nav_results_file)
    else:
      print "No navigation results."
def main():
  """Command-line entry point: acquire, track and navigate a GPS sample file.

  Like the other revisions but with a selectable sample file format (-f).
  Stage outputs (each stage skippable via a flag, in which case previously
  saved results are loaded):
    1. acquisition -> <file>.acq_results
    2. tracking    -> <file>.track_results (cPickle)
    3. navigation  -> <file>.nav_results   (cPickle)
  Exits with status 1 when saved results cannot be opened or when no
  satellite is acquired.
  """
  default_logging_config()

  # Initialize constants, settings
  settings = initSettings()

  parser = argparse.ArgumentParser()
  parser.add_argument("file",
                      help="the sample data file to process")
  parser.add_argument("-a", "--skip-acquisition",
                      help="use previously saved acquisition results",
                      action="store_true")
  parser.add_argument("-t", "--skip-tracking",
                      help="use previously saved tracking results",
                      action="store_true")
  parser.add_argument("-n", "--skip-navigation",
                      help="use previously saved navigation results",
                      action="store_true")
  parser.add_argument("-f", "--file-format",
                      default=defaults.file_format,
                      help="the format of the sample data file "
                      "(e.g. 'piksi', 'int8', '1bit', '1bitrev')")
  args = parser.parse_args()
  settings.fileName = args.file

  # Samples spanning one full code period at the configured sampling rate.
  samplesPerCode = int(round(settings.samplingFreq /
                             (settings.codeFreqBasis / settings.codeLength)))

  # Do acquisition
  acq_results_file = args.file + ".acq_results"
  if args.skip_acquisition:
    logging.info("Skipping acquisition, loading saved acquisition results.")
    try:
      acq_results = load_acq_results(acq_results_file)
    except IOError:
      logging.critical("Couldn't open acquisition results file '%s'.",
                       acq_results_file)
      sys.exit(1)
  else:
    # Get 11ms of acquisition samples for fine frequency estimation
    acq_samples = load_samples(args.file, 11*samplesPerCode,
                               settings.skipNumberOfBytes,
                               file_format=args.file_format)
    acq = Acquisition(acq_samples)
    acq_results = acq.acquisition()
    # Saving is best-effort: a failure is logged but does not abort the
    # run, since the in-memory results remain usable.
    try:
      save_acq_results(acq_results_file, acq_results)
      logging.debug("Saving acquisition results as '%s'" % acq_results_file)
    except IOError:
      logging.error("Couldn't save acquisition results file '%s'.",
                    acq_results_file)

  # Filter out non-acquired satellites.
  acq_results = [ar for ar in acq_results if ar.status == 'A']
  if len(acq_results) == 0:
    logging.critical("No satellites acquired!")
    sys.exit(1)
  # Strongest satellites first.
  acq_results.sort(key=attrgetter('snr'), reverse=True)

  # Track the acquired satellites
  track_results_file = args.file + ".track_results"
  if args.skip_tracking:
    logging.info("Skipping tracking, loading saved tracking results.")
    try:
      with open(track_results_file, 'rb') as f:
        track_results = cPickle.load(f)
    except IOError:
      logging.critical("Couldn't open tracking results file '%s'.",
                       track_results_file)
      sys.exit(1)
  else:
    # NOTE(review): the extra 22 ms of signal beyond msToProcess presumably
    # gives the tracking loops headroom -- confirm against track().
    signal = load_samples(args.file,
                          int(settings.samplingFreq*1e-3*
                              (settings.msToProcess+22)),
                          settings.skipNumberOfBytes,
                          file_format=args.file_format)
    track_results = track(signal, acq_results, settings.msToProcess)
    try:
      with open(track_results_file, 'wb') as f:
        cPickle.dump(track_results, f, protocol=cPickle.HIGHEST_PROTOCOL)
      logging.debug("Saving tracking results as '%s'" % track_results_file)
    except IOError:
      logging.error("Couldn't save tracking results file '%s'.",
                    track_results_file)

  # Do navigation
  nav_results_file = args.file + ".nav_results"
  if not args.skip_navigation:
    nav_solns = navigation(track_results, settings)
    # Flatten the (solution, time) pairs into picklable tuples of
    # (time, LLH position, NED velocity).
    nav_results = []
    for s, t in nav_solns:
      nav_results += [(t, s.pos_llh, s.vel_ned)]
    if len(nav_results):
      print "First nav solution: t=%s lat=%.5f lon=%.5f h=%.1f vel_ned=(%.2f, %.2f, %.2f)" % (
          nav_results[0][0],
          np.degrees(nav_results[0][1][0]),
          np.degrees(nav_results[0][1][1]),
          nav_results[0][1][2],
          nav_results[0][2][0], nav_results[0][2][1], nav_results[0][2][2])
      with open(nav_results_file, 'wb') as f:
        cPickle.dump(nav_results, f, protocol=cPickle.HIGHEST_PROTOCOL)
      print "and %d more are cPickled in '%s'." % (len(nav_results)-1,
                                                   nav_results_file)
    else:
      print "No navigation results."
def main():
  """Command-line entry point: acquire, track and navigate a GPS sample file.

  Uses the batch-processing Tracker: samples are read in chunks and all
  tracking channels are advanced over each chunk.  Stage outputs:
    1. acquisition -> <file>.acq_results
    2. tracking    -> per-channel output files managed by the Tracker
    3. navigation  -> <file>.nav_results (cPickle), plus a combined
                      tracking dump text file
  Returns 0 early for --no-run, returns None after printing help when no
  input file is given, and exits with status 1 on missing saved results or
  when no satellite is acquired.
  """
  default_logging_config()

  parser = argparse.ArgumentParser()
  parser.add_argument("-a", "--skip-acquisition",
                      help="use previously saved acquisition results",
                      action="store_true")
  parser.add_argument("-t", "--skip-tracking",
                      help="use previously saved tracking results",
                      action="store_true")
  parser.add_argument("-n", "--skip-navigation",
                      help="use previously saved navigation results",
                      action="store_true")
  # Shared peregrine options (file, profile, ms_to_process, ...) come from
  # the common argument populator.
  populate_peregrine_cmd_line_arguments(parser)

  args = parser.parse_args()

  if args.no_run:
    return 0

  if args.file is None:
    parser.print_help()
    return

  # Map the profile name to its frequency-plan dictionary.
  if args.profile == 'peregrine' or args.profile == 'custom_rate':
    freq_profile = defaults.freq_profile_peregrine
  elif args.profile == 'low_rate':
    freq_profile = defaults.freq_profile_low_rate
  elif args.profile == 'normal_rate':
    freq_profile = defaults.freq_profile_normal_rate
  elif args.profile == 'high_rate':
    freq_profile = defaults.freq_profile_high_rate
  else:
    raise NotImplementedError()

  # Optional stage-2 (post pull-in) tracking-loop configuration.
  if args.l1ca_profile:
    profile = defaults.l1ca_stage_profiles[args.l1ca_profile]
    stage2_coherent_ms = profile[1]['coherent_ms']
    stage2_params = profile[1]['loop_filter_params']
  else:
    stage2_coherent_ms = None
    stage2_params = None

  if args.pipelining is not None:
    tracker_options = {'mode': 'pipelining', 'k': args.pipelining}
  else:
    tracker_options = None

  ms_to_process = int(args.ms_to_process)

  # --skip-ms (milliseconds) takes precedence over --skip-samples.
  skip_samples = 0
  if args.skip_samples is not None:
    skip_samples = args.skip_samples

  if args.skip_ms is not None:
    skip_samples = int(args.skip_ms * freq_profile['sampling_freq'] / 1e3)

  # Shared sample-buffer state passed to load_samples()/Tracker:
  # per-signal IF, total sample count (-1 = unknown yet) and the current
  # read offset.
  samples = {
      gps.L1CA: {
          'IF': freq_profile['GPS_L1_IF']
      },
      gps.L2C: {
          'IF': freq_profile['GPS_L2_IF']
      },
      'samples_total': -1,
      'sample_index': skip_samples
  }

  # Do acquisition
  acq_results_file = args.file + ".acq_results"
  if args.skip_acquisition:
    logging.info(
        "Skipping acquisition, loading saved acquisition results.")
    try:
      acq_results = load_acq_results(acq_results_file)
    except IOError:
      logging.critical("Couldn't open acquisition results file '%s'.",
                       acq_results_file)
      sys.exit(1)
  else:
    for signal in [gps.L1CA]:

      # Samples spanning one full L1 C/A code period.
      samplesPerCode = int(
          round(freq_profile['sampling_freq'] /
                (gps.l1ca_chip_rate / gps.l1ca_code_length)))

      # Get 11ms of acquisition samples for fine frequency estimation
      load_samples(samples=samples,
                   num_samples=11 * samplesPerCode,
                   filename=args.file,
                   file_format=args.file_format)

      acq = Acquisition(
          signal,
          samples[signal]['samples'],
          freq_profile['sampling_freq'],
          freq_profile['GPS_L1_IF'],
          gps.l1ca_code_period * freq_profile['sampling_freq'],
          gps.l1ca_code_length)
      # only one signal - L1CA is expected to be acquired at the moment
      # TODO: add handling of acquisition results from GLONASS once GLONASS
      # acquisition is supported.
      acq_results = acq.acquisition(
          progress_bar_output=args.progress_bar)

    print "Acquisition is over!"

    # Saving is best-effort: a failure is logged but does not abort the
    # run, since the in-memory results remain usable.
    try:
      save_acq_results(acq_results_file, acq_results)
      logging.debug("Saving acquisition results as '%s'" % acq_results_file)
    except IOError:
      logging.error("Couldn't save acquisition results file '%s'.",
                    acq_results_file)

  # Filter out non-acquired satellites.
  acq_results = [ar for ar in acq_results if ar.status == 'A']
  if len(acq_results) == 0:
    logging.critical("No satellites acquired!")
    sys.exit(1)
  # Strongest satellites first.
  acq_results.sort(key=attrgetter('snr'), reverse=True)

  # Track the acquired satellites
  if not args.skip_tracking:

    # Remove tracking output files from the previous session.
    removeTrackingOutputFiles(args.file)

    load_samples(samples=samples,
                 filename=args.file,
                 file_format=args.file_format)
    if ms_to_process < 0:
      # Negative ms_to_process means "process the whole file".
      ms_to_process = int(1e3 * samples['samples_total'] /
                          freq_profile['sampling_freq'])

    # Create the tracker object, which also create one tracking
    # channel per each acquisition result in 'acq_results' list.
    tracker = tracking.Tracker(
        samples=samples,
        channels=acq_results,
        ms_to_track=ms_to_process,
        sampling_freq=freq_profile['sampling_freq'],  # [Hz]
        stage2_coherent_ms=stage2_coherent_ms,
        stage2_loop_filter_params=stage2_params,
        tracker_options=tracker_options,
        output_file=args.file,
        progress_bar_output=args.progress_bar,
        check_l2c_mask=args.check_l2c_mask)

    # The tracking channels are designed to support batch processing.
    # In the batch processing mode the data samples are provided in
    # batches (chunks) of 'defaults.processing_block_size' bytes size.
    # The loop below runs all tracking channels for each batch as it
    # reads it from the samples file.
    tracker.start()
    condition = True
    while condition:
      # Each tracking channel remembers its own data samples offset within
      # 'samples' such that when new batch of data is provided, it
      # starts precisely, where it finished at the previous batch
      # processing round.
      # 'sample_index' is set to the smallest offset within 'samples'
      # array across all tracking channels.
      sample_index = tracker.run_channels(samples)
      if sample_index == samples['sample_index']:
        # No channel advanced: all data is consumed, stop the loop.
        condition = False
      else:
        samples['sample_index'] = sample_index
        load_samples(samples=samples,
                     filename=args.file,
                     file_format=args.file_format)
    fn_results = tracker.stop()

    logging.debug("Saving tracking results as '%s'" % fn_results)

  # Do navigation
  if not args.skip_navigation:
    combinedResultObject = TrackingResults(args.file)

    # Dump combined output into a text file
    with open(createTrackingDumpOutputFileName(args.file), "wt") as f:
      logging.debug("Creating combined tracking file %s", f.name)
      combinedResultObject.dump(f)

    samplingFreqHz = freq_profile['sampling_freq']
    nav_solns = navigation(combinedResultObject, samplingFreqHz)
    # Flatten the (solution, time) pairs into picklable tuples of
    # (time, LLH position, NED velocity).
    nav_results = []
    for s, t in nav_solns:
      nav_results += [(t, s.pos_llh, s.vel_ned)]
    if len(nav_results):
      print "First nav solution: t=%s lat=%.5f lon=%.5f h=%.1f vel_ned=(%.2f, %.2f, %.2f)" % (
          nav_results[0][0],
          np.degrees(nav_results[0][1][0]),
          np.degrees(nav_results[0][1][1]),
          nav_results[0][1][2],
          nav_results[0][2][0], nav_results[0][2][1], nav_results[0][2][2])
      nav_results_file = args.file + ".nav_results"
      with open(nav_results_file, 'wb') as f:
        cPickle.dump(nav_results, f, protocol=cPickle.HIGHEST_PROTOCOL)
      print "and %d more are cPickled in '%s'." % (len(nav_results) - 1,
                                                   nav_results_file)
    else:
      print "No navigation results."