def plot_j1979(self, j1979_dictionary: dict, vehicle_number: str):
    """Plot the J1979 response data for one vehicle inside its own directory.

    Moves into the vehicle-specific directory, delegates to the module-level
    ``plot_j1979`` helper, and restores the working directory afterwards.

    :param j1979_dictionary: mapping of J1979 data to plot (structure defined
        by the pre-processing stage -- not visible from this chunk).
    :param vehicle_number: identifier used to label/locate the vehicle's output.
    """
    self.make_and_move_to_vehicle_directory()
    try:
        # NOTE: the unqualified call resolves to the module-level plot_j1979
        # function (the method name only shadows it at class scope).
        plot_j1979(a_timer, j1979_dictionary, vehicle_number, force_j1979_plotting)
    finally:
        # Bug fix: always restore the working directory, even when plotting
        # raises -- otherwise the process is stranded in the vehicle directory
        # and every subsequent relative path is wrong.
        self.move_back_to_parent_directory()
def plot_j1979(self, j1979_dictionary: dict, vehicle_number: str):
    """Delegate J1979 plotting for one vehicle to the module-level helper.

    Thin wrapper: no directory management is performed here, unlike the
    directory-aware variant elsewhere in this file.

    :param j1979_dictionary: mapping of J1979 data to plot.
    :param vehicle_number: identifier used to label the vehicle's plots.
    """
    # The unqualified name resolves to the module-level plot_j1979 function;
    # the method name only shadows it at class scope.
    plot_args = (a_timer, j1979_dictionary, vehicle_number, force_j1979_plotting)
    plot_j1979(*plot_args)
# Threshold parameters used during semantic analysis subset_selection_size: float = 0.25 fuzzy_labeling: bool = True min_correlation_threshold: float = 0.85 # A timer class to record timings throughout the pipeline. a_timer = PipelineTimer(verbose=True) # DATA IMPORT AND PRE-PROCESSING # pre_processor = PreProcessor(can_data_filename, pickle_arb_id_filename, pickle_j1979_filename) id_dictionary, j1979_dictionary = pre_processor.generate_arb_id_dictionary( a_timer, tang_normalize_strategy, time_conversion, freq_analysis_accuracy, freq_synchronous_threshold, force_pre_processing) if j1979_dictionary: plot_j1979(a_timer, j1979_dictionary, force_j1979_plotting) # LEXICAL ANALYSIS # print("\n\t\t\t##### BEGINNING LEXICAL ANALYSIS #####") tokenize_dictionary(a_timer, id_dictionary, force_lexical_analysis, include_padding=tokenize_padding, merge=True, max_distance=tokenization_bit_distance) signal_dictionary = generate_signals(a_timer, id_dictionary, pickle_signal_filename, signal_normalize_strategy, force_lexical_analysis) plot_signals_by_arb_id(a_timer, id_dictionary, signal_dictionary, force_arb_id_plotting)