Example #1
0
 def generate_signals(self,
                      id_dictionary: dict,
                      postpone_pickle: bool = False) -> dict:
     """Generate the signal dictionary for this sample's arbitration ID dictionary.

     Delegates to the module-level ``generate_signals`` pipeline function.

     :param id_dictionary: mapping of arbitration IDs produced by pre-processing.
     :param postpone_pickle: True when J1979 data was present in the sample; in
         that case, saving the signal dictionary is deferred until correlated
         signals are labeled by ``sample.j1979_labeling()``.
     :return: the signal dictionary returned by the pipeline function.
     """
     # NOTE(review): a_timer, pickle_signal_filename, signal_normalize_strategy
     # and force_signal_generation are free names resolved from enclosing scope —
     # presumably module-level configuration; confirm against the full file.
     signal_dict = generate_signals(
         a_timer=a_timer,
         arb_id_dict=id_dictionary,
         signal_pickle_filename=pickle_signal_filename,
         normalize_strategy=signal_normalize_strategy,
         force=force_signal_generation)
     # Removed long-dead commented-out pickling code; the postpone_pickle
     # deferral described above is handled by the caller / j1979_labeling().
     return signal_dict
Example #2
0
# PRE-PROCESSING: build the arbitration-ID dictionary (and any J1979 diagnostic
# data found in the capture) from the raw sample.
id_dictionary, j1979_dictionary = pre_processor.generate_arb_id_dictionary(
    a_timer, tang_normalize_strategy, time_conversion, freq_analysis_accuracy,
    freq_synchronous_threshold, force_pre_processing)
# J1979 data is optional; only plot it when the capture actually contained some.
if j1979_dictionary:
    plot_j1979(a_timer, j1979_dictionary, force_j1979_plotting)

#                 LEXICAL ANALYSIS                     #
print("\n\t\t\t##### BEGINNING LEXICAL ANALYSIS #####")
# Tokenize each arbitration ID's payloads in place (mutates id_dictionary —
# presumably; verify against tokenize_dictionary's definition).
tokenize_dictionary(a_timer,
                    id_dictionary,
                    force_lexical_analysis,
                    include_padding=tokenize_padding,
                    merge=True,
                    max_distance=tokenization_bit_distance)
# Derive continuous signals from the tokenized IDs; results may be pickled to
# pickle_signal_filename by the callee.
signal_dictionary = generate_signals(a_timer, id_dictionary,
                                     pickle_signal_filename,
                                     signal_normalize_strategy,
                                     force_lexical_analysis)
plot_signals_by_arb_id(a_timer, id_dictionary, signal_dictionary,
                       force_arb_id_plotting)

#                  SEMANTIC ANALYSIS                    #
print("\n\t\t\t##### BEGINNING SEMANTIC ANALYSIS #####")
# Select a subset of signals (DataFrame) for correlation, capped at
# subset_selection_size; may be pickled to pickle_subset_filename.
subset_df = subset_selection(a_timer,
                             signal_dictionary,
                             pickle_subset_filename,
                             force_semantic_analysis,
                             subset_size=subset_selection_size)
# Pairwise correlation matrix over the selected subset, written to CSV.
corr_matrix_subset = subset_correlation(subset_df, csv_correlation_filename,
                                        force_semantic_analysis)
cluster_dict = greedy_signal_clustering(
    corr_matrix_subset,