Code Example #1
 def plot_arb_ids(self, id_dictionary: dict, signal_dictionary: dict,
                  vehicle_number: str):
     plot_signals_by_arb_id(a_timer=a_timer,
                            arb_id_dict=id_dictionary,
                            signal_dict=signal_dictionary,
                            vehicle_number=vehicle_number,
                            force=force_arb_id_plotting)
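In the example above, a_timer and force_arb_id_plotting are free names, so they must already be bound in the enclosing module (or notebook) scope before the method runs. The snippet below is a minimal, hypothetical setup sketch; SimpleTimer and both placeholder values are assumptions, not names taken from the original project.

import time

class SimpleTimer:
    # Placeholder for whatever timer/bookkeeping object the project
    # actually passes around as a_timer.
    def __init__(self):
        self.started_at = time.time()

a_timer = SimpleTimer()          # placeholder timer object
force_arb_id_plotting = False    # assumed meaning: re-generate plots even if they already exist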
Code Example #2
File: Sample.py  Project: jupadhya1/CBRE
 def plot_arb_ids(self, id_dictionary: dict, signal_dictionary: dict,
                  vehicle_number: str):
     self.make_and_move_to_vehicle_directory()
     plot_signals_by_arb_id(a_timer=a_timer,
                            arb_id_dict=id_dictionary,
                            signal_dict=signal_dictionary,
                            vehicle_number=vehicle_number,
                            force=force_arb_id_plotting)
     self.move_back_to_parent_directory()
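Example #2 brackets the plotting call with make_and_move_to_vehicle_directory() and move_back_to_parent_directory() so the output lands in a per-vehicle folder. A minimal sketch of the same idea, not taken from the original project, is a context manager that restores the working directory even if plotting raises an exception:

import os
from contextlib import contextmanager

@contextmanager
def inside_directory(path: str):
    # Create the directory if needed, chdir into it, and always chdir back.
    os.makedirs(path, exist_ok=True)
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)

# Hypothetical usage, mirroring the method above:
# with inside_directory(vehicle_number):
#     plot_signals_by_arb_id(a_timer=a_timer, arb_id_dict=id_dictionary,
#                            signal_dict=signal_dictionary,
#                            vehicle_number=vehicle_number,
#                            force=force_arb_id_plotting)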
Code Example #3
File: Main.py  Project: zoraj/CAN_Reverse_Engineering
if j1979_dictionary:
    plot_j1979(a_timer, j1979_dictionary, force_j1979_plotting)

#                 LEXICAL ANALYSIS                     #
print("\n\t\t\t##### BEGINNING LEXICAL ANALYSIS #####")
tokenize_dictionary(a_timer,
                    id_dictionary,
                    force_lexical_analysis,
                    include_padding=tokenize_padding,
                    merge=True,
                    max_distance=tokenization_bit_distance)
signal_dictionary = generate_signals(a_timer, id_dictionary,
                                     pickle_signal_filename,
                                     signal_normalize_strategy,
                                     force_lexical_analysis)
plot_signals_by_arb_id(a_timer, id_dictionary, signal_dictionary,
                       force_arb_id_plotting)

#                  SEMANTIC ANALYSIS                    #
print("\n\t\t\t##### BEGINNING SEMANTIC ANALYSIS #####")
subset_df = subset_selection(a_timer,
                             signal_dictionary,
                             pickle_subset_filename,
                             force_semantic_analysis,
                             subset_size=subset_selection_size)
corr_matrix_subset = subset_correlation(subset_df, csv_correlation_filename,
                                        force_semantic_analysis)
cluster_dict = greedy_signal_clustering(
    corr_matrix_subset,
    correlation_threshold=min_correlation_threshold,
    fuzzy_labeling=fuzzy_labeling)
df_full, corr_matrix_full, cluster_dict = label_propagation(