def extract_phys(self, df_raw, tp_type=None):
    """Decode raw CAN data into physical values using the loaded DBC databases.

    Given a DataFrame of raw CAN frames and the list of decoding databases
    in ``self.db_list``, build a new DataFrame of physical values with
    duplicate signals removed and (optionally) filtered/rebaselined.

    Parameters
    ----------
    df_raw : pandas.DataFrame
        Raw CAN frames, indexed by timestamp, with a ``DataLength`` column.
    tp_type : optional
        Transport-protocol type. When not ``None``, frames are decoded per
        ``DataLength`` group and re-sorted by index afterwards.

    Returns
    -------
    pandas.DataFrame
        Decoded physical values after de-duplication, filtering and
        rebaselining.
    """
    import can_decoder
    import pandas as pd

    # collect per-database results and concatenate once at the end
    # (DataFrame.append was removed in pandas 2.0; pd.concat is the
    # supported replacement and avoids quadratic re-copying)
    decoded_parts = []
    for db in self.db_list:
        df_decoder = can_decoder.DataFrameDecoder(db)

        if tp_type is not None:
            # decode each DataLength group separately, then restore
            # chronological order across the groups
            group_parts = [
                df_decoder.decode_frame(group)
                for _, group in df_raw.groupby("DataLength")
            ]
            if group_parts:
                decoded_parts.append(pd.concat(group_parts).sort_index())
        else:
            decoded_parts.append(df_decoder.decode_frame(df_raw))

    df_phys = pd.concat(decoded_parts) if decoded_parts else pd.DataFrame()

    # remove duplicates in case multiple DBC files contain identical signals;
    # the timestamp index must take part in the comparison, so expose it as a
    # temporary column while de-duplicating
    df_phys["datetime"] = df_phys.index
    df_phys = df_phys.drop_duplicates(keep="first")
    # keyword form: positional `axis` was removed in pandas 2.0
    df_phys = df_phys.drop(columns="datetime")

    # optionally filter and rebaseline the data
    df_phys = self.filter_signals(df_phys)
    df_phys = self.rebaseline_data(df_phys)

    return df_phys
# specify which devices to process (from local folder or S3 bucket)
devices = ["LOG/958D2219"]

# specify which time period to fetch log files for
start = datetime(year=2020, month=1, day=13, hour=0, minute=0, tzinfo=timezone.utc)
stop = datetime(year=2099, month=1, day=1, tzinfo=timezone.utc)

# specify DBC path
dbc_path = r"CSS-Electronics-SAE-J1939-DEMO.dbc"

# ---------------------------------------------------
# initialize DBC converter and file loader
db = can_decoder.load_dbc(dbc_path)
df_decoder = can_decoder.DataFrameDecoder(db)

# fs = setup_fs_s3()
fs = setup_fs()

# List log files based on inputs
log_files = canedge_browser.get_log_files(fs, devices, start_date=start, stop_date=stop)
print(f"Found a total of {len(log_files)} log files")

df_concat = []
for log_file in log_files:
    # open log file, get device id and extract dataframe with raw CAN data
    print(f"\nProcessing log file: {log_file}")
    with fs.open(log_file, "rb") as handle:
        mdf_file = mdf_iter.MdfFile(handle)