def process_tp_example(devices, dbc_path, tp_type):
    """Decode transport-protocol (TP) CAN data from MF4 log files to CSV.

    For each log file found for *devices*, saves three CSVs per log file
    under an ``output/`` sub-folder: the raw data, the raw data with TP
    sequences combined into single frames, and the extracted physical values.

    :param devices:  list of device paths (e.g. ``["LOG/958D2219"]``)
    :param dbc_path: a DBC file path (str) or a list of DBC file paths
    :param tp_type:  transport protocol type for MultiFrameDecoder
                     (e.g. "j1939", "uds" — see MultiFrameDecoder docs)
    """
    fs = setup_fs(s3=False)

    # BUGFIX: original referenced the undefined name `dbc_paths` (NameError).
    # Accept either a single path or a list; load_dbc_files expects a list.
    dbc_paths = dbc_path if isinstance(dbc_path, list) else [dbc_path]
    db_list = load_dbc_files(dbc_paths)

    log_files = canedge_browser.get_log_files(fs, devices)
    proc = ProcessData(fs, db_list)

    for log_file in log_files:
        # one output sub-folder per log file; log_file paths start with "/"
        output_folder = "output" + log_file.replace(".MF4", "")
        # exist_ok avoids the racy exists()-then-makedirs() pattern
        os.makedirs(output_folder, exist_ok=True)

        df_raw, device_id = proc.get_raw_data(log_file)
        df_raw.to_csv(f"{output_folder}/tp_raw_data.csv")

        # replace transport protocol sequences with single frames
        tp = MultiFrameDecoder(tp_type)
        df_raw = tp.combine_tp_frames(df_raw)
        df_raw.to_csv(f"{output_folder}/tp_raw_data_combined.csv")

        # extract physical values as normal, but add tp_type
        df_phys = proc.extract_phys(df_raw)
        df_phys.to_csv(f"{output_folder}/tp_physical_values.csv")

    print("Finished saving CSV output for devices:", devices)
# Write decoded CAN signals to InfluxDB for a set of CANedge devices.
# BUGFIX: the original `from utils import (...)` statement was truncated
# (orphan `)`), leaving `setup_fs` — used below — unimported. Reconstructed
# from the names this script actually calls.
from utils import (
    setup_fs,
    load_dbc_files,
    list_log_files,
    ProcessData,
    MultiFrameDecoder,
)
from utils_db import SetupInflux
import inputs as inp

# initialize connection to InfluxDB + get latest data entries per device
influx = SetupInflux(inp.influx_url, inp.token, inp.org_id, inp.influx_bucket, inp.res)
start_times = influx.get_start_times(inp.devices, inp.default_start, inp.dynamic)

# setup filesystem (local/S3), load DBC files and list log files for processing
fs = setup_fs(inp.s3, inp.key, inp.secret, inp.endpoint, inp.pw)
db_list = load_dbc_files(inp.dbc_paths)
log_files = list_log_files(fs, inp.devices, start_times, inp.pw)

# process log files and write extracted signals to InfluxDB
proc = ProcessData(fs, db_list, inp.signals, inp.days_offset)

for log_file in log_files:
    df_raw, device_id = proc.get_raw_data(log_file, inp.pw)

    # optionally combine transport-protocol frames before physical-value decoding
    if inp.tp_type != "":
        tp = MultiFrameDecoder(inp.tp_type)
        df_raw = tp.combine_tp_frames(df_raw)

    df_phys = proc.extract_phys(df_raw)
    proc.print_log_summary(device_id, log_file, df_phys)
devices = ["LOG/958D2219"] # specify which time period to fetch log files for start = datetime(year=2020, month=1, day=13, hour=0, minute=0, tzinfo=timezone.utc) stop = datetime(year=2099, month=1, day=1, tzinfo=timezone.utc) # specify DBC path dbc_path = r"CSS-Electronics-SAE-J1939-DEMO.dbc" # --------------------------------------------------- # initialize DBC converter and file loader db = can_decoder.load_dbc(dbc_path) df_decoder = can_decoder.DataFrameDecoder(db) # fs = setup_fs_s3() fs = setup_fs() # List log files based on inputs log_files = canedge_browser.get_log_files(fs, devices, start_date=start, stop_date=stop) print(f"Found a total of {len(log_files)} log files") df_concat = [] for log_file in log_files: # open log file, get device id and extract dataframe with raw CAN data print(f"\nProcessing log file: {log_file}") with fs.open(log_file, "rb") as handle: mdf_file = mdf_iter.MdfFile(handle) device_id = mdf_file.get_metadata()[ "HDComment.Device Information.serial number" ]["value_raw"]
# Extract physical CAN signal values from CANedge MF4 log files.
import mdf_iter
import canedge_browser
import pandas as pd
from datetime import datetime, timezone
from utils import setup_fs, load_dbc_files, restructure_data, add_custom_sig, ProcessData

# specify devices to process (from local/S3), DBC files and start time
devices = ["LOG/958D2219"]
dbc_paths = ["dbc_files/CSS-Electronics-SAE-J1939-DEMO.dbc"]
start = datetime(year=2020, month=1, day=13, hour=0, tzinfo=timezone.utc)

# setup filesystem (local/S3), load DBC files and list log files for processing
fs = setup_fs(s3=False, key="", secret="", endpoint="")
db_list = load_dbc_files(dbc_paths)
log_files = canedge_browser.get_log_files(fs, devices, start_date=start)
print(f"Found a total of {len(log_files)} log files")

# --------------------------------------------
# perform data processing of each log file
proc = ProcessData(fs, db_list, signals=[])

df_phys_all = pd.DataFrame()
for log_file in log_files:
    df_raw, device_id = proc.get_raw_data(log_file)
    df_phys = proc.extract_phys(df_raw)
    proc.print_log_summary(device_id, log_file, df_phys)
    # BUGFIX: DataFrame.append was deprecated in pandas 1.4 and removed in
    # pandas 2.0 — accumulate via pd.concat instead (same resulting frame)
    df_phys_all = pd.concat([df_phys_all, df_phys])

# --------------------------------------------