def test_read_mdf3_00(self):
    """Round-trip one integer and one float signal through an MDF 3.00 file
    for every memory option and verify the samples read back unchanged."""
    seed = np.random.randint(0, 2 ** 31)
    np.random.seed(seed)
    print("Read 3.00 using seed =", seed)
    integer_signal = Signal(
        np.random.randint(-2 ** 16, 2 ** 16, CHANNEL_LEN, np.int32),
        np.arange(CHANNEL_LEN),
        name="Integer Channel",
        unit="unit1",
    )
    float_signal = Signal(
        np.random.random(CHANNEL_LEN),
        np.arange(CHANNEL_LEN),
        name="Float Channel",
        unit="unit2",
    )
    for memory in MEMORY:
        print(memory)
        # write both signals on a shared timebase
        with MDF(version="3.00", memory=memory) as mdf:
            mdf.append([integer_signal, float_signal], common_timebase=True)
            outfile = mdf.save("tmp", overwrite=True)
        # read them back and compare against the originals
        with MDF(outfile, memory=memory) as mdf:
            for original in (integer_signal, float_signal):
                restored = mdf.get(original.name)
                self.assertTrue(
                    np.array_equal(restored.samples, original.samples)
                )
def test_read_mdf4_10(self):
    """Write two random signals to an MDF 4.10 file and verify that they
    read back with identical samples."""
    seed = np.random.randint(0, 2**31)
    np.random.seed(seed)
    print("Read 4.10 using seed =", seed)
    signals = [
        Signal(
            np.random.randint(-(2**31), 2**31, CHANNEL_LEN),
            np.arange(CHANNEL_LEN),
            name="Integer Channel",
            unit="unit1",
        ),
        Signal(
            np.random.random(CHANNEL_LEN),
            np.arange(CHANNEL_LEN),
            name="Float Channel",
            unit="unit2",
        ),
    ]
    with MDF(version="4.10") as mdf:
        mdf.append(signals, common_timebase=True)
        outfile = mdf.save(Path(TestMDF4.tempdir.name) / "tmp", overwrite=True)
    with MDF(outfile) as mdf:
        for expected in signals:
            restored = mdf.get(expected.name)
            self.assertTrue(np.array_equal(restored.samples, expected.samples))
def test_read_mdf4_10(self):
    """Round-trip two random signals through MDF 4.10 for every memory
    option and check the samples survive unchanged."""
    seed = np.random.randint(0, 2**31)
    np.random.seed(seed)
    print('Read 4.10 using seed =', seed)
    sig_int = Signal(
        np.random.randint(-2**31, 2**31, CHANNEL_LEN),
        np.arange(CHANNEL_LEN),
        name='Integer Channel',
        unit='unit1',
    )
    sig_float = Signal(
        np.random.random(CHANNEL_LEN),
        np.arange(CHANNEL_LEN),
        name='Float Channel',
        unit='unit2',
    )
    for memory in MEMORY:
        with MDF(version='4.10', memory=memory) as mdf:
            mdf.append([sig_int, sig_float], common_timebase=True)
            outfile = mdf.save('tmp', overwrite=True)
        with MDF(outfile, memory=memory) as mdf:
            for original in (sig_int, sig_float):
                self.assertTrue(
                    np.array_equal(
                        mdf.get(original.name).samples, original.samples
                    )
                )
def test_convert_demo(self):
    """Convert every demo file to each supported version and verify that
    all channel samples and timestamps survive the conversion.

    Fix: the original aborted on a mismatch with ``1 / 0``, raising a
    ZeroDivisionError instead of reporting a proper test failure; the
    mismatch is now recorded and reported through the final assertion.
    """
    print("MDF convert tests")
    for out in SUPPORTED_VERSIONS:
        for input_file in Path(TestMDF.tempdir_demo.name).iterdir():
            # MDF v2.00 files are not supported by this conversion path
            if MDF(input_file).version == "2.00":
                continue
            print(input_file, out)
            with MDF(input_file) as mdf:
                outfile = mdf.convert(out).save(
                    Path(TestMDF.tempdir_demo.name) / "tmp",
                    overwrite=True,
                )
            equal = True
            with MDF(input_file) as mdf, MDF(outfile) as mdf2:
                # skip the master (time) channels
                for name in set(mdf2.channels_db) - {"t", "time"}:
                    original = mdf.get(name)
                    converted = mdf2.get(name)
                    if not np.array_equal(original.samples, converted.samples):
                        equal = False
                        print("sample mismatch:", original, converted, outfile)
                    if not np.array_equal(
                        original.timestamps, converted.timestamps
                    ):
                        equal = False
                        print("timestamp mismatch:", name, outfile)
            self.assertTrue(equal)
def test_convert_demo(self):
    """Convert each demo file to every supported version and memory option,
    then compare all channels of the result against the source file."""
    print("MDF convert tests")
    for out in SUPPORTED_VERSIONS:
        for mdfname in os.listdir('tmpdir_demo'):
            for memory in MEMORY:
                input_file = os.path.join('tmpdir_demo', mdfname)
                if MDF(input_file).version == '2.00':
                    continue
                print(input_file, memory, out)
                with MDF(input_file, memory=memory) as mdf:
                    outfile = mdf.convert(out, memory=memory).save(
                        'tmp', overwrite=True)
                equal = True
                with MDF(input_file, memory=memory) as mdf, \
                        MDF(outfile, memory=memory) as mdf2:
                    # skip the master (time) channels
                    for name in set(mdf.channels_db) - {'t', 'time'}:
                        original, converted = mdf.get(name), mdf2.get(name)
                        if not (np.array_equal(original.samples,
                                               converted.samples)
                                and np.array_equal(original.timestamps,
                                                   converted.timestamps)):
                            equal = False
                self.assertTrue(equal)
    cleanup_files()
def test_convert_demo(self):
    """Convert demo files (opened from path and from memory) to every
    supported version and check decoded signals against the .npy references."""
    print("MDF convert demo tests")
    demo_dir = Path(TestMDF.tempdir_demo.name)
    mdf_files = [f for f in demo_dir.iterdir() if f.suffix in ('.mdf', '.mf4')]
    signals = [f for f in demo_dir.iterdir() if f.suffix == '.npy']
    for file in mdf_files:
        for inp in (file, BytesIO(file.read_bytes())):
            with MDF(inp, use_display_names=True) as input_file:
                for out in SUPPORTED_VERSIONS:
                    print(file, out, type(inp))
                    outfile = input_file.convert(out).save(
                        demo_dir / "tmp",
                        overwrite=True,
                    )
                    with MDF(outfile, use_display_names=True) as mdf:
                        for signal in signals:
                            expected = np.load(signal)
                            actual = mdf.get(signal.stem).samples
                            self.assertTrue(np.array_equal(expected, actual))
def test_select(self):
    """Select a random subset of demo channels and verify names, samples and
    timestamps match individual ``get`` calls.

    Fix: the original created ``MDF`` instances for the version probe and for
    the ``select`` call without ever closing them, leaking file handles; all
    reads now go through context managers.
    """
    print("MDF select tests")
    for input_file in Path(TestMDF.tempdir_demo.name).iterdir():
        # MDF v2.00 files are not supported here
        with MDF(input_file) as mdf:
            if mdf.version == "2.00":
                continue
        print(input_file)
        channels_nr = np.random.randint(1, len(CHANNELS_DEMO) + 1)
        channel_list = random.sample(list(CHANNELS_DEMO), channels_nr)
        with MDF(input_file) as mdf:
            selected_signals = mdf.select(channel_list)
        self.assertTrue(len(selected_signals) == len(channel_list))
        # selection preserves the requested order
        self.assertTrue(
            all(ch.name == name
                for ch, name in zip(selected_signals, channel_list))
        )
        equal = True
        with MDF(input_file) as mdf:
            for selected in selected_signals:
                original = mdf.get(selected.name)
                if not np.array_equal(original.samples, selected.samples):
                    equal = False
                if not np.array_equal(original.timestamps, selected.timestamps):
                    equal = False
        self.assertTrue(equal)
def GetDatafromMdf_asDF(filename, SigList, SampleTime=0.01, EncodeEnums=True):
    """Load the requested signals from an MDF file into a resampled DataFrame.

    Parameters
    ----------
    filename : str or Path
        MDF file to read.
    SigList : iterable of str
        Channel names to extract; names missing from the file are skipped.
    SampleTime : float, optional
        Raster (seconds) used when converting to a DataFrame.
    EncodeEnums : bool, optional
        When True, string columns are converted to integer category codes.

    Returns
    -------
    pandas.DataFrame

    Fixes: the bare ``except:`` (which would even swallow KeyboardInterrupt)
    is narrowed to ``except Exception`` so the best-effort skip remains, and
    the non-idiomatic ``EncodeEnums == True`` comparison is simplified.
    """
    def cleanEnumString(string):
        # NOTE(review): splits on the literal backslash-x-0-0 text, then
        # strips real NUL bytes -- presumably matches how enum strings are
        # stored in these files; confirm against sample data.
        return string.decode().split(r'\x00')[0].strip().strip('\x00')

    tempMDF = MDF()
    sigs = []
    with MDF(filename, remove_source_from_channel_names=True) as mdf:
        for var in SigList:
            try:
                # first (group, index) occurrence of the channel
                grp_idx = mdf.channels_db[var][0]
                # Fetch signal as data object
                sigs.append(mdf.get(group=grp_idx[0], index=grp_idx[1]))
            except Exception:
                # channel missing or unreadable -- best effort, skip it
                continue
    tempMDF.append(sigs)
    df = tempMDF.to_dataframe(raster=SampleTime)
    types = df.apply(lambda x: pd.api.types.infer_dtype(x.values))
    for col in types[types == 'bytes'].index:  # String/Enum
        df[col] = df[col].apply(cleanEnumString)
    if EncodeEnums:
        types = df.apply(lambda x: pd.api.types.infer_dtype(x.values))
        for col in types[types == 'string'].index:  # String/Enum
            df[col] = df[col].astype('category')
            df[col] = df[col].cat.codes
    return df
def test_merge(self):
    """Merge four copies of each file and check every non-master channel's
    samples are the original samples tiled four times."""
    print("MDF merge tests")
    for out in SUPPORTED_VERSIONS:
        for mdfname in os.listdir('tmpdir'):
            for memory in MEMORY:
                input_file = os.path.join('tmpdir', mdfname)
                files = [input_file] * 4
                MDF.merge(files, out, memory).save('tmp', overwrite=True)
                equal = True
                with MDF(input_file, memory=memory) as mdf, \
                        MDF('tmp', memory=memory) as mdf2:
                    for i, group in enumerate(mdf.groups):
                        # index 0 is the master channel; compare the rest
                        for j in range(1, len(group['channels'])):
                            original = mdf.get(group=i, index=j)
                            merged = mdf2.get(group=i, index=j)
                            if not np.array_equal(
                                    np.tile(original.samples, 4),
                                    merged.samples):
                                equal = False
                self.assertTrue(equal)
def test_mixed(self):
    """Mixed big/little-endian signals must decode to the correct values in
    MDF 3.30 and 4.10, regardless of the order they are appended.

    Fixes: the save paths used ``f"out"`` -- an f-string with no
    placeholders (flake8 F541) -- now a plain literal; and the duplicated
    write/read/verify sequence is factored into a local helper.
    """
    t = np.arange(15, dtype="<f8")
    s1 = Signal(
        np.frombuffer(b"\x00\x00\x00\x02" * 15, dtype=">u4"), t, name="Motorola"
    )
    s2 = Signal(
        np.frombuffer(b"\x04\x00\x00\x00" * 15, dtype="<u4"), t, name="Intel"
    )

    def _check(signals):
        # write the signals in the given order, then verify decoded values
        for version in ("3.30", "4.10"):
            mdf = MDF(version=version)
            mdf.append(signals, common_timebase=True)
            outfile = mdf.save(
                Path(TestEndianess.tempdir.name) / "out", overwrite=True
            )
            mdf.close()
            with MDF(outfile) as mdf:
                self.assertTrue(
                    np.array_equal(mdf.get("Motorola").samples, [2] * 15)
                )
                self.assertTrue(
                    np.array_equal(mdf.get("Intel").samples, [4] * 15)
                )

    _check([s1, s2])
    _check([s2, s1])
def test_cut_demo(self):
    """Cut each demo file into three adjacent slices, concatenate them back
    together, and verify the result matches the original .npy references.

    Exercised for both a path input and an in-memory (BytesIO) input, and
    for both ``whence`` values accepted by ``cut``.
    """
    print("MDF cut demo tests")
    # demo measurement files to cut
    mdf_files = [
        file
        for file in Path(TestMDF.tempdir_demo.name).iterdir()
        if file.suffix in ('.mdf', '.mf4')
    ]
    # per-channel reference sample arrays
    signals = [
        file
        for file in Path(TestMDF.tempdir_demo.name).iterdir()
        if file.suffix == '.npy'
    ]
    for file in mdf_files:
        print(file)
        for inp in (file, BytesIO(file.read_bytes())):
            with MDF(inp, use_display_names=True) as input_file:
                for whence in (0, 1):
                    print(file, whence)
                    # three adjacent slices: [..2), [2..6), [6..);
                    # include_ends=False avoids duplicating boundary samples
                    outfile1 = (input_file.cut(
                        stop=2, whence=whence, include_ends=False).save(
                        Path(TestMDF.tempdir.name) / "tmp1", overwrite=True))
                    outfile2 = (input_file.cut(
                        start=2, stop=6, whence=whence,
                        include_ends=False).save(
                        Path(TestMDF.tempdir.name) / "tmp2", overwrite=True))
                    outfile3 = (input_file.cut(
                        start=6, whence=whence, include_ends=False).save(
                        Path(TestMDF.tempdir.name) / "tmp3", overwrite=True))
                    # stitch the slices back together in the source version
                    outfile = MDF.concatenate(
                        [outfile1, outfile2, outfile3],
                        version=input_file.version,
                        use_display_names=True,
                    ).save(Path(TestMDF.tempdir.name) / "tmp", overwrite=True)
                    print("OUT", outfile)
                    with MDF(outfile, use_display_names=True) as mdf2:
                        for signal in signals:
                            target = np.load(signal)
                            sig = mdf2.get(signal.stem)
                            # timestamps must match the uncut source file
                            timestamps = input_file.get(
                                signal.stem).timestamps
                            self.assertTrue(
                                np.array_equal(sig.samples, target))
                            self.assertTrue(
                                np.array_equal(timestamps, sig.timestamps))
def test_mixed(self):
    """Mixed big/little-endian signals survive a save/load round trip in
    MDF 3.30 and 4.10 in both append orders.

    Fix: ``f"out"`` was an f-string with no placeholders (flake8 F541); a
    plain string literal is used for the output name instead.
    """
    t = np.arange(15, dtype='<f8')
    s1 = Signal(
        np.frombuffer(b'\x00\x00\x00\x02' * 15, dtype='>u4'), t, name='Motorola'
    )
    s2 = Signal(
        np.frombuffer(b'\x04\x00\x00\x00' * 15, dtype='<u4'), t, name='Intel'
    )
    for order in ([s1, s2], [s2, s1]):
        for version in ('3.30', '4.10'):
            mdf = MDF(version=version)
            mdf.append(order, common_timebase=True)
            outfile = mdf.save(
                Path(TestEndianess.tempdir.name) / "out",
                overwrite=True,
            )
            mdf.close()
            # byte order must not affect the decoded values
            with MDF(outfile) as mdf:
                self.assertTrue(
                    np.array_equal(mdf.get('Motorola').samples, [2] * 15)
                )
                self.assertTrue(
                    np.array_equal(mdf.get('Intel').samples, [4] * 15)
                )
def save_mdf4_column(output, fmt):
    """Benchmark saving a column-oriented MDF v4 file and report the timing
    through *output*.

    Fix: the source ``MDF`` was never closed, leaking the file handle; a
    context manager now guarantees cleanup.
    """
    with MDF(r"test_column.mf4") as x:
        with Timer("Save file", f"asammdf {asammdf_version} mdfv4 column", fmt) as timer:
            x.save(r"x.mf4", overwrite=True)
    output.send([timer.output, timer.error])
def test_read_demo(self):
    """Read every demo file with each memory option and compare each channel
    against the reference samples in ``CHANNELS_DEMO``."""
    print("MDF read tests")
    ret = True
    # NOTE(review): ``enable`` is iterated but never used in the loop body;
    # presumably a leftover feature toggle -- confirm before removing.
    for enable in (True, False):
        for mdf in os.listdir('tmpdir_demo'):
            for memory in MEMORY:
                with MDF(os.path.join('tmpdir_demo', mdf),
                         memory=memory) as input_file:
                    # MDF v2.00 files are skipped
                    if input_file.version == '2.00':
                        continue
                    # skip the master (time) channels
                    for name in set(
                            input_file.channels_db) - {'time', 't'}:
                        signal = input_file.get(name)
                        original_samples = CHANNELS_DEMO[name]
                        if signal.samples.dtype.kind == 'f':
                            # reference float data is stored as float32
                            signal = signal.astype(np.float32)
                        res = np.array_equal(signal.samples,
                                             original_samples)
                        if not res:
                            ret = False
    self.assertTrue(ret)
    cleanup_files()
def test_resample(self):
    """Resampling 20 identical ramp signals (whose timebase has a gap)
    must yield the expected raster and interpolated values."""
    raster = 1.33
    source_signals = [
        Signal(
            samples=np.arange(1000, dtype='f8'),
            timestamps=np.concatenate(
                [np.arange(500), np.arange(1000, 1500)]),
            name=f'Signal_{i}',
        )
        for i in range(20)
    ]
    mdf = MDF()
    mdf.append(source_signals)
    mdf = mdf.resample(raster=raster)
    expected_timestamps = np.arange(0, 1500, 1.33)
    expected_samples = np.concatenate([
        np.arange(0, 500, 1.33),
        # samples interpolated across the 500..1000 timestamp gap
        np.linspace(499.00215568862274, 499.9976646706587, 376),
        np.arange(500.1600000000001, 1000, 1.33),
    ])
    for sig in mdf.iter_channels(skip_master=True):
        self.assertTrue(np.array_equal(sig.timestamps, expected_timestamps))
        self.assertTrue(np.allclose(sig.samples, expected_samples))
def get_all_mdf4_nodata():
    """Benchmark fetching every channel (samples only) from an MDF v4 file
    opened without loading the measured data up front."""
    os.chdir(path)
    x = MDF(r'test.mf4', load_measured_data=False)
    with Timer('asammdf {} nodata mdfv4'.format(asammdf_version)):
        for group_index, group in enumerate(x.groups):
            for channel_index in range(len(group['channels'])):
                x.get(group=group_index, index=channel_index,
                      samples_only=True)
def open_mdf4(output, fmt, memory):
    """Benchmark opening an MDF v4 file with the given memory option and
    report the timing through *output*."""
    label = 'asammdf {} {} mdfv4'.format(asammdf_version, memory)
    with Timer('Open file', label, fmt) as timer:
        MDF(r'test.mf4', memory=memory)
    output.send([timer.output, timer.error])
def write_config(pathname):
    """Generate a default signal-name configuration JSON next to *pathname*.

    The JSON maps each channel name to itself; duplicate channel names are
    renamed in place with a ``_DUPE_<n>`` suffix and mapped back to the
    original name.  Returns the number of signals written.
    """
    print('\nGenerating a default signal configuration file...')
    # directory part of the path (regex handles both / and \ separators)
    path = re.search(r"[\\/]*.*[\\/]", pathname).group(0)
    # filename = re.search(r"[\\/]*.*[\\/](.*)", pathname).group(1)
    # base name without the extension
    table_name = re.search(r"[\\/]*.*[\\/](.*)\.*\.", pathname).group(1)
    config_name = 'config_' + table_name + '.json'
    config_path = path + config_name
    config = dict()
    with MDF(pathname) as mdf_file:
        counter = 0
        suffix = 1
        for group in mdf_file.groups:
            for channel in group.channels:
                if channel.name != 't':  # May need adaption for other MDF files
                    if channel.name in config:
                        # duplicate name: rename the channel in place and map
                        # the new name back to the original
                        old_name = channel.name
                        channel.name += '_DUPE_' + str(suffix)
                        suffix += 1
                        config[channel.name] = old_name
                    else:
                        config[channel.name] = channel.name
                    # NOTE(review): counts every non-master channel,
                    # including renamed duplicates -- confirm intent.
                    counter += 1
    print('Total Number of Signals: %s' % counter)
    with open(config_path, 'w') as fp_config:
        json.dump(config, fp_config, sort_keys=False, indent=4,
                  ensure_ascii=False)
    print('Signal configuration file [%s] generated.' % config_name)
    return counter
def fun():
    """Dump the samples/timestamps of the signals listed in Book1.txt from a
    measurement file to ``data.json``.

    Fixes: the ``MDF`` file was never closed (now a context manager), the
    pop-until-IndexError loop is replaced by plain iteration, and the output
    filename typo ``data.josn`` is corrected to ``data.json``.
    """
    data_dict = {}
    with open('./Book1.txt', 'r') as f:
        # one signal name per line; trailing newline stripped
        signal_list = [line[:-1] for line in f.readlines()]
    with MDF('./GAC_A18_2020-06-26_15-01_11_0016.MF4') as mf:
        for signal in signal_list:
            try:
                data = mf.get(signal)
            except Exception:
                # signal not present in the file -- best effort, skip it
                continue
            data_dict[signal] = {
                'samples': data.samples.tolist(),
                'timestamps': data.timestamps.tolist(),
            }
    with open('./data.json', 'w') as f:
        json.dump(data_dict, f, cls=MyEncoder, ensure_ascii=False)
def test_j1939_get_can_signal(self):
    """Decode J1939 signals with ``get_can_signal`` and compare each one
    against its .npy reference array."""
    print('J1939 get CAN signal')
    temp_dir = Path(TestCANBusLogging.tempdir_j1939.name)
    # first measurement file and first database in the fixture directory
    mf4_file = next(
        input_file for input_file in temp_dir.iterdir()
        if input_file.suffix == '.mf4'
    )
    mdf = MDF(mf4_file)
    dbc = next(
        input_file for input_file in temp_dir.iterdir()
        if input_file.suffix == '.dbc'
    )
    for signal in temp_dir.iterdir():
        if signal.suffix != '.npy':
            continue
        target = np.load(signal)
        values = mdf.get_can_signal(name=signal.stem, database=str(dbc)).samples
        self.assertTrue(np.array_equal(values, target))
def convert_v3_v4(output, fmt):
    """Benchmark converting an MDF v3 file to v4 and report the timing
    through *output*."""
    with MDF(r'test.mdf') as source:
        label = f'asammdf {asammdf_version} v3 to v4'
        with Timer('Convert file', label, fmt) as timer:
            source.convert('4.10')
    output.send([timer.output, timer.error])
def get_all_mdf3_compressed():
    """Benchmark fetching every channel from a compressed MDF v3 file."""
    os.chdir(path)
    x = MDF(r'test.mdf', compression=True)
    with Timer('asammdf {} compression mdfv3'.format(asammdf_version)):
        for group_index, group in enumerate(x.groups):
            for channel_index in range(len(group['channels'])):
                x.get(group=group_index, index=channel_index)
def test_j1939_extract(self):
    """Extract J1939 bus logging with a DBC database and compare the decoded
    signals against the .npy reference arrays."""
    print('J1939 extract')
    temp_dir = Path(TestCANBusLogging.tempdir_j1939.name)
    mf4_files = [p for p in temp_dir.iterdir() if p.suffix == '.mf4']
    mdf = MDF(mf4_files[0])
    dbc_files = [p for p in temp_dir.iterdir() if p.suffix == '.dbc']
    out = mdf.extract_can_logging([dbc_files[0]])
    for signal in temp_dir.iterdir():
        if signal.suffix == '.npy':
            target = np.load(signal)
            values = out.get(signal.stem).samples
            self.assertTrue(np.array_equal(values, target))
def filter_mdf(file, signals):
    """Return *file* filtered to *signals*, after applying the VZ sign
    channels to the steering-wheel angle/speed and yaw-rate signals.

    Fix: the identical negate-by-sign-channel sequence was copy-pasted three
    times; it is now factored into a local helper.
    """
    mdf_file = MDF(file)

    def _apply_sign(value_name, sign_name):
        # negate each sample whose VZ (sign) flag is 0
        signal = mdf_file.get(value_name)
        samples = list(signal.samples)
        vz_samples = list(mdf_file.get(sign_name).samples)
        for i in range(0, len(samples)):
            if not int(vz_samples[i]):
                samples[i] = samples[i] * -1
        signal.samples = np.asarray(samples)
        return signal

    # Lenkradwinkel (steering wheel angle)
    _apply_sign('can0_LWI_Lenkradwinkel', 'can0_LWI_VZ_Lenkradwinkel')
    # Lenkradwinkel geschw. (steering wheel angular speed)
    _apply_sign('can0_LWI_Lenkradw_Geschw', 'can0_LWI_VZ_Lenkradw_Geschw')
    # Gierrate (yaw rate)
    _apply_sign('can0_ESP_Gierrate', 'can0_ESP_VZ_Gierrate')

    # NOTE(review): the sign-corrected Signal objects are local; whether the
    # filter below observes the corrected samples depends on MDF.get
    # returning shared data -- behavior kept identical to the original.
    # filter file with VZ signals
    filtered_mdf = mdf_file.filter(signals)
    return filtered_mdf
def test_obd_extract(self):
    """Extract OBD bus logging and verify the decoded signals against the
    .npy reference arrays."""
    print("OBD extract")
    temp_dir = Path(TestCANBusLogging.tempdir_obd.name)
    # list the fixture directory contents for debugging
    for file in temp_dir.iterdir():
        print(file)
    mdf_path = [p for p in temp_dir.iterdir() if p.suffix == ".mf4"][0]
    mdf = MDF(mdf_path)
    dbc = [p for p in temp_dir.iterdir() if p.suffix == ".dbc"][0]
    out = mdf.extract_bus_logging({"CAN": [dbc]})
    for signal in temp_dir.iterdir():
        if signal.suffix == ".npy":
            target = np.load(signal)
            values = out.get(signal.stem).samples
            self.assertTrue(np.array_equal(values, target))
def extractMDF(self):
    """Extract CAN logging from the user-selected MDF file, group the
    resulting signals by the group names found in channel comments,
    populate the tree widget, and refresh the dependent views."""
    # Get user selected MDF file path and make extracted MDF object.
    mdf_obj = MDF(self.mdf_path)
    self.mdf_extracted = mdf_obj.extract_can_logging([self.dbc_path])
    # Group names only exist in comment formatted as CAN#.group.signal.
    # Populate channel_dict by channel_dict[group] = [{signal: signal_object}, ...]
    for sig in self.mdf_extracted.iter_channels():
        for word in sig.comment.split():
            if word[0] != '<':  # skip XML/markup tokens in the comment
                # s[0] is the CAN bus, s[1] the group, s[2] the signal name
                s = word.split(".")
                if s[1] not in self.channel_dict:
                    self.channel_dict[s[1]] = {s[2]: sig}
                else:
                    self.channel_dict[s[1]][s[2]] = sig
    # Populate tree widget with groups and signals.
    for group, signals in self.channel_dict.items():
        parent = QtWidgets.QTreeWidgetItem(self.channel_selectors, [group])
        for sig in signals:
            child = QtWidgets.QTreeWidgetItem(parent)
            child.setText(0, sig)
    # Render any graphs already present and grab data for pedals.
    self.render_graphs()
    self.load_pedal_data()
    self.load_steering_wheel()
def convert_v3_v4(output, fmt):
    """Benchmark the v3 -> v4 conversion of a measurement file and report
    the timing through *output*."""
    with MDF(r"test.mdf") as x:
        timer_label = f"asammdf {asammdf_version} v3 to v4"
        with Timer("Convert file", timer_label, fmt) as timer:
            x.convert("4.10")
    output.send([timer.output, timer.error])
def test_read_demo(self):
    """Read demo files (from path and from memory) and compare every channel
    against its .npy reference array."""
    print("MDF read tests")
    demo_dir = Path(TestMDF.tempdir_demo.name)
    mdf_files = [f for f in demo_dir.iterdir() if f.suffix in ('.mdf', '.mf4')]
    signals = [f for f in demo_dir.iterdir() if f.suffix == '.npy']
    for file in mdf_files:
        print(file)
        for inp in (file, BytesIO(file.read_bytes())):
            with MDF(inp, use_display_names=True) as input_file:
                for signal in signals:
                    target = np.load(signal)
                    values = input_file.get(signal.stem).samples
                    self.assertTrue(np.array_equal(target, values))
def get_all_mdf4():
    """Benchmark fetching every channel from an MDF v4 file."""
    os.chdir(path)
    x = MDF(r'test.mf4')
    with Timer('asammdf {} mdfv4'.format(asammdf_version)):
        for group_index, group in enumerate(x.groups):
            for channel_index in range(len(group['channels'])):
                x.get(group=group_index, index=channel_index)
def gis_get_cord(pathname, sample_rate=0.01): path = re.search(r"[\\/]*.*[\\/]", pathname).group(0) # filename = re.search(r"[\\/]*.*[\\/](.*)", pathname).group(1) table_name = re.search(r"[\\/]*.*[\\/](.*)\.*\.", pathname).group(1) config_path = path + 'config_' + table_name + '.json' try: cfg = read_config(config_path) # lat lon signals are hardcoded and will be looked for in config files sig_name_lat = cfg['GPS_Lat'] sig_name_lon = cfg['GPS_Lon'] except Exception: print('Signal configuration does not exist. Aborted.') return with MDF(pathname) as mdf0_file: mdf1_filter = mdf0_file.filter([sig_name_lat, sig_name_lon]) if sample_rate: mdf2_resample = mdf1_filter.resample(raster=sample_rate) signals = mdf2_resample.select([sig_name_lat, sig_name_lon]) else: signals = mdf1_filter.select([sig_name_lat, sig_name_lon]) lat = signals[0].samples lon = signals[1].samples # gps_points = numpy.array([lng_value, lat_value]).transpose() # for GeoJSON: [longitude, latitude] gps_cords = list(zip(lon, lat)) return gps_cords