# Example 1
def cut_asam(output, fmt):
    """Benchmark asammdf's ``MDF.cut`` over the middle 60% of the file.

    Opens ``test.mf4``, cuts it between the 20% and 80% points of the
    master channel, and sends ``[timer.output, timer.error]`` back through
    the *output* pipe connection.
    """
    mdf = MDF(r"test.mf4")
    master = mdf.get_master(0)
    span = master[-1] - master[0]
    start = master[0] + 0.2 * span
    stop = master[0] + 0.8 * span
    with Timer("Cut file", f"asammdf {asammdf_version} mdfv4", fmt) as timer:
        mdf = mdf.cut(start=start, stop=stop)

    output.send([timer.output, timer.error])
# Example 2
def main(text_output, fmt):
    """Run the asammdf vs mdfreader benchmark suite and report the results.

    Parameters
    ----------
    text_output : bool
        When true, additionally write the report to a file named after the
        machine architecture and the asammdf/mdfreader versions.
    fmt : str
        Table format forwarded to ``table_header``/``table_end`` and to
        every benchmark worker.
    """
    # Work relative to this script so the generated test files are found.
    if os.path.dirname(__file__):
        os.chdir(os.path.dirname(__file__))
    for version in ("3.30", "4.10"):
        generate_test_files(version)

    # Gather size/structure statistics for the MDF v3 test file.
    mdf = MDF("test.mdf", "minimum")
    v3_size = os.path.getsize("test.mdf") // 1024 // 1024
    v3_groups = len(mdf.groups)
    v3_channels = sum(len(gp["channels"]) for gp in mdf.groups)
    v3_version = mdf.version

    # Gather size/structure statistics for the MDF v4 test file.
    mdf = MDF("test.mf4", "minimum")
    mdf.get_master(0)
    v4_size = os.path.getsize("test.mf4") // 1024 // 1024
    v4_groups = len(mdf.groups)
    v4_channels = sum(len(gp["channels"]) for gp in mdf.groups)
    v4_version = mdf.version

    # Each benchmark runs in its own child process (isolating its memory
    # usage) and sends a (result, error) pair back over this pipe.
    listen, send = multiprocessing.Pipe()
    output = MyList()
    errors = []

    installed_ram = round(psutil.virtual_memory().total / 1024 / 1024 / 1024)

    output.append("\n\nBenchmark environment\n")
    output.append("* {}".format(sys.version))
    output.append("* {}".format(platform.platform()))
    output.append("* {}".format(platform.processor()))
    output.append("* numpy {}".format(np.__version__))
    output.append("* {}GB installed RAM\n".format(installed_ram))
    output.append("Notations used in the results\n")
    output.append(("* compress = mdfreader mdf object created with "
                   "compression=blosc"))
    output.append(("* nodata = mdfreader mdf object read with "
                   "no_data_loading=True"))
    output.append("\nFiles used for benchmark:\n")
    output.append("* mdf version {}".format(v3_version))
    output.append("    * {} MB file size".format(v3_size))
    output.append("    * {} groups".format(v3_groups))
    output.append("    * {} channels".format(v3_channels))
    output.append("* mdf version {}".format(v4_version))
    output.append("    * {} MB file size".format(v4_size))
    output.append("    * {} groups".format(v4_groups))
    output.append("    * {} channels\n\n".format(v4_channels))

    # Per-category switches: 0 skips a whole benchmark category.
    OPEN, SAVE, GET, CONVERT, MERGE, FILTER, CUT = 1, 1, 1, 1, 1, 0, 0

    def run_suite(title, tests):
        # Run each benchmark function in a fresh process and collect its
        # result and error output from the pipe.
        output.extend(table_header(title, fmt))
        for func in tests:
            proc = multiprocessing.Process(target=func, args=(send, fmt))
            proc.start()
            proc.join()
            result, err = listen.recv()
            output.append(result)
            errors.append(err)
        output.extend(table_end(fmt))

    tests = (
        open_mdf3,
        open_reader3,
        open_reader3_nodata,
        open_reader3_compression,
        open_mdf4,
        #        open_mdf4_column,
        open_reader4,
        open_reader4_nodata,
        open_reader4_compression,
    )

    if tests and OPEN:
        run_suite("Open file", tests)

    tests = (
        save_mdf3,
        save_reader3,
        save_reader3_nodata,
        save_reader3_compression,
        save_mdf4,
        #        save_mdf4_column,
        save_reader4,
        save_reader4_nodata,
        save_reader4_compression,
    )

    if tests and SAVE:
        run_suite("Save file", tests)

    tests = (
        get_all_mdf3,
        get_all_reader3,
        get_all_reader3_nodata,
        get_all_reader3_compression,
        get_all_mdf4,
        #        get_all_mdf4_column,
        get_all_reader4,
        get_all_reader4_nodata,
        get_all_reader4_compression,
    )

    if tests and GET:
        run_suite("Get all channels (36424 calls)", tests)

    tests = (
        convert_v3_v4,
        convert_v4_v410,
        convert_v4_v420,
    )

    if tests and CONVERT:
        run_suite("Convert file", tests)

    tests = (
        merge_v3,
        merge_reader_v3,
        merge_reader_v3_nodata,
        merge_reader_v3_compress,
        merge_v4,
        merge_reader_v4,
        merge_reader_v4_nodata,
        merge_reader_v4_compress,
    )

    if tests and MERGE:
        run_suite("Merge 3 files", tests)

    tests = (
        filter_asam,
        filter_reader4,
        filter_reader4_compression,
        filter_reader4_nodata,
    )

    if tests and FILTER:
        run_suite("Filter 200 channels", tests)

    tests = (
        cut_asam,
        #        cut_reader4,
        #        cut_reader4_compression,
        #        cut_reader4_nodata,
    )

    if tests and CUT:
        run_suite("Cut file from 20% to 80%", tests)

    errors = [err for err in errors if err]
    if errors:
        print("\n\nERRORS\n", "\n".join(errors))

    if text_output:
        arch = "x86" if platform.architecture()[0] == "32bit" else "x64"
        file = "{}_asammdf_{}_mdfreader_{}.{}".format(arch, asammdf_version,
                                                      mdfreader_version, fmt)
        with open(file, "w") as out:
            out.write("\n".join(output))

    # Remove leftover files produced by the save/convert benchmarks.
    for file in ("x.mdf", "x.mf4"):
        if PYVERSION >= 3:
            try:
                os.remove(file)
            except FileNotFoundError:
                pass
        else:
            # Bug fix: os.remove raises OSError on Python 2 — catching only
            # IOError could never suppress the missing-file error. (The
            # branch is dead in practice: f-strings elsewhere require 3.6+.)
            try:
                os.remove(file)
            except (IOError, OSError):
                pass
# Example 3
def main(text_output, fmt):
    """Run the asammdf vs mdfreader benchmark suite and report the results.

    Parameters
    ----------
    text_output : bool
        When true, additionally write the report to a file named after the
        machine architecture and the asammdf/mdfreader versions.
    fmt : str
        Table format forwarded to ``table_header``/``table_end`` and to
        every benchmark worker.
    """
    # Work relative to this script so the generated test files are found.
    if os.path.dirname(__file__):
        os.chdir(os.path.dirname(__file__))
    for version in ('3.30', '4.10'):
        generate_test_files(version)

    # Gather size/structure statistics for the MDF v3 test file.
    mdf = MDF('test.mdf', 'minimum')
    v3_size = os.path.getsize('test.mdf') // 1024 // 1024
    v3_groups = len(mdf.groups)
    v3_channels = sum(len(gp['channels']) for gp in mdf.groups)
    v3_version = mdf.version

    # Gather size/structure statistics for the MDF v4 test file.
    mdf = MDF('test.mf4', 'minimum')
    mdf.get_master(0, copy_master=True)
    v4_size = os.path.getsize('test.mf4') // 1024 // 1024
    v4_groups = len(mdf.groups)
    v4_channels = sum(len(gp['channels']) for gp in mdf.groups)
    v4_version = mdf.version

    # Each benchmark runs in its own child process (isolating its memory
    # usage) and sends a (result, error) pair back over this pipe.
    listen, send = multiprocessing.Pipe()
    output = MyList()
    errors = []

    installed_ram = round(psutil.virtual_memory().total / 1024 / 1024 / 1024)

    output.append('\n\nBenchmark environment\n')
    output.append('* {}'.format(sys.version))
    output.append('* {}'.format(platform.platform()))
    output.append('* {}'.format(platform.processor()))
    output.append('* numpy {}'.format(np.__version__))
    output.append('* {}GB installed RAM\n'.format(installed_ram))
    output.append('Notations used in the results\n')
    output.append(('* compress = mdfreader mdf object created with '
                   'compression=blosc'))
    output.append(('* nodata = mdfreader mdf object read with '
                   'no_data_loading=True'))
    output.append('\nFiles used for benchmark:\n')
    output.append('* mdf version {}'.format(v3_version))
    output.append('    * {} MB file size'.format(v3_size))
    output.append('    * {} groups'.format(v3_groups))
    output.append('    * {} channels'.format(v3_channels))
    output.append('* mdf version {}'.format(v4_version))
    output.append('    * {} MB file size'.format(v4_size))
    output.append('    * {} groups'.format(v4_groups))
    output.append('    * {} channels\n\n'.format(v4_channels))

    # Per-category switches: 0 skips a whole benchmark category.
    OPEN, SAVE, GET, CONVERT, MERGE, FILTER, CUT = 1, 1, 1, 1, 1, 0, 0

    def execute_suite(title, benchmarks):
        # Run each benchmark function in a fresh process and collect its
        # result and error output from the pipe.
        output.extend(table_header(title, fmt))
        for func in benchmarks:
            worker = multiprocessing.Process(target=func, args=(send, fmt))
            worker.start()
            worker.join()
            result, err = listen.recv()
            output.append(result)
            errors.append(err)
        output.extend(table_end(fmt))

    tests = (
        open_mdf3,
        open_reader3,
        open_reader3_compression,
        open_reader3_nodata,
        open_mdf4,
        open_reader4,
        open_reader4_compression,
        open_reader4_nodata,
    )

    if tests and OPEN:
        execute_suite('Open file', tests)

    tests = (
        save_mdf3,
        save_reader3,
        save_reader3_nodata,
        save_reader3_compression,
        save_mdf4,
        save_reader4,
        save_reader4_nodata,
        save_reader4_compression,
    )

    if tests and SAVE:
        execute_suite('Save file', tests)

    tests = (
        get_all_mdf3,
        get_all_reader3,
        get_all_reader3_nodata,
        get_all_reader3_compression,
        get_all_mdf4,
        get_all_reader4,
        get_all_reader4_compression,
        get_all_reader4_nodata,
    )

    if tests and GET:
        execute_suite('Get all channels (36424 calls)', tests)

    tests = (
        convert_v3_v4,
        convert_v4_v3,
    )

    if tests and CONVERT:
        execute_suite('Convert file', tests)

    tests = (
        merge_v3,
        merge_reader_v3,
        merge_reader_v3_compress,
        merge_reader_v3_nodata,
        merge_v4,
        merge_reader_v4,
        merge_reader_v4_nodata,
        merge_reader_v4_compress,
    )

    if tests and MERGE:
        execute_suite('Merge 3 files', tests)

    tests = (
        filter_asam,
        filter_reader4,
        filter_reader4_compression,
        filter_reader4_nodata,
    )

    if tests and FILTER:
        execute_suite('Filter 200 channels', tests)

    tests = (
        cut_asam,
        cut_reader4,
        cut_reader4_compression,
        cut_reader4_nodata,
    )

    if tests and CUT:
        execute_suite('Cut file from 20% to 80%', tests)

    errors = [err for err in errors if err]
    if errors:
        print('\n\nERRORS\n', '\n'.join(errors))

    if text_output:
        arch = 'x86' if platform.architecture()[0] == '32bit' else 'x64'
        file = '{}_asammdf_{}_mdfreader_{}.{}'.format(arch, asammdf_version,
                                                      mdfreader_version, fmt)
        with open(file, 'w') as out:
            out.write('\n'.join(output))

    # Remove leftover files produced by the save/convert benchmarks.
    for file in ('x.mdf', 'x.mf4'):
        if PYVERSION >= 3:
            try:
                os.remove(file)
            except FileNotFoundError:
                pass
        else:
            # Bug fix: os.remove raises OSError on Python 2 — catching only
            # IOError could never suppress the missing-file error.
            try:
                os.remove(file)
            except (IOError, OSError):
                pass