Example #1
import argparse

import utility

DEFAULT_NUM_CASES = 100
DEFAULT_NUM_POINTS = (14, 16)
DEFAULT_MAP_SIZE = 100

if __name__ == '__main__':
    PARSER = argparse.ArgumentParser(
            description='Benchmark the little-tsp binary')
    PARSER.add_argument(
            '--generate', dest='generate', action='store_true',
            help='Whether to generate new cases or not')
    PARSER.add_argument(
            '--no-generate', dest='generate', action='store_false',
            help='Whether to generate new cases or not')
    PARSER.set_defaults(generate=False)

    ARGS = PARSER.parse_args()

    # generate cases if necessary
    if ARGS.generate:
        utility.write_cases(
                DEFAULT_NUM_CASES, DEFAULT_NUM_POINTS, DEFAULT_MAP_SIZE,
                overwrite=True)

    # run the binary on all the cases, print the time it takes
    for case_filename in utility.iterate_cases():
        print(utility.benchmark_solution(utility.PROJECT_BINARY, case_filename))
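All three examples depend on a local utility module that is not shown here. The following is only a minimal sketch of the interface those calls assume: the function names come from the examples above and below, while the paths, case file format, and timing strategy are guesses.

# utility.py -- minimal sketch of the helper module assumed by these examples.
# Only the call signatures are taken from the examples; paths, case format,
# and timing strategy are assumptions.
import glob
import os
import random
import re
import subprocess
import time

PROJECT_BINARY = './little-tsp'   # assumed path to the built binary
CASE_DIR = 'cases'                # assumed directory for generated cases

def write_cases(num_cases, num_points_range, map_size, overwrite=False):
    """Write num_cases random cases, each with a point count drawn from
    num_points_range and coordinates in [0, map_size)."""
    os.makedirs(CASE_DIR, exist_ok=True)
    if overwrite:
        for old in glob.glob(os.path.join(CASE_DIR, 'case_*.txt')):
            os.remove(old)
    for i in range(num_cases):
        num_points = random.randint(*num_points_range)
        points = ['{} {}'.format(random.randrange(map_size),
                                 random.randrange(map_size))
                  for _ in range(num_points)]
        with open(os.path.join(CASE_DIR, 'case_{}.txt'.format(i)), 'w') as f:
            f.write('{}\n{}\n'.format(num_points, '\n'.join(points)))

def build_project():
    """Build the little-tsp binary (assumed to use make)."""
    subprocess.check_call(['make'])

def iterate_cases():
    """Yield the filename of each generated case, in order."""
    for filename in sorted(glob.glob(os.path.join(CASE_DIR, 'case_*.txt'))):
        yield filename

def get_case_num(case_filename):
    """Extract the numeric case index from a case filename."""
    return int(re.search(r'case_(\d+)', case_filename).group(1))

def benchmark_solution(command, case_filename):
    """Run command (a string or an argument list) on one case file and
    return the elapsed wall-clock time in seconds."""
    if isinstance(command, str):
        command = [command]
    with open(case_filename) as case_file:
        start = time.perf_counter()
        subprocess.check_call(command, stdin=case_file,
                              stdout=subprocess.DEVNULL)
        return time.perf_counter() - start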
Example #2
    # remove old cases, write new ones and build the project
    utility.write_cases(args.num, args.points, args.size, overwrite=True)
    utility.build_project()

    # print the header
    header_format = "{:>5} | {:>16} | {:>16} | {:>16} | {:>16}"
    print(header_format.format('Case', 'Naive Time (s)', 'Optimal Time (s)',
        'Difference (s)', 'Speedup '))
    print('{:->5}-+-{:->16}-+-{:->16}-+-{:->16}-+-{:->16}'.format('', '', '',
        '', ''))

    # run the binary on all cases
    for case_filename in utility.iterate_cases():

        # benchmark naive and optimal solutions
        naive_time = utility.benchmark_solution([utility.PROJECT_BINARY, '-m',
            'NAIVETSP'], case_filename)
        opt_time = utility.benchmark_solution([utility.PROJECT_BINARY, '-m',
            'OPTTSP'], case_filename)

        # calculate and print statistics
        diff = naive_time - opt_time
        if opt_time:
            speedup = naive_time / opt_time
        else:
            speedup = 0
        case_num = utility.get_case_num(case_filename)
        stats = '{:>5} | {:>16.4} | {:>16.4} | {:>16.4} | {:>15.4}x'.format(
                case_num, naive_time, opt_time, diff, speedup)
        print(stats)
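This snippet begins inside an existing main block: the args.num, args.points, and args.size it reads come from an argument parser that is not shown. One plausible setup, with every option name, default, and help string assumed rather than taken from the original, would precede the fragment above like this:

import argparse

import utility

if __name__ == '__main__':
    # hypothetical parser; the fragment above would follow parse_args()
    parser = argparse.ArgumentParser(
            description='Compare the naive and optimal TSP solvers')
    parser.add_argument('--num', type=int, default=100,
                        help='number of test cases to generate')
    parser.add_argument('--points', type=int, nargs=2, default=(8, 12),
                        metavar=('MIN', 'MAX'),
                        help='range of point counts per case')
    parser.add_argument('--size', type=int, default=100,
                        help='side length of the square map')
    args = parser.parse_args()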
Example #3
import argparse

import utility

DEFAULT_NUM_CASES = 100
DEFAULT_NUM_POINTS = (14, 16)
DEFAULT_MAP_SIZE = 100

if __name__ == '__main__':
    PARSER = argparse.ArgumentParser(
        description='Benchmark the little-tsp binary')
    PARSER.add_argument('--generate',
                        dest='generate',
                        action='store_true',
                        help='Whether to generate new cases or not')
    PARSER.add_argument('--no-generate',
                        dest='generate',
                        action='store_false',
                        help='Whether to generate new cases or not')
    PARSER.set_defaults(generate=False)

    ARGS = PARSER.parse_args()

    # generate cases if necessary
    if ARGS.generate:
        utility.write_cases(DEFAULT_NUM_CASES,
                            DEFAULT_NUM_POINTS,
                            DEFAULT_MAP_SIZE,
                            overwrite=True)

    # run the binary on all the cases, print the time it takes
    for case_filename in utility.iterate_cases():
        print(utility.benchmark_solution(utility.PROJECT_BINARY,
                                         case_filename))