Example #1
def run_unit_tests(args):
    """Run the unit tests on the selected targets and return a list of
    (description, result) tuples."""
    results = []
    banner('Unit tests')

    if args.test_ssh:
        with build_directory():
            ssh_param = args.test_ssh.split(":")
            ssh_host = ssh_param[0]
            ssh_port = ssh_param[1] if len(ssh_param) > 1 else None

            try:
                with TestBenchSSH(ssh_host,
                                  args.prebuild_path,
                                  ssh_port=ssh_port) as t:
                    result = t.run()

            except TestBenchBase.Error as e:
                result = e.code

        results.append(('Unit tests on remote machine', result))

    if args.test_fvp:
        with build_directory():
            binary_path = os.path.expanduser(args.test_fvp[0])
            try:
                with TestBenchFVP(binary_path, args.prebuild_path) as t:
                    result = t.run()

            except TestBenchBase.Error as e:
                result = e.code

        results.append(('Unit tests on FVP', result))

    return results
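
All of the examples above and below rely on a build_directory() helper that is not included in the listing. The sketch below is only an illustration of what such a context manager might look like, assuming it creates a throw-away CMake build directory inside the current working directory, changes into it, and deletes it on exit unless asked to keep it; the persist keyword and the yielded directory name are assumptions based on how Example #3 uses them, and the real helper may differ. (Examples #5 and #6 call a different build_directory() that simply creates an output directory tree.)

import contextlib
import os
import shutil
import tempfile


@contextlib.contextmanager
def build_directory(persist=False):
    """Illustrative sketch only: create a temporary build directory inside
    the current working directory, chdir into it, and remove it on exit
    unless persist is True, in which case the directory is kept and its
    name is yielded so callers can reuse the build later."""
    previous_dir = os.getcwd()
    build_dir = tempfile.mkdtemp(prefix='build-', dir=previous_dir)
    os.chdir(build_dir)
    try:
        yield build_dir
    finally:
        os.chdir(previous_dir)
        if not persist:
            shutil.rmtree(build_dir, ignore_errors=True)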
Example #2
def main():
    machine = platform.machine()
    results = []
    threads = len(os.sched_getaffinity(0))

    parser = argparse.ArgumentParser()
    target = parser.add_argument_group(
        'Unit tests',
        'Note: At least one unit test target or --skip-tests must be selected')

    target.add_argument('--test-ssh',
                        help='Run unit tests on a target system via SSH. This \
                              argument requires the following parameters:\n\
                              <ip|hostname>[:port].\n\
                              The password and username for the target system \
                              are hardcoded in the script.',
                        required=False,
                        metavar="<ip|hostname>[:port]")

    target.add_argument('--test-fvp',
                        help='Run unit tests on a fast-model. This argument \
                              requires the following parameters:\n\
                              <path/to/system/binaries>',
                        required=False,
                        metavar="<path>",
                        nargs=1)

    target.add_argument('--skip-tests',
                        help='Only do the style validation and building of \
                              the tests. Tests are not executed.',
                        action='store_true')

    target.add_argument('--prebuild-path',
                        '-p',
                        help='Location of an existing build folder to use for \
                              the remote unit tests instead of building \
                              inside the remote machine. This argument \
                              requires the following parameters:\n\
                              <path/to/build/folder>',
                        required=False,
                        metavar="<path>",
                        default=None,
                        nargs=1)

    args = parser.parse_args(sys.argv[1:])
    if args.prebuild_path:
        args.prebuild_path =\
            os.path.abspath(os.path.expanduser(args.prebuild_path[0]))

    if not args.test_ssh and not args.test_fvp and not args.skip_tests:
        print("No unit test target or --skip-tests supplied\n",
              file=sys.stderr)
        parser.print_help()
        return 1

    banner('Style validation')

    result = check_copyright.main()
    results.append(('Check copyright', result))

    result = check_tabs.main()
    results.append(('Check tabs', result))

    result = check_EOF.main()
    results.append(('Check EOF', result))

    result = subprocess.call(
        'pycodestyle --show-source tools/ '
        '--exclude=arm_platform_build',
        shell=True)
    results.append(('Pycodestyle', result))

    with build_directory():
        subprocess.call('cmake -DBUILD_DOC=ON ..', shell=True)
        result = subprocess.call('make doc', shell=True)
        results.append(('Check doc', result))

    banner('Builds')

    cmake_params = '-DBUILD_DOC=ON -DBUILD_TEST=ON '
    build_info = ''
    if machine == 'x86_64':
        print('Using cross-compilation')
        cmake_params += '-DCMAKE_TOOLCHAIN_FILE=../tools/toolchain.cmake'
        build_info += ' (cross-compiled)'

    with build_directory():
        subprocess.call('cmake {} ..'.format(cmake_params), shell=True)
        result = subprocess.call('make -j{}'.format(threads), shell=True)
        results.append(('Build libddssec{}'.format(build_info), result))
        library_build_result = result

    with build_directory():
        subprocess.call('cmake ..', shell=True)
        result = subprocess.call('make ta', shell=True)
        results.append(('Build trusted application', result))
        ta_build_result = result

    # Skip the unit tests if either build fails or if args.skip_tests is set
    if library_build_result != 0 or ta_build_result != 0 or args.skip_tests:
        print('Skipping unit tests')
        results.append(('Unit tests on remote machine', None))
        results.append(('Unit tests on FVP', None))
    else:
        result = run_unit_tests(args)
        for r in result:
            results.append(r)

    return process_results(results)
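
Example #2 (and Example #3 below) hands the collected (description, exit code) pairs to a process_results() helper that is not part of the listing. A minimal sketch of what it could do, assuming a result of None marks a skipped step and the return value becomes the script's exit code; the actual helper may print a different summary:

def process_results(results):
    # Illustrative sketch only. Each entry is a (description, code) pair
    # where code is a return code, or None for a skipped step.
    failures = 0
    for description, code in results:
        if code is None:
            status = 'Skipped'
        elif code == 0:
            status = 'Success'
        else:
            status = 'Failed'
            failures += 1
        print('{:<45} {}'.format(description, status))
    return 1 if failures else 0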
Example #3
def main():
    machine = platform.machine()
    results = []
    threads = len(os.sched_getaffinity(0))
    prebuild_path = None

    parser = argparse.ArgumentParser()
    target = parser.add_argument_group(
        'Unit tests',
        'Note: At least one unit test target or --skip-tests must be selected')

    target.add_argument('--test-ssh',
                        help='Run unit tests on a target system via SSH. This \
                              argument requires the following parameters:\n\
                              <ip|hostname>[:port].\n\
                              The password and username for the target system \
                              are hardcoded in the script.',
                        required=False,
                        metavar="<ip|hostname>[:port]")

    target.add_argument('--test-fvp',
                        help='Run unit tests on a fast-model. This argument \
                              requires the following parameters:\n\
                              <path/to/system/binaries>',
                        required=False,
                        metavar="<path>",
                        nargs=1)

    target.add_argument('--skip-tests',
                        help='Only do the style validation and building of \
                              the tests. Tests are not executed.',
                        action='store_true')

    target.add_argument('--build-on-target',
                        '-p',
                        help='Build tests natively on the target (e.g. \
                              FastModels). This option is much slower than \
                              using cross-compilation but has the benefit of \
                              testing a native build.',
                        required=False,
                        default=False,
                        action='store_true')

    args = parser.parse_args(sys.argv[1:])

    if not args.test_ssh and not args.test_fvp and not args.skip_tests:
        print("No unit test target or --skip-tests supplied\n",
              file=sys.stderr)
        parser.print_help()
        return 1

    banner('Style validation')

    result = check_copyright.main()
    results.append(('Check copyright', result))

    result = check_tabs.main()
    results.append(('Check tabs', result))

    result = check_EOF.main()
    results.append(('Check EOF', result))

    result = subprocess.call(
        'pycodestyle --show-source tools/ '
        '--exclude=arm_platform_build',
        shell=True)
    results.append(('Pycodestyle', result))

    basedir = os.getcwd()
    with build_directory():
        subprocess.call('cmake -DBUILD_DOC=ON {}'.format(basedir), shell=True)
        result = subprocess.call('make doc', shell=True)
        results.append(('Check doc', result))

    banner('Builds')

    cmake_params = '-DBUILD_DOC=ON -DBUILD_TEST=ON '
    build_info = ''

    if machine == 'x86_64':
        print('Using cross-compilation')
        cmake_params += '-DCMAKE_TOOLCHAIN_FILE=../tools/toolchain.cmake'
        build_info += ' (cross-compiled)'

    persist_test_build = not args.build_on_target
    with build_directory(persist=persist_test_build) as build_dir_name:
        subprocess.call('cmake {} {}'.format(cmake_params, basedir),
                        shell=True)

        result = subprocess.call('make -j{}'.format(threads), shell=True)
        results.append(('Build libddssec{}'.format(build_info), result))
        library_build_result = result

        result = subprocess.call('make test-ta', shell=True)
        results.append(('Build test-ta{}'.format(build_info), result))
        ta_build_result = result

        if persist_test_build:
            prebuild_path = build_dir_name

    with build_directory():
        subprocess.call('cmake {}'.format(basedir), shell=True)
        result = subprocess.call('make ta', shell=True)
        results.append(('Build trusted application', result))

    # Skip the unit tests if either build fails or if args.skip_tests is set
    if library_build_result != 0 or ta_build_result != 0 or args.skip_tests:
        print('Skipping unit tests')
        results.append(('Unit tests on remote machine', None))
        results.append(('Unit tests on FVP', None))
    else:
        result = run_unit_tests(args, prebuild_path)
        for r in result:
            results.append(r)

    if persist_test_build:
        shutil.rmtree(prebuild_path)

    return process_results(results)
Example #4
def do_check(selected_profiles, output_base_dir, use_cross_compilation,
             selected_build_types):
    """
    Setup and build the project under the Coverity tools.
    Return 0 when the analysis succeeded. Any other value when the build fails
    or the code base has issues.
    """
    has_issues = False

    if use_cross_compilation:
        if 'CROSS_COMPILE' not in os.environ:
            print("Error: When using the cross-compiler option, you must set"
                  " the environment variable CROSS_COMPILE")
            sys.exit(1)
        compiler = os.environ.get('CROSS_COMPILE') + 'gcc'
        print("Using cross compiler: {}".format(compiler))

    elif 'CC' in os.environ:
        cc = os.environ.get('CC')
        print("Using compiler from the CC: {}".format(cc))
        compiler = os.path.basename(cc)

    else:
        compiler = 'gcc'

    if os.path.dirname(compiler):
        print("Error: Coverity does not accept a full path to the compiler,"
              " you must only specify the executable name.")
        sys.exit(1)

    for build_type in selected_build_types:

        print('*' * 80)
        print("Build type: {}".format(build_type))

        output_base_dir = 'coverity_results' if output_base_dir is None \
            else output_base_dir
        output_dir = os.path.join(output_base_dir, build_type)

        with build_directory():
            cmake_params = '-DCMAKE_BUILD_TYPE={}'.format(
                build_type.capitalize())

            if use_cross_compilation:
                cmake_params += \
                    ' -DCMAKE_TOOLCHAIN_FILE=../tools/toolchain.cmake'

            print("CMake parameters: {}".format(cmake_params))

            # Run cmake to prepare the build
            ret = subprocess.call('cmake {} ..'.format(cmake_params),
                                  shell=True)
            if ret != 0:
                return ret

            # Configure compiler for native compilation
            subprocess.check_call('cov-configure'
                                  '  --comptype gcc'
                                  '  --compiler {}'
                                  '  --config ./coverity_config.xml'
                                  '  --template'.format(compiler),
                                  shell=True)

            # Initialize Coverity build directory
            subprocess.check_call(
                'cov-build'
                '  --config ./coverity_config.xml'
                '  --dir ./coverity_build'
                '  --initialize',
                shell=True)

            # Build library and trusted application
            ret = subprocess.call(
                'cov-build'
                '  --config ./coverity_config.xml'
                '  --dir ./coverity_build'
                '  --capture make libddssec ta',
                shell=True)
            if ret != 0:
                return ret

            for profile_name in selected_profiles:
                profile = next(p for p in profiles
                               if profile_name == p['name'])
                result = do_check_profile(profile, output_dir)
                if result == 1:
                    has_issues = True

    if has_issues:
        return 1
    else:
        return 0
Example #5
parameters = [
    sherpa.Choice('lr', [1e-1, 1e-2, 1e-3, 1e-4, 1e-5, 1e-6]),
    # sherpa.Choice('activation', ['relu', 'prelu', 'elu', 'leaky_relu', 'sigmoid']),
    # sherpa.Choice('kernel_initializer', ['glorot_normal', 'glorot_uniform', 'he_normal', 'he_uniform']),
]

for k, v in vars(FLAGS).items():
    parameters.append(
        sherpa.Choice(k, [v])
    )

# Run on local machine.
gpus = [int(x) for x in FLAGS.gpus.split(',')]
processes_per_gpu = FLAGS.max_concurrent // len(gpus)
assert FLAGS.max_concurrent % len(gpus) == 0
resources = list(
    itertools.chain.from_iterable(
        itertools.repeat(x, processes_per_gpu) for x in gpus))

sched = sherpa.schedulers.LocalScheduler(resources=resources)
alg = sherpa.algorithms.RandomSearch(max_num_trials=200)

build_directory('SherpaResults/' + FLAGS.dataset + '/Models')

sherpa.optimize(
    parameters=parameters,
    algorithm=alg,
    lower_is_better=True,
    command='python hp_main.py',
    scheduler=sched,
    max_concurrent=FLAGS.max_concurrent,
    output_dir='SherpaResults/' + FLAGS.dataset + '/'
)
Example #6
parameters = [
    sherpa.Choice('number_of_layers', [2, FLAGS.max_layers]),
    sherpa.Choice('lr', [0.01, 0.001, 0.0001]),
    sherpa.Choice('batch_size', [32]),
    # sherpa.Choice('activation', ['relu', 'prelu', 'elu', 'leaky_relu', 'sigmoid']),
    # sherpa.Choice('kernel_initializer', ['glorot_normal', 'glorot_uniform', 'he_normal', 'he_uniform']),
]

for k, v in vars(FLAGS).items():
    parameters.append(sherpa.Choice(k, [v]))

# Run on local machine.
gpus = [int(x) for x in FLAGS.gpus.split(',')]
processes_per_gpu = FLAGS.max_concurrent // len(gpus)
assert FLAGS.max_concurrent % len(gpus) == 0
resources = list(
    itertools.chain.from_iterable(
        itertools.repeat(x, processes_per_gpu) for x in gpus))

sched = sherpa.schedulers.LocalScheduler(resources=resources)
alg = sherpa.algorithms.RandomSearch(max_num_trials=150)

build_directory('SherpaResults/' + FLAGS.name + '/Models')

sherpa.optimize(parameters=parameters,
                algorithm=alg,
                lower_is_better=True,
                command='python main.py',
                scheduler=sched,
                max_concurrent=FLAGS.max_concurrent,
                output_dir='SherpaResults/' + FLAGS.name + '/')
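
Examples #5 and #6 only show the driver side of a SHERPA search; the command passed to sherpa.optimize() ('python hp_main.py' or 'python main.py') is a separate trial script that reads its assigned parameters and reports an objective back to the study. The sketch below shows the usual client-side pattern with sherpa.Client; the training loop and the objective value are placeholders, not part of the original examples.

# Hypothetical trial script (e.g. hp_main.py); illustrative only.
import sherpa

client = sherpa.Client()
trial = client.get_trial()            # parameters picked by RandomSearch

lr = trial.parameters['lr']           # one of the values offered via sherpa.Choice('lr', ...)

for iteration in range(10):
    # ... train the model for one epoch with learning rate lr ...
    objective = 1.0 / (iteration + 1)  # placeholder; report a real validation loss here
    client.send_metrics(trial=trial, iteration=iteration, objective=objective)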