def export(target, ide, build=None, src=None, macros=None, project_id=None,
           zip_proj=False, build_profile=None, export_path=None, notify=None,
           app_config=None, ignore=None, resource_filter=None):
    """Do an export of a project.

    Positional arguments:
    target - MCU that the project will compile for
    ide - the IDE or project structure to export to

    Keyword arguments:
    build - to use the compiled mbed libraries or not
    src - directory or directories that contain the source to export
    macros - extra macros to add to the project
    project_id - the name of the project
    zip_proj - create a zip file or not
    build_profile - build profile(s) passed through to the exporter
    export_path - directory the project files are exported into
    notify - progress notifier; a TerminalNotifier is created when omitted
    app_config - path to an application configuration file
    ignore - list of paths to add to mbedignore
    resource_filter - can be used for filtering out resources after scan

    Returns an object of type Exporter (tools/exports/exporters.py)
    """
    project_dir, name, src, lib = setup_project(
        ide,
        target,
        bool(zip_proj),
        program=project_id,
        source_dir=src,
        build=build,
        export_path=export_path,
    )

    zip_name = name + ".zip" if zip_proj else None

    return export_project(
        src, project_dir, target, ide, name=name, macros=macros,
        libraries_paths=lib, zip_proj=zip_name, build_profile=build_profile,
        # Fix: honour a caller-supplied notifier; previously the `notify`
        # argument was accepted but silently ignored.
        notify=notify or TerminalNotifier(),
        app_config=app_config, ignore=ignore,
        resource_filter=resource_filter)
        os.path.join(mbed_os_dir, "tools/profiles/debug.json"),
        os.path.join(mbed_os_dir, "tools/profiles/release.json")]

    # CMake build type matching each Mbed profile
    # NOTE(review): order presumably parallels the profile_jsons list above
    # (develop, debug, release) — confirm against the list's definition.
    profile_cmake_names = ["RELWITHDEBINFO", "DEBUG", "RELEASE"]

    print("Configuring build system for target " + target_name)

    # Can NOT be the current directory, or it screws up some internal regexes inside mbed tools.
    # That was a fun hour to debug...
    config_header_dir = os.path.join(generated_path, "config-headers")
    pathlib.Path(config_header_dir).mkdir(parents=True, exist_ok=True)  # create dir if not exists

    # Notifier used when scanning resources with each toolchain below.
    notifier = TerminalNotifier(True, False)

    # create a different toolchain for each profile so that we can detect the flags needed in each configuration
    profile_toolchains = []
    for profile_json_path in profile_jsons:
        with open(profile_json_path) as profile_file:
            print(">> Collecting data for config " + profile_json_path)
            profile_data = json.load(profile_file)
            profile_toolchain = build_api.prepare_toolchain(
                src_paths=[mbed_os_dir],
                build_dir=config_header_dir,
                target=target_name,
                toolchain_name=toolchain_name,
                build_profile=[profile_data])
            # each toolchain must then scan the mbed dir to pick up more configs
            resources = Resources(notifier).scan_with_toolchain(
                src_paths=[mbed_os_dir],
                toolchain=profile_toolchain,
                exclude=True)
            # Disable response files so the full flag list is visible later.
            profile_toolchain.RESPONSE_FILES=False
            profile_toolchains.append(profile_toolchain)
def export(target, ide, build=None, src=None, macros=None, project_id=None,
           zip_proj=False, build_profile=None, export_path=None, notify=None,
           app_config=None, ignore=None):
    """Do an export of a project.

    Positional arguments:
    target - MCU that the project will compile for
    ide - the IDE or project structure to export to

    Keyword arguments:
    build - to use the compiled mbed libraries or not
    src - directory or directories that contain the source to export
    macros - extra macros to add to the project
    project_id - the name of the project
    zip_proj - create a zip file or not
    build_profile - build profile(s) passed through to the exporter
    export_path - directory the project files are exported into
    notify - progress notifier used for info messages and export progress
    app_config - path to an application configuration file
    ignore - list of paths to add to mbedignore

    Returns an object of type Exporter (tools/exports/exporters.py)
    """
    ###################################
    # mbed Classic/2.0/libary support #

    # Find build system profile
    profile = None
    targets_json = None
    for path in src:
        profile = find_build_profile(path) or profile
        if profile:
            targets_json = join(dirname(dirname(abspath(__file__))),
                                'legacy_targets.json')
        else:
            targets_json = find_targets_json(path) or targets_json

    # Apply targets.json to active targets
    if targets_json:
        notify.info("Using targets from %s" % targets_json)
        set_targets_json_location(targets_json)

    # Apply profile to toolchains
    if profile:
        def init_hook(self):
            profile_data = get_toolchain_profile(self.name, profile)
            if not profile_data:
                return
            notify.info("Using toolchain %s profile %s" % (self.name, profile))

            for k, v in profile_data.items():
                # Fix: dict.has_key() was removed in Python 3; use the `in`
                # operator instead (previously raised AttributeError whenever
                # a legacy build profile was applied).
                if k in self.flags:
                    self.flags[k] = v
                else:
                    setattr(self, k, v)
        mbedToolchain.init = init_hook

    # mbed Classic/2.0/libary support #
    ###################################

    project_dir, name, src, lib = setup_project(
        ide,
        target,
        bool(zip_proj),
        program=project_id,
        source_dir=src,
        build=build,
        export_path=export_path,
    )

    zip_name = name + ".zip" if zip_proj else None

    return export_project(
        src, project_dir, target, ide, name=name, macros=macros,
        libraries_paths=lib, zip_proj=zip_name, build_profile=build_profile,
        # Fix: honour a caller-supplied notifier; previously the `notify`
        # argument was accepted but silently ignored here.
        notify=notify or TerminalNotifier(),
        app_config=app_config, ignore=ignore)
def main():
    """Entry point for the library build command.

    Parses CLI options, then builds the mbed libraries (or a user source
    tree) for every requested target/toolchain pair, printing a summary
    and exiting non-zero if any build failed.
    """
    start = time()

    # Parse Options
    parser = get_default_options_parser()
    parser.add_argument("--source", dest="source_dir",
                        type=argparse_filestring_type,
                        default=None, help="The source (input) directory",
                        action="append")
    parser.add_argument("--build", dest="build_dir",
                        type=argparse_dir_not_parent(ROOT),
                        default=None, help="The build (output) directory")
    parser.add_argument("--no-archive", dest="no_archive",
                        action="store_true", default=False,
                        help="Do not produce archive (.ar) file, but rather .o")

    # Extra libraries
    parser.add_argument("-r", "--rtos", action="store_true", dest="rtos",
                        default=False, help="Compile the rtos")
    parser.add_argument("--rpc", action="store_true", dest="rpc",
                        default=False, help="Compile the rpc library")
    parser.add_argument("-u", "--usb", action="store_true", dest="usb",
                        default=False, help="Compile the USB Device library")
    parser.add_argument("-d", "--dsp", action="store_true", dest="dsp",
                        default=False, help="Compile the DSP library")
    parser.add_argument(
        "--cpputest", action="store_true", dest="cpputest_lib", default=False,
        help="Compiles 'cpputest' unit test library (library should be on the same directory level as mbed repository)")
    parser.add_argument("-D", action="append", dest="macros",
                        help="Add a macro definition")
    parser.add_argument("-S", "--supported-toolchains", action="store_true",
                        dest="supported_toolchains", default=False,
                        help="Displays supported matrix of MCUs and toolchains")
    parser.add_argument('-f', '--filter', dest='general_filter_regex',
                        default=None,
                        help='For some commands you can use filter to filter out results')
    parser.add_argument("-j", "--jobs", type=int, dest="jobs", default=0,
                        help="Number of concurrent jobs. Default: 0/auto (based on host machine's number of CPUs)")
    parser.add_argument("-N", "--artifact-name", dest="artifact_name",
                        default=None, help="The built project's name")
    parser.add_argument("-v", "--verbose", action="store_true", dest="verbose",
                        default=False, help="Verbose diagnostic output")
    parser.add_argument("--silent", action="store_true", dest="silent",
                        default=False,
                        help="Silent diagnostic output (no copy, compile notification)")
    parser.add_argument("-x", "--extra-verbose-notifications",
                        action="store_true", dest="extra_verbose_notify",
                        default=False,
                        help="Makes compiler more verbose, CI friendly.")
    parser.add_argument("--ignore", dest="ignore", type=argparse_many(str),
                        default=None,
                        help="Comma separated list of patterns to add to mbedignore (eg. ./main.cpp)")

    options = parser.parse_args()

    # Only prints matrix of supported toolchains
    if options.supported_toolchains:
        print(mcu_toolchain_matrix(platform_filter=options.general_filter_regex))
        exit(0)

    # Get target list: fall back to every known target when -m is omitted
    targets = extract_mcus(parser, options) if options.mcu else TARGET_NAMES

    # Get toolchains list: fall back to every toolchain when -t is omitted
    toolchains = options.tool if options.tool else TOOLCHAINS

    if options.source_dir and not options.build_dir:
        args_error(parser, "argument --build is required by argument --source")

    # Get libraries list
    libraries = []

    # Additional Libraries
    if options.rpc:
        libraries.extend(["rpc"])
    if options.usb:
        libraries.append("usb")
    if options.dsp:
        libraries.extend(["dsp"])
    if options.cpputest_lib:
        libraries.extend(["cpputest"])

    # Build results, accumulated over every (toolchain, target) pair
    failures = []
    successes = []
    skipped = []
    end_warnings = []

    if options.clean:
        clean_psa_autogen()

    for toolchain in toolchains:
        for target_name in targets:
            target = Target.get_target(target_name)
            try:
                toolchain_name, internal_tc_name, end_warnings = find_valid_toolchain(
                    target, toolchain
                )
            except NoValidToolchainException as e:
                print_end_warnings(e.end_warnings)
                args_error(parser, str(e))

            tt_id = "%s::%s" % (internal_tc_name, target_name)
            if not target_supports_toolchain(target, toolchain_name):
                # Log this later
                print("%s skipped: toolchain not supported" % tt_id)
                skipped.append(tt_id)
            else:
                try:
                    notifier = TerminalNotifier(options.verbose, options.silent)
                    profile = extract_profile(parser, options, internal_tc_name)

                    if options.source_dir:
                        if target.is_PSA_target:
                            generate_psa_sources(
                                source_dirs=options.source_dir,
                                ignore_paths=[options.build_dir]
                            )
                        # Secure PSA targets only build OS + SPE resources
                        resource_filter = None
                        if target.is_PSA_secure_target:
                            resource_filter = OsAndSpeResourceFilter()
                        lib_build_res = build_library(
                            options.source_dir, options.build_dir, target,
                            toolchain_name, jobs=options.jobs,
                            clean=options.clean,
                            archive=(not options.no_archive),
                            macros=options.macros,
                            name=options.artifact_name,
                            build_profile=profile,
                            ignore=options.ignore,
                            notify=notifier,
                            resource_filter=resource_filter
                        )
                    else:
                        lib_build_res = build_mbed_libs(
                            target, toolchain_name,
                            jobs=options.jobs,
                            clean=options.clean,
                            macros=options.macros,
                            build_profile=profile,
                            ignore=options.ignore,
                            notify=notifier,
                        )

                        for lib_id in libraries:
                            build_lib(
                                lib_id, target, toolchain_name,
                                clean=options.clean,
                                macros=options.macros,
                                jobs=options.jobs,
                                build_profile=profile,
                                ignore=options.ignore,
                            )

                    if lib_build_res:
                        successes.append(tt_id)
                    else:
                        skipped.append(tt_id)
                except KeyboardInterrupt as e:
                    print("\n[CTRL+c] exit")
                    print_end_warnings(end_warnings)
                    sys.exit(0)
                except Exception as e:
                    if options.verbose:
                        import traceback
                        traceback.print_exc(file=sys.stdout)
                        # Verbose mode aborts the whole run on first failure
                        print_end_warnings(end_warnings)
                        sys.exit(1)
                    failures.append(tt_id)
                    print(e)

    # Write summary of the builds
    print("\nCompleted in: (%.2f)s\n" % (time() - start))

    for report, report_name in [(successes, "Build successes:"),
                                (skipped, "Build skipped:"),
                                (failures, "Build failures:"),
                                ]:
        if report:
            print(print_build_results(report, report_name))

    print_end_warnings(end_warnings)
    if failures:
        sys.exit(1)
                base_source_paths = ROOT
            else:
                base_source_paths = options.source_dir

            # Default base source path is the current directory
            if not base_source_paths:
                base_source_paths = ['.']

            # Per-run build bookkeeping, filled in by build_library below
            build_report = {}
            build_properties = {}

            library_build_success = False
            profile = extract_profile(parser, options, toolchain)
            try:
                # Build sources
                notify = TerminalNotifier(options.verbose)
                build_library(base_source_paths, options.build_dir, mcu,
                              toolchain, jobs=options.jobs,
                              clean=options.clean, report=build_report,
                              properties=build_properties,
                              name="mbed-build", macros=options.macros,
                              notify=notify, archive=False,
                              app_config=config, build_profile=profile,
                              ignore=options.ignore)
mcu = extract_mcus(parser, options)[0] # Toolchain if options.tool is None: args_error(parser, "argument -t/--tool is required") toolchain = options.tool[0] if (options.program is None) and (not options.source_dir): args_error(parser, "one of -p, -n, or --source is required") if options.source_dir and not options.build_dir: args_error( parser, "argument --build is required when argument --source is provided") notify = TerminalNotifier(options.verbose, options.silent, options.color) if not TOOLCHAIN_CLASSES[toolchain].check_executable(): search_path = TOOLCHAIN_PATHS[toolchain] or "No path set" args_error( parser, "Could not find executable for %s.\n" "Currently set search path: %s" % (toolchain, search_path)) # Test build_data_blob = {} if options.build_data else None for test_no in p: test = Test(test_no) if options.automated is not None: test.automated = options.automated if options.dependencies is not None: test.dependencies = options.dependencies if options.host_test is not None: test.host_test = options.host_test
def main():
    """Entry point for the test build command.

    Discovers greentea/icetea tests under the given paths, builds the
    non-test sources as a library, then builds every selected test,
    optionally emitting a test spec, a JUnit report and build data.
    """
    error = False
    try:
        # Parse Options
        parser = get_default_options_parser(add_app_config=True)
        parser.add_argument("-D", action="append", dest="macros",
                            help="Add a macro definition")
        parser.add_argument(
            "-j", "--jobs", type=int, dest="jobs", default=0,
            help="Number of concurrent jobs. Default: 0/auto (based on host machine's number of CPUs)"
        )
        parser.add_argument(
            "--source", dest="source_dir", type=argparse_filestring_type,
            default=None,
            help="The source (input) directory (for sources other than tests). Defaults to current directory.",
            action="append")
        parser.add_argument("--build", dest="build_dir",
                            type=argparse_dir_not_parent(ROOT),
                            default=None,
                            help="The build (output) directory")
        parser.add_argument(
            "-l", "--list", action="store_true", dest="list", default=False,
            help="List (recursively) available tests in order and exit")
        parser.add_argument(
            "-p", "--paths", dest="paths",
            type=argparse_many(argparse_filestring_type), default=None,
            help="Limit the tests to those within the specified comma separated list of paths"
        )
        format_choices = ["list", "json"]
        format_default_choice = "list"
        format_help = "Change the format in which tests are listed. Default: %s" % (
            ", ".join(format_choices), format_default_choice)
        parser.add_argument("-f", "--format", dest="format",
                            type=argparse_lowercase_type(
                                format_choices, "format"),
                            default=format_default_choice,
                            help=format_help)
        parser.add_argument(
            "--continue-on-build-fail", action="store_true",
            dest="continue_on_build_fail", default=None,
            help="Continue trying to build all tests if a build failure occurs"
        )
        #TODO validate the names instead of just passing through str
        parser.add_argument(
            "-n", "--names", dest="names", type=argparse_many(str),
            default=None,
            help="Limit the tests to a comma separated list of names")
        parser.add_argument("--test-config", dest="test_config", type=str,
                            default=None, help="Test config for a module")
        parser.add_argument(
            "--test-spec", dest="test_spec", default=None,
            help="Destination path for a test spec file that can be used by the Greentea automated test tool"
        )
        parser.add_argument(
            "--build-report-junit", dest="build_report_junit", default=None,
            help="Destination path for a build report in the JUnit xml format")
        parser.add_argument("--build-data", dest="build_data", default=None,
                            help="Dump build_data to this file")
        parser.add_argument("-v", "--verbose", action="store_true",
                            dest="verbose", default=False,
                            help="Verbose diagnostic output")
        parser.add_argument(
            "--silent", action="store_true", dest="silent", default=False,
            help="Silent diagnostic output (no copy, compile notification)")
        parser.add_argument("--stats-depth", type=int, dest="stats_depth",
                            default=2,
                            help="Depth level for static memory report")
        parser.add_argument(
            "--ignore", dest="ignore", type=argparse_many(str), default=None,
            help="Comma separated list of patterns to add to mbedignore (eg. ./main.cpp)"
        )
        parser.add_argument("--icetea", action="store_true", dest="icetea",
                            default=False, help="Only icetea tests")
        parser.add_argument("--greentea", action="store_true", dest="greentea",
                            default=False, help="Only greentea tests")

        options = parser.parse_args()

        # Filter tests by path if specified
        if options.paths:
            all_paths = options.paths
        else:
            all_paths = ["."]

        all_tests = {}
        tests = {}
        end_warnings = []

        # As default both test tools are enabled
        if not (options.greentea or options.icetea):
            options.greentea = True
            options.icetea = True

        # Target
        if options.mcu is None:
            args_error(parser, "argument -m/--mcu is required")
        mcu = extract_mcus(parser, options)[0]
        target = Target.get_target(mcu)

        # Toolchain
        if options.tool is None:
            args_error(parser, "argument -t/--tool is required")
        toolchain = options.tool[0]

        try:
            toolchain_name, internal_tc_name, end_warnings = find_valid_toolchain(
                target, toolchain)
        except NoValidToolchainException as e:
            print_end_warnings(e.end_warnings)
            args_error(parser, str(e))

        # Assign config file. Precedence: test_config>app_config
        # TODO: merge configs if both given
        if options.test_config:
            config = get_test_config(options.test_config, mcu)
            if not config:
                args_error(
                    parser,
                    "argument --test-config contains invalid path or identifier"
                )
        elif options.app_config:
            config = options.app_config
        else:
            config = Config.find_app_config(options.source_dir)

        if not config:
            config = get_default_config(options.source_dir or ['.'], mcu)

        # Find all tests in the relevant paths
        for path in all_paths:
            all_tests.update(
                find_tests(base_dir=path,
                           target_name=mcu,
                           toolchain_name=toolchain_name,
                           icetea=options.icetea,
                           greentea=options.greentea,
                           app_config=config))

        # Filter tests by name if specified (fnmatch-style patterns)
        if options.names:
            all_names = options.names
            all_names = [x.lower() for x in all_names]

            for name in all_names:
                if any(
                        fnmatch.fnmatch(testname, name)
                        for testname in all_tests):
                    for testname, test in all_tests.items():
                        if fnmatch.fnmatch(testname, name):
                            tests[testname] = test
                else:
                    print("[Warning] Test with name '%s' was not found in the "
                          "available tests" % (name))
        else:
            tests = all_tests

        if options.list:
            # Print available tests in order and exit
            print_tests(tests, options.format)
            sys.exit(0)
        else:
            # Build all tests
            if not options.build_dir:
                args_error(parser, "argument --build is required")

            base_source_paths = options.source_dir

            # Default base source path is the current directory
            if not base_source_paths:
                base_source_paths = ['.']

            build_report = {}
            build_properties = {}

            library_build_success = False
            profile = extract_profile(parser, options, internal_tc_name)
            try:
                # Secure PSA targets need generated sources and a filter
                # restricting the scan to OS + SPE resources
                resource_filter = None
                if target.is_PSA_secure_target:
                    resource_filter = OsAndSpeResourceFilter()
                    generate_psa_sources(source_dirs=base_source_paths,
                                         ignore_paths=[options.build_dir])

                # Build sources
                notify = TerminalNotifier(options.verbose, options.silent)
                build_library(base_source_paths, options.build_dir, mcu,
                              toolchain_name, jobs=options.jobs,
                              clean=options.clean, report=build_report,
                              properties=build_properties,
                              name="mbed-build", macros=options.macros,
                              notify=notify, archive=False,
                              app_config=config, build_profile=profile,
                              ignore=options.ignore,
                              resource_filter=resource_filter)

                library_build_success = True
            except ToolException as e:
                # ToolException output is handled by the build log
                print("[ERROR] " + str(e))
                pass
            except NotSupportedException as e:
                # NotSupportedException is handled by the build log
                print("[ERROR] " + str(e))
                pass
            except Exception as e:
                if options.verbose:
                    import traceback
                    traceback.print_exc()
                # Some other exception occurred, print the error message
                print(e)

            if not library_build_success:
                print("Failed to build library")
            else:
                # For secure PSA targets only SPE resources go into tests
                if target.is_PSA_secure_target:
                    resource_filter = SpeOnlyResourceFilter()
                else:
                    resource_filter = None

                # Build all the tests
                notify = TerminalNotifier(options.verbose, options.silent)
                test_build_success, test_build = build_tests(
                    tests, [os.path.relpath(options.build_dir)],
                    options.build_dir, mcu, toolchain_name,
                    clean=options.clean,
                    report=build_report,
                    properties=build_properties,
                    macros=options.macros,
                    notify=notify,
                    jobs=options.jobs,
                    continue_on_build_fail=options.continue_on_build_fail,
                    app_config=config,
                    build_profile=profile,
                    stats_depth=options.stats_depth,
                    ignore=options.ignore,
                    resource_filter=resource_filter)

                # If a path to a test spec is provided, write it to a file
                if options.test_spec:
                    write_json_to_file(
                        test_spec_from_test_builds(test_build),
                        options.test_spec)

                # If a path to a JUnit build report spec is provided, write it to a file
                if options.build_report_junit:
                    report_exporter = ReportExporter(ResultExporterType.JUNIT,
                                                     package="build")
                    report_exporter.report_to_file(
                        build_report, options.build_report_junit,
                        test_suite_properties=build_properties)

                # Print memory map summary on screen
                if build_report:
                    print()
                    print(print_build_memory_usage(build_report))

                print_report_exporter = ReportExporter(
                    ResultExporterType.PRINT, package="build")
                status = print_report_exporter.report(build_report)
                if options.build_data:
                    merge_build_data(options.build_data, build_report, "test")

                if status:
                    sys.exit(0)
                else:
                    sys.exit(1)

    except KeyboardInterrupt as e:
        print("\n[CTRL+c] exit")
    except ConfigException as e:
        # Catching ConfigException here to prevent a traceback
        print("[ERROR] %s" % str(e))
        error = True
    except Exception as e:
        import traceback
        traceback.print_exc(file=sys.stdout)
        print("[ERROR] %s" % str(e))
        error = True

    # NOTE(review): end_warnings is bound inside the try block; an exception
    # before option parsing completes would make this a NameError — confirm.
    print_end_warnings(end_warnings)
    if error:
        sys.exit(1)
def main():
    """Entry point for the single-project/test build command.

    Either builds a user source tree (--source) or one or more of the
    built-in test programs (-p/-n), linking in optional extra libraries.
    """
    # Parse Options
    parser = get_default_options_parser(add_app_config=True)
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("-p",
                       type=argparse_many(test_known),
                       dest="program",
                       help="The index of the desired test program: [0-%d]"
                       % (len(TESTS) - 1))
    group.add_argument("-n",
                       type=argparse_many(test_name_known),
                       dest="program",
                       help="The name of the desired test program")
    group.add_argument("-L", "--list-tests",
                       action="store_true",
                       dest="list_tests",
                       default=False,
                       help="List available tests in order and exit")
    group.add_argument("-S", "--supported-toolchains",
                       dest="supported_toolchains",
                       default=False,
                       const="matrix",
                       choices=["matrix", "toolchains", "targets"],
                       nargs="?",
                       help="Displays supported matrix of MCUs and toolchains")
    parser.add_argument("-j", "--jobs",
                        type=int,
                        dest="jobs",
                        default=0,
                        help="Number of concurrent jobs. Default: 0/auto "
                        "(based on host machine's number of CPUs)")
    parser.add_argument("-v", "--verbose",
                        action="store_true",
                        dest="verbose",
                        default=False,
                        help="Verbose diagnostic output")
    parser.add_argument(
        "--silent",
        action="store_true",
        dest="silent",
        default=False,
        help="Silent diagnostic output (no copy, compile notification)")
    parser.add_argument("-D",
                        action="append",
                        dest="macros",
                        help="Add a macro definition")
    parser.add_argument(
        '-f', '--filter',
        dest='general_filter_regex',
        default=None,
        help='For some commands you can use filter to filter out results')
    parser.add_argument("--stats-depth",
                        type=int,
                        dest="stats_depth",
                        default=2,
                        help="Depth level for static memory report")
    parser.add_argument("--automated", action="store_true", dest="automated",
                        default=False, help="Automated test")
    parser.add_argument("--host", dest="host_test", default=None,
                        help="Host test")
    parser.add_argument("--extra", dest="extra", default=None,
                        help="Extra files")
    parser.add_argument("--peripherals", dest="peripherals", default=None,
                        help="Required peripherals")
    parser.add_argument("--dep",
                        dest="dependencies", default=None,
                        help="Dependencies")
    parser.add_argument("--source", dest="source_dir",
                        type=argparse_filestring_type, default=None,
                        action="append", help="The source (input) directory")
    parser.add_argument("--duration", type=int, dest="duration", default=None,
                        help="Duration of the test")
    parser.add_argument("--build", dest="build_dir",
                        type=argparse_dir_not_parent(ROOT), default=None,
                        help="The build (output) directory")
    parser.add_argument("-N", "--artifact-name", dest="artifact_name",
                        default=None, help="The built project's name")
    parser.add_argument(
        "--ignore", dest="ignore", type=argparse_many(str), default=None,
        help="Comma separated list of patterns to add to mbedignore "
        "(eg. ./main.cpp)")
    parser.add_argument("-b", "--baud", type=int, dest="baud", default=None,
                        help="The mbed serial baud rate")
    parser.add_argument("--rpc", action="store_true", dest="rpc",
                        default=False, help="Link with RPC library")
    parser.add_argument("--usb", action="store_true", dest="usb",
                        default=False, help="Link with USB Device library")
    parser.add_argument("--dsp", action="store_true", dest="dsp",
                        default=False, help="Link with DSP library")
    parser.add_argument("--testlib", action="store_true", dest="testlib",
                        default=False, help="Link with mbed test library")
    parser.add_argument("--build-data", dest="build_data", default=None,
                        help="Dump build_data to this file")
    parser.add_argument("-l", "--linker", dest="linker_script",
                        type=argparse_filestring_type, default=None,
                        help="use the specified linker script")

    options = parser.parse_args()

    end_warnings = []

    if options.supported_toolchains:
        if options.supported_toolchains == "matrix":
            print_large_string(
                mcu_toolchain_matrix(
                    platform_filter=options.general_filter_regex,
                    release_version=None))
        elif options.supported_toolchains == "toolchains":
            print('\n'.join(get_toolchain_list()))
        elif options.supported_toolchains == "targets":
            print_large_string(mcu_target_list())
    elif options.list_tests is True:
        print('\n'.join(map(str, sorted(TEST_MAP.values()))))
    else:
        # Target
        if options.mcu is None:
            args_error(parser, "argument -m/--mcu is required")
        mcu = extract_mcus(parser, options)[0]

        # Toolchain
        if options.tool is None:
            args_error(parser, "argument -t/--tool is required")
        toolchain = options.tool[0]

        target = Target.get_target(mcu)

        if (options.program is None) and (not options.source_dir):
            args_error(parser, "one of -p, -n, or --source is required")

        if options.source_dir and not options.build_dir:
            args_error(
                parser,
                "argument --build is required when argument --source is provided"
            )

        notify = TerminalNotifier(options.verbose, options.silent,
                                  options.color)

        try:
            toolchain_name, internal_tc_name, end_warnings = find_valid_toolchain(
                target, toolchain)
        except NoValidToolchainException as e:
            print_end_warnings(e.end_warnings)
            args_error(parser, str(e))

        if options.source_dir is not None:
            # Build a plain source tree (not one of the built-in tests)
            resource_filter = None
            wrapped_build_project(options.source_dir,
                                  options.build_dir,
                                  mcu,
                                  end_warnings,
                                  options,
                                  toolchain_name,
                                  notify=notify,
                                  build_profile=extract_profile(
                                      parser, options, internal_tc_name),
                                  resource_filter=resource_filter,
                                  **default_args_dict(options))
        else:
            p = options.program

            # If 'p' was set via -n to list of numbers make this a single element
            # integer list
            if not isinstance(p, list):
                p = [p]

            build_data_blob = {} if options.build_data else None
            for test_no in p:
                test = Test(test_no)
                # Command-line overrides for per-test metadata
                if options.automated is not None:
                    test.automated = options.automated
                if options.dependencies is not None:
                    test.dependencies = options.dependencies
                if options.host_test is not None:
                    test.host_test = options.host_test
                if options.peripherals is not None:
                    test.peripherals = options.peripherals
                if options.duration is not None:
                    test.duration = options.duration
                if options.extra is not None:
                    test.extra_files = options.extra

                if not test.is_supported(mcu, toolchain):
                    print('The selected test is not supported on target '
                          '%s with toolchain %s' % (mcu, toolchain))
                    sys.exit()

                # Linking with extra libraries
                if options.rpc:
                    test.dependencies.append(RPC_LIBRARY)
                if options.usb:
                    test.dependencies.append(USB_LIBRARIES)
                if options.dsp:
                    test.dependencies.append(DSP_LIBRARIES)
                if options.testlib:
                    test.dependencies.append(TEST_MBED_LIB)

                build_dir = join(BUILD_DIR, "test", mcu, toolchain, test.id)
                if options.build_dir is not None:
                    build_dir = options.build_dir

                wrapped_build_project(test.source_dir,
                                      build_dir,
                                      mcu,
                                      end_warnings,
                                      options,
                                      toolchain_name,
                                      set(test.dependencies),
                                      notify=notify,
                                      report=build_data_blob,
                                      inc_dirs=[dirname(MBED_LIBRARIES)],
                                      build_profile=extract_profile(
                                          parser, options, internal_tc_name),
                                      **default_args_dict(options))
            if options.build_data:
                merge_build_data(options.build_data, build_data_blob,
                                 "application")
def main():
    """Entry point for the project export command.

    Parses CLI options and exports the selected program or source tree
    to the requested IDE/project format.
    """
    # Parse Options
    parser = ArgumentParser()

    targetnames = TARGET_NAMES
    targetnames.sort()
    toolchainlist = list(EXPORTERS.keys())
    toolchainlist.sort()

    parser.add_argument("-m", "--mcu",
                        metavar="MCU",
                        help="generate project for the given MCU ({})".format(
                            ', '.join(targetnames)))

    parser.add_argument("-i",
                        dest="ide",
                        type=argparse_force_lowercase_type(
                            toolchainlist, "toolchain"),
                        help="The target IDE: %s" % str(toolchainlist))

    parser.add_argument("-c", "--clean",
                        action="store_true",
                        default=False,
                        help="clean the export directory")

    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument(
        "-p",
        type=test_known,
        dest="program",
        help="The index of the desired test program: [0-%s]"
        % (len(TESTS) - 1))

    group.add_argument("-n",
                       type=test_name_known,
                       dest="program",
                       help="The name of the desired test program")

    parser.add_argument(
        "-b",
        dest="build",
        default=False,
        action="store_true",
        help="use the mbed library build, instead of the sources")

    group.add_argument("-L", "--list-tests",
                       action="store_true",
                       dest="list_tests",
                       default=False,
                       help="list available programs in order and exit")

    group.add_argument("-S", "--list-matrix",
                       dest="supported_ides",
                       default=False,
                       const="matrix",
                       choices=["matrix", "ides"],
                       nargs="?",
                       help="displays supported matrix of MCUs and IDEs")

    parser.add_argument("-E",
                        action="store_true",
                        dest="supported_ides_html",
                        default=False,
                        help="writes tools/export/README.md")

    parser.add_argument("--build",
                        type=argparse_filestring_type,
                        dest="build_dir",
                        default=None,
                        help="Directory for the exported project files")

    parser.add_argument("--source",
                        action="append",
                        type=argparse_filestring_type,
                        dest="source_dir",
                        default=[],
                        help="The source (input) directory")

    parser.add_argument("-D",
                        action="append",
                        dest="macros",
                        help="Add a macro definition")

    parser.add_argument("--profile",
                        dest="profile",
                        action="append",
                        type=argparse_profile_filestring_type,
                        help="Build profile to use. Can be either path to json" \
                        "file or one of the default one ({})".format(
                            ", ".join(list_profiles())),
                        default=[])

    parser.add_argument("--update-packs",
                        dest="update_packs",
                        action="store_true",
                        default=False)
    parser.add_argument("--app-config",
                        dest="app_config",
                        default=None)

    options = parser.parse_args()

    # Print available tests in order and exit
    if options.list_tests is True:
        print('\n'.join([str(test) for test in sorted(TEST_MAP.values())]))
        sys.exit()

    # Only prints matrix of supported IDEs
    if options.supported_ides:
        if options.supported_ides == "matrix":
            print_large_string(mcu_ide_matrix())
        elif options.supported_ides == "ides":
            print(mcu_ide_list())
        exit(0)

    # Only prints matrix of supported IDEs
    if options.supported_ides_html:
        html = mcu_ide_matrix(verbose_html=True)
        try:
            with open("./export/README.md", "w") as readme:
                readme.write("Exporter IDE/Platform Support\n")
                readme.write("-----------------------------------\n")
                readme.write("\n")
                readme.write(html)
        except IOError as exc:
            print("I/O error({0}): {1}".format(exc.errno, exc.strerror))
        except:
            print("Unexpected error:", sys.exc_info()[0])
            raise
        exit(0)

    if options.update_packs:
        from tools.arm_pack_manager import Cache
        cache = Cache(True, True)
        cache.cache_everything()

    # Target
    if not options.mcu:
        args_error(parser, "argument -m/--mcu is required")

    # Toolchain
    if not options.ide:
        args_error(parser, "argument -i is required")

    # Clean Export Directory
    if options.clean:
        if exists(EXPORT_DIR):
            rmtree(EXPORT_DIR)

    # Only zip the exported project when exporting a built-in program
    zip_proj = not bool(options.source_dir)

    notify = TerminalNotifier()

    if (options.program is None) and (not options.source_dir):
        args_error(parser, "one of -p, -n, or --source is required")

    exporter, toolchain_name = get_exporter_toolchain(options.ide)
    mcu = extract_mcus(parser, options)[0]
    if not exporter.is_target_supported(mcu):
        args_error(parser, "%s not supported by %s" % (mcu, options.ide))

    profile = extract_profile(parser, options, toolchain_name,
                              fallback="debug")
    if options.clean:
        # Let every exporter remove its own generated files; best effort
        for cls in EXPORTERS.values():
            try:
                cls.clean(basename(abspath(options.source_dir[0])))
            except (NotImplementedError, IOError, OSError):
                pass
        for f in list(EXPORTERS.values())[0].CLEAN_FILES:
            try:
                remove(f)
            except (IOError, OSError):
                pass
    try:
        export(mcu, options.ide, build=options.build,
               src=options.source_dir, macros=options.macros,
               project_id=options.program, zip_proj=zip_proj,
               build_profile=profile, app_config=options.app_config,
               export_path=options.build_dir, notify=notify)
    except NotSupportedException as exc:
        print("[ERROR] %s" % str(exc))
        # Fail early if the toolchain binary cannot be located on this host
        if not TOOLCHAIN_CLASSES[toolchain].check_executable():
            search_path = TOOLCHAIN_PATHS[toolchain] or "No path set"
            args_error(parser,
                       "Could not find executable for %s.\n"
                       "Currently set search path: %s"
                       % (toolchain, search_path))

    for toolchain in toolchains:
        for target in targets:
            tt_id = "%s::%s" % (toolchain, target)
            if toolchain not in TARGET_MAP[target].supported_toolchains:
                # Log this later
                print("%s skipped: toolchain not supported" % tt_id)
                skipped.append(tt_id)
            else:
                try:
                    notifier = TerminalNotifier(options.verbose,
                                                options.silent)
                    mcu = TARGET_MAP[target]
                    profile = extract_profile(parser, options, toolchain)

                    # Secure PSA targets are built from the full tree as a
                    # library rather than via build_mbed_libs
                    if mcu.is_PSA_secure_target:
                        lib_build_res = build_library(
                            ROOT, options.build_dir, mcu, toolchain,
                            jobs=options.jobs,
                            clean=options.clean,
                            archive=(not options.no_archive),
                            macros=options.macros,
                            name=options.artifact_name,
                            build_profile=profile,
                            ignore=options.ignore,
                            notify=notifier,
                        )
with open(join("export_info.json"), "r") as export_info_file: export_info_data = json.load(export_info_file) region_list = [Region(*r) for r in export_info_data.get("region_list", [])] for index, region in enumerate(copy(region_list)): if region.name == "application": region_data = region._asdict() region_data["filename"] = hex_file region_list[index] = Region(**region_data) break else: raise Exception("No application region found") notify = TerminalNotifier() restrict_size = export_info_data.get("target", {}).get("restrict_size") merge_region_list(region_list, combined_hex_file, notify, restrict_size=restrict_size) update_regions = [r for r in region_list if r.name in UPDATE_WHITELIST] if update_regions: update_res = normpath( join(output_directory, generate_update_filename(output_name, None))) merge_region_list(update_regions, update_res, notify,