def get_target_config_data(self):
    # We consider the resolution order for our target and sort it by level
    # reversed, so that we first look at the top level target (the parent),
    # then its direct children, then the children's children and so on,
    # until we reach self.target
    # TODO: this might not work so well in some multiple inheritance scenarios
    # At each step, look at two keys of the target data:
    #   - config_parameters: used to define new configuration parameters
    #   - config_overrides: used to override already defined configuration
    #     parameters
    params, json_data = {}, Target.get_json_target_data()
    resolution_order = [e[0] for e in
                        sorted(Target.get_target(self.target).resolution_order,
                               key=lambda e: e[1], reverse=True)]
    for tname in resolution_order:
        # Read the target data directly from its description
        t = json_data[tname]
        # Process definitions first
        self._process_config_parameters(t.get("config", {}), params, tname,
                                        "target")
        # Then process overrides
        for name, v in t.get("overrides", {}).items():
            full_name = ConfigParameter.get_full_name(name, tname, "target")
            # If the parameter name is not defined, or if there isn't a path
            # from this target to the target where the parameter was defined
            # in the target inheritance tree, raise an error.
            # We need to use 'defined_by[7:]' to remove the "target:" prefix
            # from defined_by
            if (full_name not in params) or \
               (params[full_name].defined_by[7:] not in
                    Target.get_target(tname).resolution_order_names):
                raise ConfigException(
                    "Attempt to override undefined parameter '%s' in '%s'"
                    % (name, ConfigParameter.get_display_name(tname, "target")))
            # Otherwise update the value of the parameter
            params[full_name].set_value(v, tname, "target")
    return params
def __init__(self, target, top_level_dirs=None):
    app_config_location = None
    for s in (top_level_dirs or []):
        full_path = os.path.join(s, self.__mbed_app_config_name)
        if os.path.isfile(full_path):
            if app_config_location is not None:
                raise ConfigException(
                    "Duplicate '%s' file in '%s' and '%s'"
                    % (self.__mbed_app_config_name, app_config_location,
                       full_path))
            else:
                app_config_location = full_path
    self.app_config_data = json_file_to_dict(app_config_location) \
        if app_config_location else {}
    # Check the keys in the application configuration data
    unknown_keys = set(self.app_config_data.keys()) - \
        self.__allowed_keys["application"]
    if unknown_keys:
        raise ConfigException(
            "Unknown key(s) '%s' in %s"
            % (",".join(unknown_keys), self.__mbed_app_config_name))
    # Update the list of targets with the ones defined in the application
    # config, if applicable
    Target.add_py_targets(self.app_config_data.get("custom_targets", {}))
    self.lib_config_data = {}
    # Make sure that each config is processed only once
    self.processed_configs = {}
    self.target = target if isinstance(target, basestring) else target.name
    self.target_labels = Target.get_target(self.target).get_labels()
    self.cumulative_overrides = {
        key: ConfigCumulativeOverride(key)
        for key in Target._Target__cumulative_attributes
    }
    self._process_config_and_overrides(self.app_config_data, {}, "app",
                                       "application")
    self.target_labels = Target.get_target(self.target).get_labels()
def test_modify_existing_target():
    """Set default targets file, then override base Target definition"""
    initial_target_json = """
{
    "Target": {
        "core": null,
        "default_toolchain": "ARM",
        "supported_toolchains": null,
        "extra_labels": [],
        "is_disk_virtual": false,
        "macros": [],
        "device_has": [],
        "features": [],
        "detect_code": [],
        "public": false,
        "default_lib": "std",
        "bootloader_supported": false
    },
    "Test_Target": {
        "inherits": ["Target"],
        "core": "Cortex-M4",
        "supported_toolchains": ["ARM"]
    }
}"""

    test_target_json = """
{
    "Target": {
        "core": "Cortex-M0",
        "default_toolchain": "GCC_ARM",
        "supported_toolchains": null,
        "extra_labels": [],
        "is_disk_virtual": false,
        "macros": [],
        "device_has": [],
        "features": [],
        "detect_code": [],
        "public": false,
        "default_lib": "std",
        "bootloader_supported": true
    }
}
"""
    with temp_target_file(initial_target_json,
                          json_filename="targets.json") as targets_dir:
        Target.set_targets_json_location(
            os.path.join(targets_dir, "targets.json"))
        update_target_data()
        assert TARGET_MAP["Test_Target"].core == "Cortex-M4"
        assert TARGET_MAP["Test_Target"].default_toolchain == 'ARM'
        assert TARGET_MAP["Test_Target"].bootloader_supported == False

        with temp_target_file(test_target_json) as source_dir:
            Target.add_extra_targets(source_dir=source_dir)
            update_target_data()
            assert TARGET_MAP["Test_Target"].core == "Cortex-M4"
            # The existing target should not be modified by custom targets
            assert TARGET_MAP["Test_Target"].default_toolchain != 'GCC_ARM'
            assert TARGET_MAP["Test_Target"].bootloader_supported != True
def get_features(self):
    params, _ = self.get_config_data()
    self._check_required_parameters(params)
    self.cumulative_overrides['features'].update_target(
        Target.get_target(self.target))
    features = Target.get_target(self.target).features
    for feature in features:
        if feature not in self.__allowed_features:
            raise ConfigException(
                "Feature '%s' is not a supported feature" % feature)
    return features
def __init__(self, target, top_level_dirs=None):
    """Construct a mbed configuration

    Positional arguments:
    target - the name of the mbed target used for this configuration
             instance

    Keyword arguments:
    top_level_dirs - a list of top level source directories (where
                     mbed_app_config.json could be found)

    NOTE: Construction of a Config object will look for the application
    configuration file in top_level_dirs. If found once, it'll parse it
    and check if it has a custom_targets function. If it does, it'll
    update the list of targets as needed. If more than one config file is
    found, an exception is raised. top_level_dirs may be None (in this
    case, the constructor will not search for a configuration file)
    """
    app_config_location = None
    for directory in top_level_dirs or []:
        full_path = os.path.join(directory, self.__mbed_app_config_name)
        if os.path.isfile(full_path):
            if app_config_location is not None:
                raise ConfigException(
                    "Duplicate '%s' file in '%s' and '%s'"
                    % (self.__mbed_app_config_name, app_config_location,
                       full_path))
            else:
                app_config_location = full_path
    self.app_config_data = json_file_to_dict(app_config_location) \
        if app_config_location else {}

    # Check the keys in the application configuration data
    unknown_keys = set(self.app_config_data.keys()) - \
        self.__allowed_keys["application"]
    if unknown_keys:
        raise ConfigException(
            "Unknown key(s) '%s' in %s"
            % (",".join(unknown_keys), self.__mbed_app_config_name))

    # Update the list of targets with the ones defined in the application
    # config, if applicable
    Target.add_py_targets(self.app_config_data.get("custom_targets", {}))

    self.lib_config_data = {}
    # Make sure that each config is processed only once
    self.processed_configs = {}
    self.target = target if isinstance(target, basestring) else target.name
    self.target_labels = Target.get_target(self.target).get_labels()

    self.cumulative_overrides = {key: ConfigCumulativeOverride(key)
                                 for key in Target.cumulative_attributes}

    self._process_config_and_overrides(self.app_config_data, {}, "app",
                                       "application")
    self.target_labels = Target.get_target(self.target).get_labels()
    self.config_errors = None
def extract_mcus(parser, options):
    try:
        if options.source_dir:
            for source_dir in options.source_dir:
                Target.add_extra_targets(source_dir)
            update_target_data()
    except KeyError:
        pass
    targetnames = TARGET_NAMES
    targetnames.sort()
    try:
        return argparse_many(
            argparse_force_uppercase_type(targetnames, "MCU"))(options.mcu)
    except ArgumentTypeError as exc:
        args_error(parser, "argument -m/--mcu: {}".format(str(exc)))
def test_add_extra_targets():
    """Search for extra targets json in a source folder"""
    test_target_json = """
{
    "Test_Target": {
        "inherits": ["Target"]
    }
}
"""
    with temp_target_file(test_target_json) as source_dir:
        Target.add_extra_targets(source_dir=source_dir)
        update_target_data()
        assert 'Test_Target' in TARGET_MAP
        assert TARGET_MAP['Test_Target'].core is None, \
            "attributes should be inherited from Target"
@contextmanager
def temp_target_file(extra_target, json_filename='custom_targets.json'):
    """Create an extra targets temp file in a context manager

    :param extra_target: the contents of the extra targets temp file
    """
    tempdir = tempfile.mkdtemp()
    try:
        targetfile = os.path.join(tempdir, json_filename)
        with open(targetfile, 'w') as f:
            f.write(extra_target)
        yield tempdir
    finally:
        # Reset extra targets
        Target.set_targets_json_location()
        # Delete temp files
        shutil.rmtree(tempdir)
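# A hedged usage sketch for temp_target_file() above: a hypothetical test
# that registers a custom target, assuming the mbed tools package is
# importable as tools.targets (as the snippets in this file do).
from tools.targets import Target, TARGET_MAP, update_target_data

def test_custom_target_is_registered():
    custom_json = '{"My_Target": {"inherits": ["Target"], "core": "Cortex-M0"}}'
    with temp_target_file(custom_json) as source_dir:
        # add_extra_targets() picks up custom_targets.json from source_dir
        Target.add_extra_targets(source_dir=source_dir)
        update_target_data()
        assert "My_Target" in TARGET_MAP
    # on exit the context resets the targets.json location and deletes
    # the temporary directory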
def get_target_config_data(self):
    """Read and interpret configuration data defined by targets.

    We consider the resolution order for our target and sort it by level
    reversed, so that we first look at the top level target (the parent),
    then its direct children, then the children of those children and so
    on, until we reach self.target
    TODO: this might not work so well in some multiple inheritance
    scenarios
    At each step, look at two keys of the target data:
        - config_parameters: used to define new configuration parameters
        - config_overrides: used to override already defined configuration
          parameters

    Arguments: None
    """
    params, json_data = {}, Target.get_json_target_data()
    resolution_order = [
        e[0] for e in sorted(Target.get_target(self.target).resolution_order,
                             key=lambda e: e[1], reverse=True)
    ]
    for tname in resolution_order:
        # Read the target data directly from its description
        target_data = json_data[tname]
        # Process definitions first
        _process_config_parameters(target_data.get("config", {}), params,
                                   tname, "target")
        # Then process overrides
        for name, val in target_data.get("overrides", {}).items():
            full_name = ConfigParameter.get_full_name(name, tname, "target")
            # If the parameter name is not defined, or if there isn't a
            # path from this target to the target where the parameter was
            # defined in the target inheritance tree, raise an error.
            # We need to use 'defined_by[7:]' to remove the "target:"
            # prefix from defined_by
            if (full_name not in params) or \
               (params[full_name].defined_by[7:] not in
                    Target.get_target(tname).resolution_order_names):
                raise ConfigException(
                    "Attempt to override undefined parameter '%s' in '%s'"
                    % (name,
                       ConfigParameter.get_display_name(tname, "target")))
            # Otherwise update the value of the parameter
            params[full_name].set_value(val, tname, "target")
    return params
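# Minimal sketch (with hypothetical data) of the resolution-order sort used
# by get_target_config_data() above: resolution_order holds
# (target_name, inheritance_level) pairs, and sorting by level in reverse
# visits the most distant ancestor first and self.target last, so
# parameters defined high in the hierarchy can be overridden lower down.
resolution_order = [("MyBoard", 0), ("FAMILY_STM32", 1), ("Target", 2)]
ordered_names = [e[0] for e in
                 sorted(resolution_order, key=lambda e: e[1], reverse=True)]
assert ordered_names == ["Target", "FAMILY_STM32", "MyBoard"]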
def get_features(self):
    """ Extract any features from the configuration data

    Arguments: None
    """
    params, _ = self.get_config_data()
    self._check_required_parameters(params)
    self.cumulative_overrides['features']\
        .update_target(Target.get_target(self.target))
    features = Target.get_target(self.target).features
    for feature in features:
        if feature not in self.__allowed_features:
            raise ConfigException(
                "Feature '%s' is not a supported feature" % feature)
    return features
def _psa_backend(target):
    """
    Returns a target PSA backend.

    :param target: Target name as in targets.json
    :return: PSA backend as string (TFM/MBED_SPM)
    """
    return 'TFM' if 'TFM' in Target.get_target(target).labels else 'MBED_SPM'
def _get_psa_secure_targets_list():
    """
    Creates a list of PSA secure targets.

    :return: List of PSA secure targets.
    """
    return [str(t) for t in TARGET_NAMES
            if Target.get_target(t).is_PSA_secure_target]
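# Usage sketch for the two PSA helpers above, assuming they live in a
# module where tools.targets is importable: pair every secure target with
# the backend it builds against.
for name in _get_psa_secure_targets_list():
    print("%s -> %s" % (name, _psa_backend(name)))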
def check_extra_labels(target_data):
    """Check that extra_labels does not contain any Target names

    is a generator for errors
    """
    for label in (target_data.get("extra_labels", []) +
                  target_data.get("extra_labels_add", [])):
        if label in Target.get_json_target_data():
            yield "%s is not allowed in extra_labels" % label
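# check_extra_labels() is a generator, so a caller drains it to collect
# lint messages rather than catching exceptions. A sketch with hypothetical
# data ("Target" is always a targets.json name, so it is reported here):
suspect = {"extra_labels": ["MY_BSP"], "extra_labels_add": ["Target"]}
for message in check_extra_labels(suspect):
    print(message)  # -> "Target is not allowed in extra_labels"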
def orphans_cmd():
    """Find and print all orphan targets"""
    orphans = Target.get_json_target_data().keys()
    for tgt in TARGET_MAP.values():
        for name in tgt.resolution_order_names:
            if name in orphans:
                orphans.remove(name)
    if orphans:
        print dump_all([orphans], default_flow_style=False)
    return len(orphans)
def get_mbed_official_psa_release():
    psa_targets_release_list = []
    psa_secure_targets = [t for t in TARGET_NAMES
                          if Target.get_target(t).is_PSA_secure_target]
    for t in psa_secure_targets:
        psa_targets_release_list.append(
            tuple([TARGET_MAP[t].name, TARGET_MAP[t].default_toolchain]))
    return psa_targets_release_list
def get_features(self):
    params, _ = self.get_config_data()
    self._check_required_parameters(params)
    features = ((set(Target.get_target(self.target).features)
                 | self.added_features) - self.removed_features)
    for feature in features:
        if feature not in self.__allowed_features:
            raise ConfigException(
                "Feature '%s' is not a supported feature" % feature)
    return features
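# Sketch of the set arithmetic this get_features() variant performs: the
# target's own feature list is extended by added_features and pruned by
# removed_features before validation. Feature names here are hypothetical.
target_features = {"LWIP"}
added_features = {"STORAGE"}
removed_features = {"LWIP"}
assert (target_features | added_features) - removed_features == {"STORAGE"}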
def main():
    """ Perform the main function of this program """
    if not os.path.exists(mbed_os_dir):
        print("Fatal: mbed-os directory does not exist.")
        print("Try running 'mbed deploy'")
        sys.exit(1)

    description = """
Generate pins.js for a specified mbed board, using target definitions from
the mbed OS source tree.
"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('board', help='mbed board name')
    parser.add_argument('-o',
                        help='Output JavaScript file (default: %(default)s)',
                        default=os.path.join(os.path.dirname(__file__),
                                             '../build', 'pins.js'),
                        type=argparse.FileType('w'))
    parser.add_argument('-c',
                        help='Output C++ file (default: %(default)s)',
                        default=os.path.join(os.path.dirname(__file__),
                                             '../build', 'pins.cpp'),
                        type=argparse.FileType('w'))
    args = parser.parse_args()

    board_name = args.board.upper()
    target = Target.get_target(board_name)
    directory_labels = (['TARGET_' + label for label in target.labels]
                        + target.macros)

    targets_dir = os.path.join(mbed_os_dir, 'targets')
    pins_file = find_file(targets_dir, directory_labels, 'PinNames.h')
    includes = enumerate_includes(targets_dir, directory_labels)
    defines = list(directory_labels)

    # enumerate pins from PinNames.h
    pins = enumerate_pins(pins_file,
                          [os.path.dirname(__file__)] + list(includes),
                          defines)

    # sort by name length first, then alphabetically within the same length
    pins = [(x, pins[x]) for x in pins]  # turn dict into sortable tuples
    pins = sorted(pins, key=lambda x: (len(x[0]), x[0].lower()))

    write_pins_to_files(pins, args.o, args.c)
def __init__(self, target, top_level_dirs=None):
    app_config_location = None
    for s in (top_level_dirs or []):
        full_path = os.path.join(s, self.__mbed_app_config_name)
        if os.path.isfile(full_path):
            if app_config_location is not None:
                raise ConfigException(
                    "Duplicate '%s' file in '%s' and '%s'"
                    % (self.__mbed_app_config_name, app_config_location,
                       full_path))
            else:
                app_config_location = full_path
    self.app_config_data = json_file_to_dict(app_config_location) \
        if app_config_location else {}
    # Check the keys in the application configuration data
    unknown_keys = set(self.app_config_data.keys()) - \
        self.__allowed_keys["application"]
    if unknown_keys:
        raise ConfigException(
            "Unknown key(s) '%s' in %s"
            % (",".join(unknown_keys), self.__mbed_app_config_name))
    # Update the list of targets with the ones defined in the application
    # config, if applicable
    Target.add_py_targets(self.app_config_data.get("custom_targets", {}))
    self.lib_config_data = {}
    # Make sure that each config is processed only once
    self.processed_configs = {}
    self.target = target if isinstance(target, str) else target.name
    self.target_labels = Target.get_target(self.target).get_labels()
    self.added_features = set()
    self.removed_features = set()
    self.removed_unecessary_features = False
def main():
    """ Perform the main function of this program """
    if not os.path.exists('./mbed-os'):
        print("Fatal: mbed-os directory does not exist.")
        print("Try running 'make getlibs'")
        sys.exit(1)

    description = """
Generate pins.js for a specified mbed board, using target definitions from
the mbed OS source tree.
"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('board', help='mbed board name')
    parser.add_argument('-o',
                        help='Output JavaScript file (default: %(default)s)',
                        default='js/pins.js',
                        type=argparse.FileType('w'))
    parser.add_argument('-c',
                        help='Output C++ file (default: %(default)s)',
                        default='source/pins.cpp',
                        type=argparse.FileType('w'))
    args = parser.parse_args()

    board_name = args.board.upper()
    target = Target.get_target(board_name)
    directory_labels = (['TARGET_' + label for label in target.labels]
                        + target.macros)

    targets_dir = os.path.join('.', 'mbed-os', 'targets')
    pins_file = find_file(targets_dir, directory_labels, 'PinNames.h')
    includes = enumerate_includes(targets_dir, directory_labels)
    defines = list(directory_labels)

    # enumerate pins from PinNames.h
    pins = enumerate_pins(pins_file, ['./tools'] + list(includes), defines)

    # sort by name length first, then alphabetically within the same length
    pins = [(x, pins[x]) for x in pins]  # turn dict into sortable tuples
    pins = sorted(pins, key=lambda x: (len(x[0]), x[0].lower()))

    write_pins_to_files(pins, args.o, args.c)
def main():
    """ Perform the main function of this program """
    if not os.path.exists('./mbed-os'):
        print("Fatal: mbed-os directory does not exist.")
        print("Try running 'make getlibs'")
        sys.exit(1)

    description = """
Generate pins.cpp for a specified mbed board, using target definitions from
the mbed OS source tree.
"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('board', help='mbed board name')
    parser.add_argument('-c',
                        help='Output C++ file (default: %(default)s)',
                        default='source/pins.cpp',
                        type=argparse.FileType('w'))
    args = parser.parse_args()

    board_name = args.board.upper()
    target = Target.get_target(board_name)
    directory_labels = (['TARGET_' + label for label in target.labels]
                        + target.macros)

    targets_dir = os.path.join('.', 'mbed-os', 'targets')
    pins_file = find_file(targets_dir, directory_labels, 'PinNames.h')
    includes = enumerate_includes(targets_dir, directory_labels)
    defines = list(directory_labels)

    # enumerate pins from PinNames.h
    pins = enumerate_pins(pins_file, ['./tools'] + list(includes), defines)

    # sort pin names by length first, then alphabetically within the same length
    pins = sorted(pins, key=lambda x: (len(x), x.lower()))

    write_pins_to_file(pins, pins_file, args.c)
def get_mbed_official_psa_release(target=None):
    """
    Creates a list of PSA targets with default toolchain and
    artifact delivery directory.

    :param target: Ask for specific target, None for all targets.
    :return: List of tuples (target, toolchain, delivery directory).
    """
    psa_targets_release_list = []
    psa_secure_targets = [t for t in TARGET_NAMES
                          if Target.get_target(t).is_PSA_secure_target]
    if target is not None:
        if target not in psa_secure_targets:
            raise Exception("{} is not a PSA secure target".format(target))
        psa_targets_release_list.append(_get_target_info(target))
    else:
        for t in psa_secure_targets:
            # Must pass the loop variable here, not `target`, which is
            # None on this branch
            psa_targets_release_list.append(_get_target_info(t))
    return psa_targets_release_list
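# Usage sketch, assuming _get_target_info() returns the documented
# (target, toolchain, delivery directory) tuples:
for name, toolchain, delivery_dir in get_mbed_official_psa_release():
    print(name, toolchain, delivery_dir)
# Asking for a target that is not PSA secure raises, e.g.:
#     get_mbed_official_psa_release(target="K64F")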
def _process_config_and_overrides(self, data, params, unit_name, unit_kind):
    self.config_errors = []
    self._process_config_parameters(data.get("config", {}), params,
                                    unit_name, unit_kind)
    for label, overrides in data.get("target_overrides", {}).items():
        # If the label is defined by the target or it has the special
        # value "*", process the overrides
        if (label == '*') or (label in self.target_labels):
            # Check for invalid cumulative overrides in libraries
            if (unit_kind == 'library' and
                    any(attr.startswith('target.extra_labels')
                        for attr in overrides.iterkeys())):
                raise ConfigException(
                    "Target override '%s' in '%s' is only allowed at the "
                    "application level"
                    % ("target.extra_labels",
                       ConfigParameter.get_display_name(unit_name,
                                                        unit_kind, label)))
            # Parse out cumulative overrides
            for attr, cumulatives in self.cumulative_overrides.iteritems():
                if 'target.' + attr in overrides:
                    cumulatives.strict_cumulative_overrides(
                        overrides['target.' + attr])
                    del overrides['target.' + attr]
                if 'target.' + attr + '_add' in overrides:
                    cumulatives.add_cumulative_overrides(
                        overrides['target.' + attr + '_add'])
                    del overrides['target.' + attr + '_add']
                if 'target.' + attr + '_remove' in overrides:
                    cumulatives.remove_cumulative_overrides(
                        overrides['target.' + attr + '_remove'])
                    del overrides['target.' + attr + '_remove']
            # Consider the others as overrides
            for name, v in overrides.items():
                # Get the full name of the parameter
                full_name = ConfigParameter.get_full_name(name, unit_name,
                                                          unit_kind, label)
                if full_name in params:
                    params[full_name].set_value(v, unit_name, unit_kind,
                                                label)
                else:
                    self.config_errors.append(ConfigException(
                        "Attempt to override undefined parameter '%s' "
                        "in '%s'"
                        % (full_name,
                           ConfigParameter.get_display_name(unit_name,
                                                            unit_kind,
                                                            label))))
    for cumulatives in self.cumulative_overrides.itervalues():
        cumulatives.update_target(Target.get_target(self.target))
    return params
else:
    all_paths = ["."]
all_tests = {}
tests = {}

# As default both test tools are enabled
if not (options.greentea or options.icetea):
    options.greentea = True
    options.icetea = True

# Target
if options.mcu is None:
    args_error(parser, "argument -m/--mcu is required")
mcu = extract_mcus(parser, options)[0]
mcu_secured = Target.get_target(mcu).is_PSA_secure_target

# Toolchain
if options.tool is None:
    args_error(parser, "argument -t/--tool is required")
toolchain = options.tool[0]

if not TOOLCHAIN_CLASSES[toolchain].check_executable():
    search_path = TOOLCHAIN_PATHS[toolchain] or "No path set"
    args_error(parser,
               "Could not find executable for %s.\n"
               "Currently set search path: %s"
               % (toolchain, search_path))

# Assign config file. Precedence: test_config>app_config
# TODO: merge configs if both given
if options.test_config:
def main():
    """Entry point"""
    # Parse Options
    options, parser = get_args(sys.argv[1:])

    # Print available tests in order and exit
    if options.list_tests:
        print('\n'.join(str(test) for test in sorted(TEST_MAP.values())))
    elif options.supported_ides:
        if options.supported_ides == "matrix":
            print_large_string(mcu_ide_matrix())
        elif options.supported_ides == "ides":
            print(mcu_ide_list())
    elif options.supported_ides_html:
        html = mcu_ide_matrix(verbose_html=True)
        with open("README.md", "w") as readme:
            readme.write("Exporter IDE/Platform Support\n")
            readme.write("-----------------------------------\n")
            readme.write("\n")
            readme.write(html)
    elif options.update_packs:
        from tools.arm_pack_manager import Cache
        cache = Cache(True, True)
        cache.cache_everything()
    else:
        # Check required arguments
        if not options.mcu:
            args_error(parser, "argument -m/--mcu is required")
        if not options.ide:
            args_error(parser, "argument -i is required")
        if (options.program is None) and (not options.source_dir):
            args_error(parser, "one of -p, -n, or --source is required")

        if options.clean:
            clean(options.source_dir)
            clean_psa_autogen()

        ide = resolve_exporter_alias(options.ide)
        exporter, toolchain_name = get_exporter_toolchain(ide)
        profile = extract_profile(parser, options, toolchain_name,
                                  fallback="debug")
        mcu = extract_mcus(parser, options)[0]
        if not exporter.is_target_supported(mcu):
            args_error(parser, "%s not supported by %s" % (mcu, ide))

        try:
            target = Target.get_target(mcu)
            if target.is_PSA_target:
                generate_psa_sources(source_dirs=options.source_dir,
                                     ignore_paths=[])

            resource_filter = None
            if target.is_PSA_secure_target:
                resource_filter = OsAndSpeResourceFilter()

            export(mcu, ide, build=options.build, src=options.source_dir,
                   macros=options.macros, project_id=options.program,
                   zip_proj=not bool(options.source_dir) or options.zip,
                   build_profile=profile, app_config=options.app_config,
                   export_path=options.build_dir, ignore=options.ignore,
                   resource_filter=resource_filter)
        except NotSupportedException as exc:
            print("[Not Supported] %s" % str(exc))
            exit(1)
    exit(0)
def extract_project_info(self, generate_config=False):
    """Extract comprehensive information in order to build a PlatformIO
    project

    src_paths - a list of paths that contain needed files to build project
    build_path - a path where mbed_config.h will be created
    target - suitable mbed target name
    framework_path - path to the root folder of the mbed framework package
    app_config - path to mbed_app.json
    ignore_dirs - doesn't work with GCC at the moment?
    """
    # Default values for mbed build api functions
    if self.custom_target_path and isfile(
            join(self.custom_target_path, "custom_targets.json")):
        print("Detected custom target file")
        Target.add_extra_targets(source_dir=self.custom_target_path)
        update_target_data()

    target = self.get_target_config()
    build_profile = self.get_build_profile()
    jobs = 1  # how many compilers we can run at once
    name = None  # the name of the project
    dependencies_paths = None  # libraries location to include when linking
    macros = None  # additional macros
    inc_dirs = None  # additional dirs where include files may be found
    ignore = self.ignore_dirs  # list of paths to add to mbedignore
    clean = False  # Rebuild everything if True

    # For cases when project and framework are on different
    # logic drives (Windows only)
    backup_cwd = os.getcwd()
    os.chdir(self.framework_path)

    # Convert src_path to a list if needed
    if not isinstance(self.src_paths, list):
        self.src_paths = [self.src_paths]
    self.src_paths = [relpath(s) for s in self.src_paths]

    # Pass all params to the unified prepare_toolchain()
    self.toolchain = prepare_toolchain(
        self.src_paths, self.build_path, target, self.toolchain_name,
        macros=macros, clean=clean, jobs=jobs, notify=self.notify,
        app_config=self.app_config, build_profile=build_profile,
        ignore=ignore)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(self.src_paths[0])))

    # Disabled for legacy libraries
    # for src_path in self.src_paths:
    #     if not exists(src_path):
    #         error_msg = "The library src folder doesn't exist:%s", src_path
    #         raise Exception(error_msg)

    self.resources = MbedResourcesFixedPath(
        self.framework_path, self.notify).scan_with_toolchain(
            self.src_paths, self.toolchain, dependencies_paths,
            inc_dirs=inc_dirs)

    src_files = (
        self.resources.s_sources +
        self.resources.c_sources +
        self.resources.cpp_sources
    )

    if generate_config:
        self.generate_mbed_config_file()

    # Revert back project cwd
    os.chdir(backup_cwd)

    result = {
        "src_files": src_files,
        "inc_dirs": self.resources.inc_dirs,
        "ldscript": [self.resources.linker_script],
        "objs": self.resources.objects,
        "build_flags": {k: sorted(v)
                        for k, v in self.toolchain.flags.items()},
        "libs": [basename(l) for l in self.resources.libraries],
        "lib_paths": self.resources.lib_dirs,
        "syslibs": self.toolchain.sys_libs,
        "build_symbols": self.process_symbols(self.toolchain.get_symbols()),
        "hex": self.resources.hex_files,
        "bin": self.resources.bin_files
    }
    return result
else:
    all_paths = ["."]
all_tests = {}
tests = {}

# As default both test tools are enabled
if not (options.greentea or options.icetea):
    options.greentea = True
    options.icetea = True

# Target
if options.mcu is None:
    args_error(parser, "argument -m/--mcu is required")
mcu = extract_mcus(parser, options)[0]
target = Target.get_target(mcu)
mcu_secured = target.is_PSA_secure_target

# Toolchain
if options.tool is None:
    args_error(parser, "argument -t/--tool is required")
toolchain = options.tool[0]

toolchain_name = get_toolchain_name(target, toolchain)
if not TOOLCHAIN_CLASSES[toolchain_name].check_executable():
    search_path = TOOLCHAIN_PATHS[toolchain_name] or "No path set"
    args_error(parser,
               "Could not find executable for %s.\n"
               "Currently set search path: %s"
               % (toolchain_name, search_path))
def _process_config_and_overrides(self, data, params, unit_name, unit_kind): """Process "config_parameters" and "target_config_overrides" into a given dictionary Positional arguments: data - the configuration data of the library/appliation params - storage for the discovered configuration parameters unit_name - the unit (library/application) that defines this parameter unit_kind - the kind of the unit ("library" or "application") """ self.config_errors = [] _process_config_parameters(data.get("config", {}), params, unit_name, unit_kind) for label, overrides in data.get("target_overrides", {}).items(): # If the label is defined by the target or it has the special value # "*", process the overrides if (label == '*') or (label in self.target_labels): # Check for invalid cumulative overrides in libraries if (unit_kind == 'library' and any(attr.startswith('target.extra_labels') for attr in overrides.iterkeys())): raise ConfigException( "Target override 'target.extra_labels' in " + ConfigParameter.get_display_name(unit_name, unit_kind, label) + " is only allowed at the application level") # Parse out cumulative overrides for attr, cumulatives in self.cumulative_overrides.iteritems(): if 'target.'+attr in overrides: cumulatives.strict_cumulative_overrides( overrides['target.'+attr]) del overrides['target.'+attr] if 'target.'+attr+'_add' in overrides: cumulatives.add_cumulative_overrides( overrides['target.'+attr+'_add']) del overrides['target.'+attr+'_add'] if 'target.'+attr+'_remove' in overrides: cumulatives.remove_cumulative_overrides( overrides['target.'+attr+'_remove']) del overrides['target.'+attr+'_remove'] # Consider the others as overrides for name, val in overrides.items(): # Get the full name of the parameter full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind, label) if full_name in params: params[full_name].set_value(val, unit_name, unit_kind, label) else: self.config_errors.append( ConfigException( "Attempt to override undefined parameter" + (" '%s' in '%s'" % (full_name, ConfigParameter.get_display_name(unit_name, unit_kind, label))))) for cumulatives in self.cumulative_overrides.itervalues(): cumulatives.update_target(Target.get_target(self.target)) return params
parser = argparse.ArgumentParser(description=description)
parser.add_argument('board', help='mbed board name')
parser.add_argument('-o',
                    help='Output JavaScript file (default: %(default)s)',
                    default='js/pins.js',
                    type=argparse.FileType('w'))
parser.add_argument('-c',
                    help='Output C++ file (default: %(default)s)',
                    default='source/pins.cpp',
                    type=argparse.FileType('w'))
args = parser.parse_args()

board_name = args.board.upper()
target = Target.get_target(board_name)
directory_labels = (['TARGET_' + label for label in target.labels]
                    + target.macros)

targets_dir = os.path.join('.', 'mbed-os', 'targets')
pins_file = find_file(targets_dir, directory_labels, 'PinNames.h')
includes = enumerate_includes(targets_dir, directory_labels)
defines = list(directory_labels)

# enumerate pins from PinNames.h
pins = enumerate_pins(pins_file, ['./tools'] + list(includes), defines)

# sort by name length first, then alphabetically within the same length
pins = [(x, pins[x]) for x in pins]  # turn dict into tuples, which can be sorted
def _process_config_and_overrides(self, data, params, unit_name, unit_kind): """Process "config_parameters" and "target_config_overrides" into a given dictionary Positional arguments: data - the configuration data of the library/appliation params - storage for the discovered configuration parameters unit_name - the unit (library/application) that defines this parameter unit_kind - the kind of the unit ("library" or "application") """ self.config_errors = [] _process_config_parameters(data.get("config", {}), params, unit_name, unit_kind) for label, overrides in data.get("target_overrides", {}).items(): # If the label is defined by the target or it has the special value # "*", process the overrides if (label == '*') or (label in self.target_labels): # Check for invalid cumulative overrides in libraries if (unit_kind == 'library' and any( attr.startswith('target.extra_labels') for attr in overrides.iterkeys())): raise ConfigException( "Target override 'target.extra_labels' in " + ConfigParameter.get_display_name( unit_name, unit_kind, label) + " is only allowed at the application level") # Parse out cumulative overrides for attr, cumulatives in self.cumulative_overrides.iteritems(): if 'target.' + attr in overrides: cumulatives.strict_cumulative_overrides( overrides['target.' + attr]) del overrides['target.' + attr] if 'target.' + attr + '_add' in overrides: cumulatives.add_cumulative_overrides( overrides['target.' + attr + '_add']) del overrides['target.' + attr + '_add'] if 'target.' + attr + '_remove' in overrides: cumulatives.remove_cumulative_overrides( overrides['target.' + attr + '_remove']) del overrides['target.' + attr + '_remove'] # Consider the others as overrides for name, val in overrides.items(): # Get the full name of the parameter full_name = ConfigParameter.get_full_name( name, unit_name, unit_kind, label) if full_name in params: params[full_name].set_value(val, unit_name, unit_kind, label) else: self.config_errors.append( ConfigException( "Attempt to override undefined parameter" + (" '%s' in '%s'" % (full_name, ConfigParameter.get_display_name( unit_name, unit_kind, label))))) for cumulatives in self.cumulative_overrides.itervalues(): cumulatives.update_target(Target.get_target(self.target)) return params
generated_path = os.path.join(project_root_dir, generated_rpath)
custom_target_dir = args.custom_target_dir
app_config_path = args.app_config
mbedignore_file = args.mbedignore

pathlib.Path(generated_path).mkdir(parents=True, exist_ok=True)
with open(os.path.join(generated_path, "do-not-modify.txt"), 'w') as do_not_modify:
    do_not_modify.write(
        "Files in this folder were generated by configure_for_target.py")

# Perform the scan of the Mbed OS dirs
# -------------------------------------------------------------------------

if custom_target_dir is not None:
    Target.add_extra_targets(custom_target_dir)

# profile constants
# list of all profile JSONs
profile_jsons = [
    os.path.join(mbed_os_dir, "tools/profiles/develop.json"),
    os.path.join(mbed_os_dir, "tools/profiles/debug.json"),
    os.path.join(mbed_os_dir, "tools/profiles/release.json")
]
# CMake build type matching each Mbed profile
profile_cmake_names = ["RELWITHDEBINFO", "DEBUG", "RELEASE"]

for target_name in target_names:
    print(">> Configuring build system for target: " + target_name)

    # Can NOT be the current directory, or it screws up some internal
    # regexes inside mbed tools.
def test_modify_existing_target(self):
    """Set default targets file, then override base Target definition"""
    initial_target_json = """
{
    "Target": {
        "core": null,
        "default_toolchain": "ARM",
        "supported_toolchains": null,
        "extra_labels": [],
        "is_disk_virtual": false,
        "macros": [],
        "device_has": [],
        "features": [],
        "detect_code": [],
        "public": false,
        "default_lib": "std",
        "bootloader_supported": false
    },
    "Test_Target": {
        "inherits": ["Target"],
        "core": "Cortex-M4",
        "supported_toolchains": ["ARM"]
    }
}"""

    test_target_json = """
{
    "Target": {
        "core": "Cortex-M0",
        "default_toolchain": "GCC_ARM",
        "supported_toolchains": null,
        "extra_labels": [],
        "is_disk_virtual": false,
        "macros": [],
        "device_has": [],
        "features": [],
        "detect_code": [],
        "public": false,
        "default_lib": "std",
        "bootloader_supported": true
    }
}
"""
    with self.temp_target_file(
            initial_target_json, json_filename="targets.json") as targets_dir:
        Target.set_targets_json_location(
            os.path.join(targets_dir, "targets.json"))
        update_target_data()
        assert TARGET_MAP["Test_Target"].core == "Cortex-M4"
        assert TARGET_MAP["Test_Target"].default_toolchain == 'ARM'
        assert TARGET_MAP["Test_Target"].bootloader_supported == False

        with self.temp_target_file(test_target_json) as source_dir:
            Target.add_extra_targets(source_dir=source_dir)
            update_target_data()
            assert TARGET_MAP["Test_Target"].core == "Cortex-M4"
            # The existing target should not be modified by custom targets
            assert TARGET_MAP["Test_Target"].default_toolchain != 'GCC_ARM'
            assert TARGET_MAP["Test_Target"].bootloader_supported != True
parser = argparse.ArgumentParser(description=description)
parser.add_argument('board', help='mbed board name')
parser.add_argument('-o',
                    help='Output JavaScript file (default: %(default)s)',
                    default='js/pins.js',
                    type=argparse.FileType('w'))
parser.add_argument('-c',
                    help='Output C++ file (default: %(default)s)',
                    default='source/pins.cpp',
                    type=argparse.FileType('w'))
args = parser.parse_args()

board_name = args.board.upper()
target = Target(board_name)
directory_labels = (['TARGET_' + label for label in target.get_labels()]
                    + target.macros)

targets_dir = os.path.join('.', 'mbed-os', 'hal', 'targets')
hal_dir = os.path.join(targets_dir, 'hal')
pins_file = find_file(hal_dir, directory_labels, 'PinNames.h')
includes = enumerate_includes(targets_dir, directory_labels)
defines = list(directory_labels)

# enumerate pins from PinNames.h
pins = enumerate_pins(pins_file, ['./tools'] + list(includes), defines)
def __init__(self, tgt, top_level_dirs=None, app_config=None):
    """Construct a mbed configuration

    Positional arguments:
    target - the name of the mbed target used for this configuration
             instance

    Keyword arguments:
    top_level_dirs - a list of top level source directories (where
                     mbed_app_config.json could be found)
    app_config - location of a chosen mbed_app.json file

    NOTE: Construction of a Config object will look for the application
    configuration file in top_level_dirs. If found once, it'll parse it
    and check if it has a custom_targets function. If it does, it'll
    update the list of targets as needed. If more than one config file is
    found, an exception is raised. top_level_dirs may be None (in this
    case, the constructor will not search for a configuration file)
    """
    app_config_location = app_config
    if app_config_location is None:
        for directory in top_level_dirs or []:
            full_path = os.path.join(directory,
                                     self.__mbed_app_config_name)
            if os.path.isfile(full_path):
                if app_config_location is not None:
                    raise ConfigException(
                        "Duplicate '%s' file in '%s' and '%s'"
                        % (self.__mbed_app_config_name,
                           app_config_location, full_path))
                else:
                    app_config_location = full_path
    try:
        self.app_config_data = json_file_to_dict(app_config_location) \
            if app_config_location else {}
    except ValueError as exc:
        sys.stderr.write(str(exc) + "\n")
        self.app_config_data = {}

    # Check the keys in the application configuration data
    unknown_keys = set(self.app_config_data.keys()) - \
        self.__allowed_keys["application"]
    if unknown_keys:
        raise ConfigException(
            "Unknown key(s) '%s' in %s"
            % (",".join(unknown_keys), self.__mbed_app_config_name))

    # Update the list of targets with the ones defined in the application
    # config, if applicable
    Target.add_py_targets(self.app_config_data.get("custom_targets", {}))

    self.lib_config_data = {}
    # Make sure that each config is processed only once
    self.processed_configs = {}
    if isinstance(tgt, basestring):
        if tgt in TARGET_MAP:
            self.target = TARGET_MAP[tgt]
        else:
            self.target = generate_py_target(
                self.app_config_data.get("custom_targets", {}), tgt)
    else:
        self.target = tgt
    self.target = deepcopy(self.target)
    self.target_labels = self.target.labels

    self.cumulative_overrides = {key: ConfigCumulativeOverride(key)
                                 for key in CUMULATIVE_ATTRIBUTES}

    self._process_config_and_overrides(self.app_config_data, {}, "app",
                                       "application")
    self.config_errors = None
def _psa_backend(target_name):
    return 'TFM' if 'TFM' in Target.get_target(target_name).labels \
        else 'MBED_SPM'
def main():
    start = time()

    # Parse Options
    parser = get_default_options_parser()

    parser.add_argument("--source", dest="source_dir",
                        type=argparse_filestring_type, default=None,
                        help="The source (input) directory",
                        action="append")
    parser.add_argument("--build", dest="build_dir",
                        type=argparse_dir_not_parent(ROOT), default=None,
                        help="The build (output) directory")
    parser.add_argument("--no-archive", dest="no_archive",
                        action="store_true", default=False,
                        help="Do not produce archive (.ar) file, "
                             "but rather .o")

    # Extra libraries
    parser.add_argument("-r", "--rtos", action="store_true", dest="rtos",
                        default=False, help="Compile the rtos")
    parser.add_argument("--rpc", action="store_true", dest="rpc",
                        default=False, help="Compile the rpc library")
    parser.add_argument("-u", "--usb", action="store_true", dest="usb",
                        default=False, help="Compile the USB Device library")
    parser.add_argument("-d", "--dsp", action="store_true", dest="dsp",
                        default=False, help="Compile the DSP library")
    parser.add_argument("--cpputest", action="store_true",
                        dest="cpputest_lib", default=False,
                        help="Compiles 'cpputest' unit test library "
                             "(library should be on the same directory "
                             "level as mbed repository)")
    parser.add_argument("-D", action="append", dest="macros",
                        help="Add a macro definition")
    parser.add_argument("-S", "--supported-toolchains", action="store_true",
                        dest="supported_toolchains", default=False,
                        help="Displays supported matrix of MCUs and "
                             "toolchains")
    parser.add_argument('-f', '--filter', dest='general_filter_regex',
                        default=None,
                        help='For some commands you can use filter to '
                             'filter out results')
    parser.add_argument("-j", "--jobs", type=int, dest="jobs", default=0,
                        help="Number of concurrent jobs. Default: 0/auto "
                             "(based on host machine's number of CPUs)")
    parser.add_argument("-N", "--artifact-name", dest="artifact_name",
                        default=None, help="The built project's name")
    parser.add_argument("-v", "--verbose", action="store_true",
                        dest="verbose", default=False,
                        help="Verbose diagnostic output")
    parser.add_argument("--silent", action="store_true", dest="silent",
                        default=False,
                        help="Silent diagnostic output (no copy, compile "
                             "notification)")
    parser.add_argument("-x", "--extra-verbose-notifications",
                        action="store_true", dest="extra_verbose_notify",
                        default=False,
                        help="Makes compiler more verbose, CI friendly.")
    parser.add_argument("--ignore", dest="ignore", type=argparse_many(str),
                        default=None,
                        help="Comma separated list of patterns to add to "
                             "mbedignore (eg. ./main.cpp)")

    options = parser.parse_args()

    # Only prints matrix of supported toolchains
    if options.supported_toolchains:
        print(mcu_toolchain_matrix(
            platform_filter=options.general_filter_regex))
        exit(0)

    # Get target list
    targets = extract_mcus(parser, options) if options.mcu else TARGET_NAMES

    # Get toolchains list
    toolchains = options.tool if options.tool else TOOLCHAINS

    if options.source_dir and not options.build_dir:
        args_error(parser,
                   "argument --build is required by argument --source")

    # Get libraries list
    libraries = []

    # Additional Libraries
    if options.rpc:
        libraries.extend(["rpc"])
    if options.usb:
        libraries.append("usb")
    if options.dsp:
        libraries.extend(["dsp"])
    if options.cpputest_lib:
        libraries.extend(["cpputest"])

    # Build results
    failures = []
    successes = []
    skipped = []
    end_warnings = []

    if options.clean:
        clean_psa_autogen()

    for toolchain in toolchains:
        for target_name in targets:
            target = Target.get_target(target_name)
            try:
                toolchain_name, internal_tc_name, end_warnings = \
                    find_valid_toolchain(target, toolchain)
            except NoValidToolchainException as e:
                print_end_warnings(e.end_warnings)
                args_error(parser, str(e))

            tt_id = "%s::%s" % (internal_tc_name, target_name)
            if not target_supports_toolchain(target, toolchain_name):
                # Log this later
                print("%s skipped: toolchain not supported" % tt_id)
                skipped.append(tt_id)
            else:
                try:
                    notifier = TerminalNotifier(options.verbose,
                                                options.silent)
                    profile = extract_profile(parser, options,
                                              internal_tc_name)

                    if options.source_dir:
                        if target.is_PSA_target:
                            generate_psa_sources(
                                source_dirs=options.source_dir,
                                ignore_paths=[options.build_dir]
                            )

                        resource_filter = None
                        if target.is_PSA_secure_target:
                            resource_filter = OsAndSpeResourceFilter()

                        lib_build_res = build_library(
                            options.source_dir, options.build_dir, target,
                            toolchain_name, jobs=options.jobs,
                            clean=options.clean,
                            archive=(not options.no_archive),
                            macros=options.macros,
                            name=options.artifact_name,
                            build_profile=profile,
                            ignore=options.ignore,
                            notify=notifier,
                            resource_filter=resource_filter
                        )
                    else:
                        lib_build_res = build_mbed_libs(
                            target, toolchain_name,
                            jobs=options.jobs,
                            clean=options.clean,
                            macros=options.macros,
                            build_profile=profile,
                            ignore=options.ignore,
                            notify=notifier,
                        )

                    for lib_id in libraries:
                        build_lib(
                            lib_id, target, toolchain_name,
                            clean=options.clean,
                            macros=options.macros,
                            jobs=options.jobs,
                            build_profile=profile,
                            ignore=options.ignore,
                        )
                    if lib_build_res:
                        successes.append(tt_id)
                    else:
                        skipped.append(tt_id)
                except KeyboardInterrupt as e:
                    print("\n[CTRL+c] exit")
                    print_end_warnings(end_warnings)
                    sys.exit(0)
                except Exception as e:
                    if options.verbose:
                        import traceback
                        traceback.print_exc(file=sys.stdout)
                        print_end_warnings(end_warnings)
                        sys.exit(1)
                    failures.append(tt_id)
                    print(e)

    # Write summary of the builds
    print("\nCompleted in: (%.2f)s\n" % (time() - start))

    for report, report_name in [(successes, "Build successes:"),
                                (skipped, "Build skipped:"),
                                (failures, "Build failures:"),
                                ]:
        if report:
            print(print_build_results(report, report_name))

    print_end_warnings(end_warnings)
    if failures:
        sys.exit(1)
def main():
    # Parse Options
    parser = get_default_options_parser(add_app_config=True)
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument(
        "-p",
        type=argparse_many(test_known),
        dest="program",
        help="The index of the desired test program: [0-%d]"
             % (len(TESTS) - 1))
    group.add_argument(
        "-n",
        type=argparse_many(test_name_known),
        dest="program",
        help="The name of the desired test program")
    group.add_argument(
        "-L", "--list-tests",
        action="store_true",
        dest="list_tests",
        default=False,
        help="List available tests in order and exit")
    group.add_argument(
        "-S", "--supported-toolchains",
        dest="supported_toolchains",
        default=False,
        const="matrix",
        choices=["matrix", "toolchains", "targets"],
        nargs="?",
        help="Displays supported matrix of MCUs and toolchains")
    parser.add_argument(
        "-j", "--jobs",
        type=int,
        dest="jobs",
        default=0,
        help="Number of concurrent jobs. Default: 0/auto "
             "(based on host machine's number of CPUs)")
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        dest="verbose",
        default=False,
        help="Verbose diagnostic output")
    parser.add_argument(
        "--silent",
        action="store_true",
        dest="silent",
        default=False,
        help="Silent diagnostic output (no copy, compile notification)")
    parser.add_argument(
        "-D",
        action="append",
        dest="macros",
        help="Add a macro definition")
    parser.add_argument(
        '-f', '--filter',
        dest='general_filter_regex',
        default=None,
        help='For some commands you can use filter to filter out results')
    parser.add_argument(
        "--stats-depth",
        type=int,
        dest="stats_depth",
        default=2,
        help="Depth level for static memory report")
    parser.add_argument(
        "--automated",
        action="store_true",
        dest="automated",
        default=False,
        help="Automated test")
    parser.add_argument(
        "--host",
        dest="host_test",
        default=None,
        help="Host test")
    parser.add_argument(
        "--extra",
        dest="extra",
        default=None,
        help="Extra files")
    parser.add_argument(
        "--peripherals",
        dest="peripherals",
        default=None,
        help="Required peripherals")
    parser.add_argument(
        "--dep",
        dest="dependencies",
        default=None,
        help="Dependencies")
    parser.add_argument(
        "--source",
        dest="source_dir",
        type=argparse_filestring_type,
        default=None,
        action="append",
        help="The source (input) directory")
    parser.add_argument(
        "--duration",
        type=int,
        dest="duration",
        default=None,
        help="Duration of the test")
    parser.add_argument(
        "--build",
        dest="build_dir",
        type=argparse_dir_not_parent(ROOT),
        default=None,
        help="The build (output) directory")
    parser.add_argument(
        "-N", "--artifact-name",
        dest="artifact_name",
        default=None,
        help="The built project's name")
    parser.add_argument(
        "--ignore",
        dest="ignore",
        type=argparse_many(str),
        default=None,
        help="Comma separated list of patterns to add to mbedignore "
             "(eg. ./main.cpp)")
    parser.add_argument(
        "-b", "--baud",
        type=int,
        dest="baud",
        default=None,
        help="The mbed serial baud rate")
    parser.add_argument(
        "--rpc",
        action="store_true",
        dest="rpc",
        default=False,
        help="Link with RPC library")
    parser.add_argument(
        "--usb",
        action="store_true",
        dest="usb",
        default=False,
        help="Link with USB Device library")
    parser.add_argument(
        "--dsp",
        action="store_true",
        dest="dsp",
        default=False,
        help="Link with DSP library")
    parser.add_argument(
        "--testlib",
        action="store_true",
        dest="testlib",
        default=False,
        help="Link with mbed test library")
    parser.add_argument(
        "--build-data",
        dest="build_data",
        default=None,
        help="Dump build_data to this file")
    parser.add_argument(
        "-l", "--linker",
        dest="linker_script",
        type=argparse_filestring_type,
        default=None,
        help="use the specified linker script")

    options = parser.parse_args()

    end_warnings = []

    if options.supported_toolchains:
        if options.supported_toolchains == "matrix":
            print_large_string(mcu_toolchain_matrix(
                platform_filter=options.general_filter_regex,
                release_version=None))
        elif options.supported_toolchains == "toolchains":
            print('\n'.join(get_toolchain_list()))
        elif options.supported_toolchains == "targets":
            print_large_string(mcu_target_list())
    elif options.list_tests is True:
        print('\n'.join(map(str, sorted(TEST_MAP.values()))))
    else:
        # Target
        if options.mcu is None:
            args_error(parser, "argument -m/--mcu is required")
        mcu = extract_mcus(parser, options)[0]

        # Toolchain
        if options.tool is None:
            args_error(parser, "argument -t/--tool is required")
        toolchain = options.tool[0]

        target = Target.get_target(mcu)

        if (options.program is None) and (not options.source_dir):
            args_error(parser, "one of -p, -n, or --source is required")

        if options.source_dir and not options.build_dir:
            args_error(parser,
                       "argument --build is required when argument "
                       "--source is provided")

        notify = TerminalNotifier(options.verbose, options.silent,
                                  options.color)

        try:
            toolchain_name, internal_tc_name, end_warnings = \
                find_valid_toolchain(target, toolchain)
        except NoValidToolchainException as e:
            print_end_warnings(e.end_warnings)
            args_error(parser, str(e))

        if options.source_dir is not None:
            resource_filter = None
            wrapped_build_project(
                options.source_dir,
                options.build_dir,
                mcu,
                end_warnings,
                options,
                toolchain_name,
                notify=notify,
                build_profile=extract_profile(parser, options,
                                              internal_tc_name),
                resource_filter=resource_filter,
                **default_args_dict(options))
        else:
            p = options.program

            # If 'p' was set via -n to list of numbers make this a single
            # element integer list
            if not isinstance(p, list):
                p = [p]

            build_data_blob = {} if options.build_data else None
            for test_no in p:
                test = Test(test_no)
                if options.automated is not None:
                    test.automated = options.automated
                if options.dependencies is not None:
                    test.dependencies = options.dependencies
                if options.host_test is not None:
                    test.host_test = options.host_test
                if options.peripherals is not None:
                    test.peripherals = options.peripherals
                if options.duration is not None:
                    test.duration = options.duration
                if options.extra is not None:
                    test.extra_files = options.extra

                if not test.is_supported(mcu, toolchain):
                    print('The selected test is not supported on target '
                          '%s with toolchain %s' % (mcu, toolchain))
                    sys.exit()

                # Linking with extra libraries
                if options.rpc:
                    test.dependencies.append(RPC_LIBRARY)
                if options.usb:
                    test.dependencies.append(USB_LIBRARIES)
                if options.dsp:
                    test.dependencies.append(DSP_LIBRARIES)
                if options.testlib:
                    test.dependencies.append(TEST_MBED_LIB)

                build_dir = join(BUILD_DIR, "test", mcu, toolchain, test.id)
                if options.build_dir is not None:
                    build_dir = options.build_dir

                wrapped_build_project(
                    test.source_dir,
                    build_dir,
                    mcu,
                    end_warnings,
                    options,
                    toolchain_name,
                    set(test.dependencies),
                    notify=notify,
                    report=build_data_blob,
                    inc_dirs=[dirname(MBED_LIBRARIES)],
                    build_profile=extract_profile(parser, options,
                                                  internal_tc_name),
                    **default_args_dict(options))
            if options.build_data:
                merge_build_data(options.build_data, build_data_blob,
                                 "application")
    elif options.supported_toolchains == "targets":
        print(mcu_target_list())
elif options.list_tests is True:
    print('\n'.join(map(str, sorted(TEST_MAP.values()))))
else:
    # Target
    if options.mcu is None:
        args_error(parser, "argument -m/--mcu is required")
    mcu = extract_mcus(parser, options)[0]

    # Toolchain
    if options.tool is None:
        args_error(parser, "argument -t/--tool is required")
    toolchain = options.tool[0]

    if Target.get_target(mcu).is_PSA_secure_target and \
            not is_relative_to_root(options.source_dir):
        options.source_dir = ROOT

    if (options.program is None) and (not options.source_dir):
        args_error(parser, "one of -p, -n, or --source is required")

    if options.source_dir and not options.build_dir:
        args_error(parser,
                   "argument --build is required when argument "
                   "--source is provided")

    notify = TerminalNotifier(options.verbose, options.silent,
                              options.color)
def main():
    error = False
    try:
        # Parse Options
        parser = get_default_options_parser(add_app_config=True)

        parser.add_argument(
            "-D",
            action="append",
            dest="macros",
            help="Add a macro definition")
        parser.add_argument(
            "-j", "--jobs",
            type=int,
            dest="jobs",
            default=0,
            help="Number of concurrent jobs. Default: 0/auto (based on "
                 "host machine's number of CPUs)")
        parser.add_argument(
            "--source",
            dest="source_dir",
            type=argparse_filestring_type,
            default=None,
            help="The source (input) directory (for sources other than "
                 "tests). Defaults to current directory.",
            action="append")
        parser.add_argument(
            "--build",
            dest="build_dir",
            type=argparse_dir_not_parent(ROOT),
            default=None,
            help="The build (output) directory")
        parser.add_argument(
            "-l", "--list",
            action="store_true",
            dest="list",
            default=False,
            help="List (recursively) available tests in order and exit")
        parser.add_argument(
            "-p", "--paths",
            dest="paths",
            type=argparse_many(argparse_filestring_type),
            default=None,
            help="Limit the tests to those within the specified comma "
                 "separated list of paths")

        format_choices = ["list", "json"]
        format_default_choice = "list"
        format_help = ("Change the format in which tests are listed. "
                       "Choices include: %s. Default: %s"
                       % (", ".join(format_choices), format_default_choice))
        parser.add_argument(
            "-f", "--format",
            dest="format",
            type=argparse_lowercase_type(format_choices, "format"),
            default=format_default_choice,
            help=format_help)
        parser.add_argument(
            "--continue-on-build-fail",
            action="store_true",
            dest="continue_on_build_fail",
            default=None,
            help="Continue trying to build all tests if a build failure "
                 "occurs")
        # TODO validate the names instead of just passing through str
        parser.add_argument(
            "-n", "--names",
            dest="names",
            type=argparse_many(str),
            default=None,
            help="Limit the tests to a comma separated list of names")
        parser.add_argument(
            "--test-config",
            dest="test_config",
            type=str,
            default=None,
            help="Test config for a module")
        parser.add_argument(
            "--test-spec",
            dest="test_spec",
            default=None,
            help="Destination path for a test spec file that can be used "
                 "by the Greentea automated test tool")
        parser.add_argument(
            "--build-report-junit",
            dest="build_report_junit",
            default=None,
            help="Destination path for a build report in the JUnit xml "
                 "format")
        parser.add_argument(
            "--build-data",
            dest="build_data",
            default=None,
            help="Dump build_data to this file")
        parser.add_argument(
            "-v", "--verbose",
            action="store_true",
            dest="verbose",
            default=False,
            help="Verbose diagnostic output")
        parser.add_argument(
            "--silent",
            action="store_true",
            dest="silent",
            default=False,
            help="Silent diagnostic output (no copy, compile notification)")
        parser.add_argument(
            "--stats-depth",
            type=int,
            dest="stats_depth",
            default=2,
            help="Depth level for static memory report")
        parser.add_argument(
            "--ignore",
            dest="ignore",
            type=argparse_many(str),
            default=None,
            help="Comma separated list of patterns to add to mbedignore "
                 "(eg. ./main.cpp)")
        parser.add_argument(
            "--icetea",
            action="store_true",
            dest="icetea",
            default=False,
            help="Only icetea tests")
        parser.add_argument(
            "--greentea",
            action="store_true",
            dest="greentea",
            default=False,
            help="Only greentea tests")

        options = parser.parse_args()

        # Filter tests by path if specified
        if options.paths:
            all_paths = options.paths
        else:
            all_paths = ["."]

        all_tests = {}
        tests = {}
        end_warnings = []

        # As default both test tools are enabled
        if not (options.greentea or options.icetea):
            options.greentea = True
            options.icetea = True

        # Target
        if options.mcu is None:
            args_error(parser, "argument -m/--mcu is required")
        mcu = extract_mcus(parser, options)[0]
        target = Target.get_target(mcu)

        # Toolchain
        if options.tool is None:
            args_error(parser, "argument -t/--tool is required")
        toolchain = options.tool[0]

        try:
            toolchain_name, internal_tc_name, end_warnings = \
                find_valid_toolchain(target, toolchain)
        except NoValidToolchainException as e:
            print_end_warnings(e.end_warnings)
            args_error(parser, str(e))

        # Assign config file. Precedence: test_config>app_config
        # TODO: merge configs if both given
        if options.test_config:
            config = get_test_config(options.test_config, mcu)
            if not config:
                args_error(parser,
                           "argument --test-config contains invalid path "
                           "or identifier")
        elif options.app_config:
            config = options.app_config
        else:
            config = Config.find_app_config(options.source_dir)

        if not config:
            config = get_default_config(options.source_dir or ['.'], mcu)

        # Find all tests in the relevant paths
        for path in all_paths:
            all_tests.update(find_tests(
                base_dir=path,
                target_name=mcu,
                toolchain_name=toolchain_name,
                icetea=options.icetea,
                greentea=options.greentea,
                app_config=config))

        # Filter tests by name if specified
        if options.names:
            all_names = options.names
            all_names = [x.lower() for x in all_names]

            for name in all_names:
                if any(fnmatch.fnmatch(testname, name)
                       for testname in all_tests):
                    for testname, test in all_tests.items():
                        if fnmatch.fnmatch(testname, name):
                            tests[testname] = test
                else:
                    print("[Warning] Test with name '%s' was not found in "
                          "the available tests" % (name))
        else:
            tests = all_tests

        if options.list:
            # Print available tests in order and exit
            print_tests(tests, options.format)
            sys.exit(0)
        else:
            # Build all tests
            if not options.build_dir:
                args_error(parser, "argument --build is required")

            base_source_paths = options.source_dir

            # Default base source path is the current directory
            if not base_source_paths:
                base_source_paths = ['.']

            build_report = {}
            build_properties = {}

            library_build_success = False
            profile = extract_profile(parser, options, internal_tc_name)
            try:
                resource_filter = None
                if target.is_PSA_secure_target:
                    resource_filter = OsAndSpeResourceFilter()
                    generate_psa_sources(source_dirs=base_source_paths,
                                         ignore_paths=[options.build_dir])

                # Build sources
                notify = TerminalNotifier(options.verbose, options.silent)
                build_library(base_source_paths, options.build_dir, mcu,
                              toolchain_name, jobs=options.jobs,
                              clean=options.clean, report=build_report,
                              properties=build_properties,
                              name="mbed-build", macros=options.macros,
                              notify=notify, archive=False,
                              app_config=config,
                              build_profile=profile,
                              ignore=options.ignore,
                              resource_filter=resource_filter)

                library_build_success = True
            except ToolException as e:
                # ToolException output is handled by the build log
                print("[ERROR] " + str(e))
                pass
            except NotSupportedException as e:
                # NotSupportedException is handled by the build log
                print("[ERROR] " + str(e))
                pass
            except Exception as e:
                if options.verbose:
                    import traceback
                    traceback.print_exc()
                # Some other exception occurred, print the error message
                print(e)

            if not library_build_success:
                print("Failed to build library")
            else:
                if target.is_PSA_secure_target:
                    resource_filter = SpeOnlyResourceFilter()
                else:
                    resource_filter = None

                # Build all the tests
                notify = TerminalNotifier(options.verbose, options.silent)
                test_build_success, test_build = build_tests(
                    tests,
                    [os.path.relpath(options.build_dir)],
                    options.build_dir,
                    mcu,
                    toolchain_name,
                    clean=options.clean,
                    report=build_report,
                    properties=build_properties,
                    macros=options.macros,
                    notify=notify,
                    jobs=options.jobs,
                    continue_on_build_fail=options.continue_on_build_fail,
                    app_config=config,
                    build_profile=profile,
                    stats_depth=options.stats_depth,
                    ignore=options.ignore,
                    resource_filter=resource_filter)

                # If a path to a test spec is provided, write it to a file
                if options.test_spec:
                    write_json_to_file(
                        test_spec_from_test_builds(test_build),
                        options.test_spec)

                # If a path to a JUnit build report spec is provided,
                # write it to a file
                if options.build_report_junit:
                    report_exporter = ReportExporter(
                        ResultExporterType.JUNIT, package="build")
                    report_exporter.report_to_file(
                        build_report,
                        options.build_report_junit,
                        test_suite_properties=build_properties)

                # Print memory map summary on screen
                if build_report:
                    print()
                    print(print_build_memory_usage(build_report))

                print_report_exporter = ReportExporter(
                    ResultExporterType.PRINT, package="build")
                status = print_report_exporter.report(build_report)
                if options.build_data:
                    merge_build_data(options.build_data, build_report,
                                     "test")

                if status:
                    sys.exit(0)
                else:
                    sys.exit(1)

    except KeyboardInterrupt as e:
        print("\n[CTRL+c] exit")
    except ConfigException as e:
        # Catching ConfigException here to prevent a traceback
        print("[ERROR] %s" % str(e))
        error = True
    except Exception as e:
        import traceback
        traceback.print_exc(file=sys.stdout)
        print("[ERROR] %s" % str(e))
        error = True

    print_end_warnings(end_warnings)
    if error:
        sys.exit(1)