def scan_and_copy_resources(self, prj_path, trg_path):
    """Copy the project's 'lib' and 'src' trees (plus resolved library
    builds) into trg_path, then scan the result into self.resources.

    Only files relevant to the configured target/toolchain are copied
    (filtering happens inside __scan_and_copy).
    """
    # Copy only the files for the required target and toolchain
    lib_builds = []
    for src in ['lib', 'src']:
        resources = self.__scan_and_copy(join(prj_path, src), trg_path)
        lib_builds.extend(resources.lib_builds)

        # The repository files: copied verbatim, keeping their path
        # relative to the 'lib'/'src' subdirectory currently scanned
        for repo_dir in resources.repo_dirs:
            repo_files = self.__scan_all(repo_dir)
            self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src))

    # The library builds: each .lib file holds a build URL that the
    # resolver maps to a local path + display name
    for bld in lib_builds:
        build_url = open(bld).read().strip()
        lib_data = self.build_url_resolver(build_url)
        lib_path = lib_data['path'].rstrip('\\/')
        self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))

        # Create .hg dir in mbed build dir so it's ignored when versioning
        hgdir = join(trg_path, lib_data['name'], '.hg')
        mkdir(hgdir)
        # 'file' builtin: this module targets Python 2
        fhandle = file(join(hgdir, 'keep.me'), 'a')
        fhandle.close()

    # Final scan of the actual exported resources; paths are rewritten
    # relative to the export directory
    self.resources = self.toolchain.scan_resources(trg_path)
    self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
def create_log_dir(first_time, exp_prefix, seed=0):
    """Create (if needed) and return the log directory for an experiment.

    Prints a warning on the first run when the directory already exists.
    """
    name = create_exp_name(exp_prefix, seed)
    path = osp.join(config.LOCAL_LOG_DIR, name)
    if first_time and osp.exists(path):
        print('warning: log directory already exists {}'.format(path))
    mkdir(path)
    return path
def write_to_file(self, report, file_name):
    """Persist *report* to *file_name*, creating the parent directory
    first when the path has one. A None report is silently ignored.
    """
    if report is None:
        return
    parent = os.path.dirname(file_name)
    if parent:
        mkdir(parent)
    with open(file_name, "w") as out:
        out.write(report)
def write_to_file(self, report, file_name):
    """Write *report* into *file_name*; no-op when report is None.

    The containing directory is created on demand.
    """
    if report is None:
        return
    target_dir = os.path.dirname(file_name)
    if target_dir:
        mkdir(target_dir)
    with open(file_name, 'w') as handle:
        handle.write(report)
def test_export(toolchain, target, expected_error=None): if toolchain is None and target is None: base_dir = join(EXPORT_TMP, "zip") else: base_dir = join(EXPORT_TMP, toolchain, target) temp_dir = join(base_dir, "temp") mkdir(temp_dir) zip_path, report = export(USER_PRJ, USR_PRJ_NAME, toolchain, target, base_dir, temp_dir, False, None, fake_build_url_resolver) if report['success']: move(zip_path, join(EXPORT_DIR, "export_%s_%s.zip" % (toolchain, target))) print "[OK]" else: if expected_error is None: print '[ERRROR] %s' % report['errormsg'] else: if (zip_path is None) and (expected_error in report['errormsg']): print '[OK]' else: print '[ERROR]' print ' zip:', zip_path print ' msg:', report['errormsg']
def setup_project(mcu, ide, program=None, source_dir=None, build=None):
    """Prepare the directory layout for exporting a project to an IDE.

    Returns (project_dir, project_name, project_temp). When source_dir
    is given, IDE files are generated directly in the source tree;
    otherwise a test program is staged with the mbed online IDE layout.
    """
    # Some libraries have extra macros (called by exporter symbols) so we need
    # to pass them to maintain compilation macro integrity between the
    # compiled library and header files we might use with it
    if source_dir:
        # --source is used to generate IDE files to toolchain directly in the
        # source tree and doesn't generate zip file
        project_dir = source_dir
        project_name = TESTS[program] if program else "Unnamed_Project"
        project_temp = join(source_dir[0], 'projectfiles', '%s_%s' % (ide, mcu))
        mkdir(project_temp)
    else:
        test = get_test(program)
        if not build:
            # Substitute the library builds with the sources
            # TODO: Substitute also the other library build paths
            if MBED_LIBRARIES in test.dependencies:
                test.dependencies.remove(MBED_LIBRARIES)
                test.dependencies.append(MBED_BASE)

        # Build the project with the same directory structure of the mbed online IDE
        project_name = test.id
        project_dir = [join(EXPORT_WORKSPACE, project_name)]
        project_temp = EXPORT_TMP
        setup_user_prj(project_dir[0], test.source_dir, test.dependencies)

    return project_dir, project_name, project_temp
def test_export(toolchain, target, expected_error=None): if toolchain is None and target is None: base_dir = join(EXPORT_TMP, "zip") else: base_dir = join(EXPORT_TMP, toolchain, target) temp_dir = join(base_dir, "temp") mkdir(temp_dir) zip_path, report = export( USER_PRJ, USR_PRJ_NAME, toolchain, target, base_dir, temp_dir, False, None, fake_build_url_resolver ) if report["success"]: move(zip_path, join(EXPORT_DIR, "export_%s_%s.zip" % (toolchain, target))) print "[OK]" else: if expected_error is None: print "[ERRROR] %s" % report["errormsg"] else: if (zip_path is None) and (expected_error in report["errormsg"]): print "[OK]" else: print "[ERROR]" print " zip:", zip_path print " msg:", report["errormsg"]
def modify_sample(instructions, save_dir="/tmp/adv_apks", proc_number=4, vb=True):
    """
    Modify APKs based on the given instructions.

    :param instructions: dict mapping APK path (absolute) to a list of
        meta-instructions (APK_path:Operator$$Comp$$Specific name$$count)
    :param save_dir: directory that receives the modified APKs
    :param proc_number: number of worker processes
    :param vb: verbose flag
    :return: 0 on success, 1 on malformed input
    """
    if not isinstance(instructions, (dict, defaultdict)):
        # Fixed typo in message: was "instrctions"
        logger.error("Incorrect instructions.\n")
        return 1

    # step 1: data preparation
    apk_names = list(instructions.keys())  # abs path
    meta_instrs = list(instructions.values())

    # step 2: disassembly
    tmp_work_dir = os.path.join("/tmp", "apk_disassembly")
    try:
        if not os.path.exists(tmp_work_dir):
            utils.mkdir(tmp_work_dir)
        disassemble_apks(apk_names, tmp_work_dir, proc_number, verbose=vb)
    except Exception as ex:
        logger.exception(str(ex))
        logger.error("apk disassembly error: " + str(ex) + "\n")
        raise Exception("APK disassembly error: " + str(ex) + "\n")

    # step 3: modification
    modify_disassembly(apk_names, tmp_work_dir, meta_instrs, proc_number, verbose=vb)

    # step 4: assembly
    utils.mkdir(save_dir)
    try:
        new_apk_names = assemble_apks(apk_names, save_dir, tmp_work_dir, proc_number, verbose=vb)
    except Exception as ex:
        logger.exception(str(ex))
        logger.error("apk assembly error: " + str(ex) + "\n")
        raise Exception("APK assembly error: " + str(ex) + "\n")

    # step 5: sign apks
    try:
        sign_apks(new_apk_names, proc_number, vb)
    except Exception as ex:
        logger.exception(str(ex))
        logger.error("apk signing error: " + str(ex) + "\n")
        raise Exception("APK signing error: " + str(ex) + "\n")

    return 0  # executed successfully
def gen_file(self, template_file, data, target_file, **kwargs):
    """Generate a project file from a jinja template and record it.

    The rendered text is written to the destination resolved by
    gen_file_dest; the path is appended to self.generated_files.
    """
    target_text = self._gen_file_inner(template_file, data, target_file, **kwargs)
    target_path = self.gen_file_dest(target_file)
    mkdir(dirname(target_path))
    logging.debug("Generating: %s", target_path)
    # Use a context manager so the handle is closed even if write() fails
    # (the original open(...).write(...) leaked the file object)
    with open(target_path, "w") as out:
        out.write(target_text)
    self.generated_files += [target_path]
def relative_object_path(self, build_path, base_dir, source):
    """Map a source file path to its .o path under build_path, mirroring
    the source tree layout relative to base_dir.

    Caches the last directory created (self.prev_dir) to avoid redundant
    mkdir calls when compiling many files from the same directory.
    """
    source_dir, name, _ = split_path(source)
    obj_dir = join(build_path, relpath(source_dir, base_dir))
    # Fixed: use '!=' value comparison. 'is not' compared object identity,
    # and a freshly-built string is never the same object as the cached
    # one, so the mkdir cache never hit.
    if obj_dir != self.prev_dir:
        self.prev_dir = obj_dir
        mkdir(obj_dir)
    return join(obj_dir, name + '.o')
def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
    """Copy the project trees into trg_path, resolve library builds, and
    load the scanned resources into the configuration system.

    prj_paths may be a single path or a list; trg_path is the export
    destination. When relative is True the source trees (not the copy)
    are scanned so generated project files reference original paths.
    """
    # Copy only the files for the required target and toolchain
    lib_builds = []
    # Create the configuration object
    if isinstance(prj_paths, basestring):
        prj_paths = [prj_paths]
    config = Config(self.target, prj_paths)
    for src in ['lib', 'src']:
        resources = self.__scan_and_copy(join(prj_paths[0], src), trg_path)
        for path in prj_paths[1:]:
            resources.add(self.__scan_and_copy(join(path, src), trg_path))
        lib_builds.extend(resources.lib_builds)

        # The repository files
        #for repo_dir in resources.repo_dirs:
        #    repo_files = self.__scan_all(repo_dir)
        #    for path in prj_paths:
        #        self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))

    # The libraries builds
    for bld in lib_builds:
        build_url = open(bld).read().strip()
        lib_data = self.build_url_resolver(build_url)
        lib_path = lib_data['path'].rstrip('\\/')
        self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))

        # Create .hg dir in mbed build dir so it's ignored when versioning
        hgdir = join(trg_path, lib_data['name'], '.hg')
        mkdir(hgdir)
        fhandle = file(join(hgdir, 'keep.me'), 'a')
        fhandle.close()

    if not relative:
        # Final scan of the actual exported resources
        resources = self.toolchain.scan_resources(trg_path)
        resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
    else:
        # use the prj_dir (source, not destination)
        resources = self.toolchain.scan_resources(prj_paths[0])
        for path in prj_paths[1:]:
            # Fixed: was bare 'toolchain.scan_resources' (NameError)
            resources.add(self.toolchain.scan_resources(path))

    # Loads the resources into the config system which might expand/modify
    # resources based on config data
    self.resources = config.load_resources(resources)

    if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED:
        # Add the configuration file to the target directory
        self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME
        config.get_config_data_header(join(trg_path, self.config_header))
        self.config_macros = []
        self.resources.inc_dirs.append(".")
    else:
        # And add the configuration macros to the toolchain
        self.config_macros = config.get_config_data_macros()
def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
    """Copy the project trees into trg_path, resolve library builds, and
    load the scanned resources into the configuration system.

    prj_paths may be a single path or a list; when relative is True the
    original source trees are scanned instead of the exported copy.
    """
    # Copy only the files for the required target and toolchain
    lib_builds = []
    # Create the configuration object
    if isinstance(prj_paths, basestring):
        prj_paths = [prj_paths]
    config = Config(self.target, prj_paths)
    for src in ["lib", "src"]:
        resources = self.__scan_and_copy(join(prj_paths[0], src), trg_path)
        for path in prj_paths[1:]:
            resources.add(self.__scan_and_copy(join(path, src), trg_path))
        lib_builds.extend(resources.lib_builds)

        # The repository files
        # for repo_dir in resources.repo_dirs:
        #     repo_files = self.__scan_all(repo_dir)
        #     for path in prj_paths:
        #         self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))

    # The libraries builds
    for bld in lib_builds:
        build_url = open(bld).read().strip()
        lib_data = self.build_url_resolver(build_url)
        lib_path = lib_data["path"].rstrip("\\/")
        self.__scan_and_copy(lib_path, join(trg_path, lib_data["name"]))

        # Create .hg dir in mbed build dir so it's ignored when versioning
        hgdir = join(trg_path, lib_data["name"], ".hg")
        mkdir(hgdir)
        fhandle = file(join(hgdir, "keep.me"), "a")
        fhandle.close()

    if not relative:
        # Final scan of the actual exported resources
        resources = self.toolchain.scan_resources(trg_path)
        resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
    else:
        # use the prj_dir (source, not destination)
        resources = self.toolchain.scan_resources(prj_paths[0])
        for path in prj_paths[1:]:
            # Fixed: was bare 'toolchain.scan_resources' (NameError)
            resources.add(self.toolchain.scan_resources(path))

    # Loads the resources into the config system which might expand/modify
    # resources based on config data
    self.resources = config.load_resources(resources)

    if hasattr(self, "MBED_CONFIG_HEADER_SUPPORTED") and self.MBED_CONFIG_HEADER_SUPPORTED:
        # Add the configuration file to the target directory
        self.config_header = self.toolchain.MBED_CONFIG_FILE_NAME
        config.get_config_data_header(join(trg_path, self.config_header))
        self.config_macros = []
        self.resources.inc_dirs.append(".")
    else:
        # And add the configuration macros to the toolchain
        self.config_macros = config.get_config_data_macros()
def main(args):
    """Set up the experiment output directory and console+file logging."""
    # Timestamped experiment directory under the configured save dir
    stamp = time.strftime("%Y%m%d-%H%M%S")
    args.savedir = os.path.join(args.savedir, 'exp-{}'.format(stamp))
    utils.mkdir(args.savedir)

    # Console logging, plus a log file inside the experiment directory
    fmt = '%(asctime)s %(message)s'
    logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                        format=fmt, datefmt='%Y/%m/%d %I:%M:%S %p')
    file_handler = logging.FileHandler(os.path.join(args.savedir, 'log.txt'))
    file_handler.setFormatter(logging.Formatter(fmt))
    logging.getLogger().addHandler(file_handler)
def copy_folder(src, dest):
    """ Copy contents of folder in mbed-os listed path
    Args:
    src - src folder path
    dest - destination folder path
    """
    # Only regular files are copied; subdirectories are skipped
    for entry in os.listdir(src):
        src_path = join(src, entry)
        if not os.path.isfile(src_path):
            continue
        dst_path = join(dest, entry)
        mkdir(dirname(dst_path))
        copy_file(src_path, dst_path)
def copy_folder(src, dst):
    """
    Copy the regular files of one folder into another.

    :param src: Source folder path.
    :param dst: Destination folder path.
    :return: None.
    """
    for name in os.listdir(src):
        candidate = join(src, name)
        # Directories are ignored; only plain files are transferred
        if isfile(candidate):
            target = join(dst, name)
            mkdir(dirname(target))
            copy_file(candidate, target)
def setup_user_prj(user_dir, prj_path, lib_paths=None):
    """
    Setup a project with the same directory structure of the mbed online IDE
    """
    mkdir(user_dir)

    # Project sources go under <user_dir>/src
    copy_tree(prj_path, join(user_dir, "src"))

    # Each dependency library goes under <user_dir>/lib/<libname>
    user_lib = join(user_dir, "lib")
    mkdir(user_lib)
    for lib_path in (lib_paths or []):
        copy_tree(lib_path, join(user_lib, basename(lib_path)))
def assemble(self, source, object, includes):
    """Return the (preprocess, assemble) command pair for *source*.

    Commands are only built here, never executed.
    """
    # Preprocessed output is staged in a .temp dir next to the object file
    temp_dir = join(dirname(object), '.temp')
    mkdir(temp_dir)
    preprocessed = join(temp_dir, basename(object) + '.E.s')

    # Pass 1: run the assembler front-end as a preprocessor only (-E)
    options = self.get_compile_options(self.get_symbols(True), includes, True)
    cmd_pre = copy(self.asm) + options + ["-E", "-o", preprocessed, source]

    # Pass 2: assemble the preprocessed file into the object
    cmd = self.asm + ["-o", object, preprocessed]
    return [cmd_pre, cmd]
def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
    """Copy the project trees into trg_path, resolve library builds and
    collect configuration macros from discovered JSON config files.

    prj_paths: list of project roots; trg_path: export destination.
    When relative is True, the original source trees are scanned instead
    of the exported copy.
    """
    # Copy only the files for the required target and toolchain
    lib_builds = []
    # Create the configuration object
    cfg = Config(self.target, prj_paths)
    for src in ['lib', 'src']:
        resources = reduce(add, [self.__scan_and_copy(join(path, src), trg_path) for path in prj_paths])
        lib_builds.extend(resources.lib_builds)

        # The repository files
        for repo_dir in resources.repo_dirs:
            repo_files = self.__scan_all(repo_dir)
            # Fixed: was 'proj_paths' (NameError)
            for path in prj_paths:
                self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))

    # The libraries builds
    for bld in lib_builds:
        build_url = open(bld).read().strip()
        lib_data = self.build_url_resolver(build_url)
        lib_path = lib_data['path'].rstrip('\\/')
        self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))

        # Create .hg dir in mbed build dir so it's ignored when versioning
        hgdir = join(trg_path, lib_data['name'], '.hg')
        mkdir(hgdir)
        fhandle = file(join(hgdir, 'keep.me'), 'a')
        fhandle.close()

    if not relative:
        # Final scan of the actual exported resources
        self.resources = self.toolchain.scan_resources(trg_path)
        self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
    else:
        # use the prj_dir (source, not destination)
        self.resources = reduce(add, [self.toolchain.scan_resources(path) for path in prj_paths])

    # Add all JSON files discovered during scanning to the configuration object
    cfg.add_config_files(self.resources.json_files)

    # Get data from the configuration system
    self.config_macros = cfg.get_config_data_macros()
def assemble(self, source, object, includes):
    """Build (but do not run) the preprocess + assemble command pair,
    letting the toolchain hook rewrite each command line.
    """
    # Stage the preprocessed source next to the object file
    staging = join(dirname(object), '.temp')
    mkdir(staging)
    preprocessed = join(staging, basename(object) + '.E.s')

    # Preprocess-only pass (-E), then the real assembly pass
    opts = self.get_compile_options(self.get_symbols(True), includes)
    pre_cmd = self.asm + opts + ["-E", "-o", preprocessed, source]
    asm_cmd = self.asm + ["-o", object, preprocessed]

    # The cmdline hook gets the final say on each command
    return [self.hook.get_cmdline_assembler(pre_cmd),
            self.hook.get_cmdline_assembler(asm_cmd)]
def scan_and_copy_resources(self, prj_paths, trg_path, relative=False):
    """Copy the project trees into trg_path, resolve library builds, and
    load everything into the configuration system.

    prj_paths: list of project roots; trg_path: export destination.
    When relative is True, scan the source trees instead of the copy.
    """
    # Copy only the files for the required target and toolchain
    lib_builds = []
    # Create the configuration object
    config = Config(self.target, prj_paths)
    for src in ['lib', 'src']:
        resources = reduce(add, [self.__scan_and_copy(join(path, src), trg_path) for path in prj_paths])
        lib_builds.extend(resources.lib_builds)

        # The repository files
        for repo_dir in resources.repo_dirs:
            repo_files = self.__scan_all(repo_dir)
            # Fixed: was 'proj_paths' (NameError)
            for path in prj_paths:
                self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src))

    # The libraries builds
    for bld in lib_builds:
        build_url = open(bld).read().strip()
        lib_data = self.build_url_resolver(build_url)
        lib_path = lib_data['path'].rstrip('\\/')
        self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))

        # Create .hg dir in mbed build dir so it's ignored when versioning
        hgdir = join(trg_path, lib_data['name'], '.hg')
        mkdir(hgdir)
        fhandle = file(join(hgdir, 'keep.me'), 'a')
        fhandle.close()

    if not relative:
        # Final scan of the actual exported resources
        resources = self.toolchain.scan_resources(trg_path)
        resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
    else:
        # use the prj_dir (source, not destination)
        resources = self.toolchain.scan_resources(prj_paths[0])
        for path in prj_paths[1:]:
            # Fixed: was bare 'toolchain.scan_resources' (NameError)
            resources.add(self.toolchain.scan_resources(path))

    # Loads the resources into the config system which might expand/modify
    # resources based on config data
    self.resources = config.load_resources(resources)

    # And add the configuration macros to the toolchain
    self.config_macros = config.get_config_data_macros()
def copy_files(self, files_paths, trg_path, rel_path=None):
    """Copy files_paths (a path or list of paths) into trg_path.

    When rel_path is given, each file keeps its path relative to
    rel_path; otherwise only the basename is used. A file is copied
    only when the target differs from the source and is out of date.
    """
    # Handle a single file
    if type(files_paths) != ListType:
        files_paths = [files_paths]

    # Fixed: filter None entries with a new list. The old code removed
    # items while iterating the same list, which skips elements (e.g.
    # the second of two consecutive None entries survived).
    files_paths = [source for source in files_paths if source is not None]

    for source in files_paths:
        if rel_path is not None:
            relative_path = relpath(source, rel_path)
        else:
            _, relative_path = split(source)

        target = join(trg_path, relative_path)

        if (target != source) and (self.need_update(target, [source])):
            self.progress("copy", relative_path)
            mkdir(dirname(target))
            copyfile(source, target)
def link_program(self, r, tmp_path, name):
    """Link objects/libraries into an ELF, then convert to the final binary.

    r: resources object (objects, libraries, lib_dirs, linker_script).
    Returns (binary_path, needed_update) where needed_update reports
    whether any link/convert step actually ran.
    """
    needed_update = False
    # Default output extension; targets may override it
    ext = "bin"
    if hasattr(self.target, "OUTPUT_EXT"):
        ext = self.target.OUTPUT_EXT

    if hasattr(self.target, "OUTPUT_NAMING"):
        self.var("binary_naming", self.target.OUTPUT_NAMING)
        # Truncate to DOS 8.3 names when the target requires it
        if self.target.OUTPUT_NAMING == "8.3":
            name = name[0:8]
            ext = ext[0:3]

    # Create destination directory
    head, tail = split(name)
    new_path = join(tmp_path, head)
    mkdir(new_path)

    filename = name + "." + ext
    elf = join(tmp_path, name + ".elf")
    bin = join(tmp_path, filename)
    map = join(tmp_path, name + ".map")

    # Re-link only when any input is newer than the ELF
    if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
        needed_update = True
        self.progress("link", name)
        self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

    # Re-convert only when the ELF is newer than the binary
    if self.need_update(bin, [elf]):
        needed_update = True
        self.progress("elf2bin", name)
        self.binary(r, elf, bin)

    self.mem_stats(map)

    self.var("compile_succeded", True)
    self.var("binary", filename)

    return bin, needed_update
def link_program(self, r, tmp_path, name):
    """Link objects/libraries into an ELF and produce the final binary.

    r: resources object (objects, libraries, lib_dirs, linker_script).
    Returns (binary_path, needed_update); needed_update is True when a
    link or elf2bin step actually ran.
    """
    needed_update = False
    # Default output extension; targets may override it
    ext = 'bin'
    if hasattr(self.target, 'OUTPUT_EXT'):
        ext = self.target.OUTPUT_EXT

    if hasattr(self.target, 'OUTPUT_NAMING'):
        self.var("binary_naming", self.target.OUTPUT_NAMING)
        # Truncate to DOS 8.3 names when the target requires it
        if self.target.OUTPUT_NAMING == "8.3":
            name = name[0:8]
            ext = ext[0:3]

    # Create destination directory
    head, tail = split(name)
    new_path = join(tmp_path, head)
    mkdir(new_path)

    filename = name + '.' + ext
    elf = join(tmp_path, name + '.elf')
    bin = join(tmp_path, filename)
    map = join(tmp_path, name + '.map')

    # Re-link only when any input is newer than the ELF
    if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
        needed_update = True
        self.progress("link", name)
        self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)

    # Re-convert only when the ELF is newer than the binary
    if self.need_update(bin, [elf]):
        needed_update = True
        self.progress("elf2bin", name)
        self.binary(r, elf, bin)

    self.mem_stats(map)

    self.var("compile_succeded", True)
    self.var("binary", filename)

    return bin, needed_update
def import_files(repo_path, data_files, data_folders):
    """
    Imports files and directories to mbed-os

    :param repo_path: Path to the repo copying from.
    :param data_files: List of files to be copied. (with destination)
    :param data_folders: List of directories to be copied. (with destination)
    :return: None
    """
    # Remove all files listed in .json from mbed-os repo to avoid duplications
    for fh in data_files:
        src_file = fh['src_file']
        del_file(os.path.basename(src_file))

        dest_file = join(ROOT, fh['dest_file'])
        if isfile(dest_file):
            # Fixed: dest_file is already rooted at ROOT; the previous
            # os.remove(join(ROOT, dest_file)) re-joined an absolute path
            os.remove(dest_file)
            rel_log.debug("Deleted %s", fh['dest_file'])

    for folder in data_folders:
        dest_folder = folder['dest_folder']
        delete_dir_files(dest_folder)
        rel_log.debug("Deleted: %s", folder['dest_folder'])

    rel_log.info("Removed files/folders listed in json file")

    # Copy all the files listed in json file to mbed-os
    for fh in data_files:
        repo_file = join(repo_path, fh['src_file'])
        mbed_path = join(ROOT, fh['dest_file'])
        mkdir(dirname(mbed_path))
        copy_file(repo_file, mbed_path)
        rel_log.debug("Copied %s to %s", normpath(repo_file), normpath(mbed_path))

    for folder in data_folders:
        repo_folder = join(repo_path, folder['src_folder'])
        mbed_path = join(ROOT, folder['dest_folder'])
        copy_folder(repo_folder, mbed_path)
        rel_log.debug("Copied %s to %s", normpath(repo_folder), normpath(mbed_path))
def build_library(src_paths, build_path, target, toolchain_name, dependencies_paths=None, options=None, name=None, clean=False, archive=True, notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False, project_id=None): """ src_path: the path of the source directory build_path: the path of the build directory target: ['LPC1768', 'LPC11U24', 'LPC2368'] toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR'] library_paths: List of paths to additional libraries clean: Rebuild everything if True notify: Notify function for logs verbose: Write the actual tools command lines if True inc_dirs: additional include directories which should be included in build inc_dirs_ext: additional include directories which should be copied to library directory """ if type(src_paths) != ListType: src_paths = [src_paths] # The first path will give the name to the library project_name = basename(src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd()) if name is None: # We will use default project name based on project folder name name = project_name if report != None: start = time() # If project_id is specified, use that over the default name id_name = project_id.upper() if project_id else name.upper() description = name vendor_label = target.extra_labels[0] cur_result = None prep_report(report, target.name, toolchain_name, id_name) cur_result = create_result(target.name, toolchain_name, id_name, description) if properties != None: prep_properties(properties, target.name, toolchain_name, vendor_label) for src_path in src_paths: if not exists(src_path): error_msg = "The library source folder does not exist: %s", src_path if report != None: cur_result["output"] = error_msg cur_result["result"] = "FAIL" add_result_to_report(report, cur_result) raise Exception(error_msg) try: # Toolchain instance toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, 
silent=silent, extra_verbose=extra_verbose) toolchain.VERBOSE = verbose toolchain.jobs = jobs toolchain.build_all = clean toolchain.info("Building library %s (%s, %s)" % (name, target.name, toolchain_name)) # Scan Resources resources = None for path in src_paths: # Scan resources resource = toolchain.scan_resources(path) # Copy headers, objects and static libraries - all files needed for static lib toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path) toolchain.copy_files(resource.objects, build_path, rel_path=resource.base_path) toolchain.copy_files(resource.libraries, build_path, rel_path=resource.base_path) if resource.linker_script: toolchain.copy_files(resource.linker_script, build_path, rel_path=resource.base_path) # Extend resources collection if not resources: resources = resource else: resources.add(resource) # We need to add if necessary additional include directories if inc_dirs: if type(inc_dirs) == ListType: resources.inc_dirs.extend(inc_dirs) else: resources.inc_dirs.append(inc_dirs) # Add extra include directories / files which are required by library # This files usually are not in the same directory as source files so # previous scan will not include them if inc_dirs_ext is not None: for inc_ext in inc_dirs_ext: resources.add(toolchain.scan_resources(inc_ext)) # Dependencies Include Paths if dependencies_paths is not None: for path in dependencies_paths: lib_resources = toolchain.scan_resources(path) resources.inc_dirs.extend(lib_resources.inc_dirs) if archive: # Use temp path when building archive tmp_path = join(build_path, '.temp') mkdir(tmp_path) else: tmp_path = build_path # Handle configuration config = Config(target) # Update the configuration with any .json files found while scanning config.add_config_files(resources.json_files) # And add the configuration macros to the toolchain toolchain.add_macros(config.get_config_data_macros()) # Compile Sources for path in src_paths: src = toolchain.scan_resources(path) 
objects = toolchain.compile_sources(src, abspath(tmp_path), resources.inc_dirs) resources.objects.extend(objects) if archive: toolchain.build_library(objects, build_path, name) if report != None: end = time() cur_result["elapsed_time"] = end - start cur_result["output"] = toolchain.get_output() cur_result["result"] = "OK" add_result_to_report(report, cur_result) except Exception, e: if report != None: end = time() if isinstance(e, ToolException): cur_result["result"] = "FAIL" elif isinstance(e, NotSupportedException): cur_result["result"] = "NOT_SUPPORTED" cur_result["elapsed_time"] = end - start toolchain_output = toolchain.get_output() if toolchain_output: cur_result["output"] += toolchain_output add_result_to_report(report, cur_result) # Let Exception propagate raise e
def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False): """ Legacy method for building mbed libraries Function builds library in proper directory using all dependencies and macros defined by user. """ lib = Library(lib_id) if not lib.is_supported(target, toolchain_name): print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain) return False # We need to combine macros from parameter list with macros from library definition MACROS = lib.macros if lib.macros else [] if macros: macros.extend(MACROS) else: macros = MACROS src_paths = lib.source_dir build_path = lib.build_dir dependencies_paths = lib.dependencies inc_dirs = lib.inc_dirs inc_dirs_ext = lib.inc_dirs_ext """ src_path: the path of the source directory build_path: the path of the build directory target: ['LPC1768', 'LPC11U24', 'LPC2368'] toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR'] library_paths: List of paths to additional libraries clean: Rebuild everything if True notify: Notify function for logs verbose: Write the actual tools command lines if True inc_dirs: additional include directories which should be included in build inc_dirs_ext: additional include directories which should be copied to library directory """ if type(src_paths) != ListType: src_paths = [src_paths] # The first path will give the name to the library name = basename(src_paths[0]) if report != None: start = time() id_name = name.upper() description = name vendor_label = target.extra_labels[0] cur_result = None prep_report(report, target.name, toolchain_name, id_name) cur_result = create_result(target.name, toolchain_name, id_name, description) if properties != None: prep_properties(properties, target.name, toolchain_name, vendor_label) for src_path in src_paths: if not exists(src_path): error_msg = "The library source folder does not exist: %s", src_path 
if report != None: cur_result["output"] = error_msg cur_result["result"] = "FAIL" add_result_to_report(report, cur_result) raise Exception(error_msg) try: # Toolchain instance toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose) toolchain.VERBOSE = verbose toolchain.jobs = jobs toolchain.build_all = clean toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name)) # Scan Resources resources = [] for src_path in src_paths: resources.append(toolchain.scan_resources(src_path)) # Add extra include directories / files which are required by library # This files usually are not in the same directory as source files so # previous scan will not include them if inc_dirs_ext is not None: for inc_ext in inc_dirs_ext: resources.append(toolchain.scan_resources(inc_ext)) # Dependencies Include Paths dependencies_include_dir = [] if dependencies_paths is not None: for path in dependencies_paths: lib_resources = toolchain.scan_resources(path) dependencies_include_dir.extend(lib_resources.inc_dirs) if inc_dirs: dependencies_include_dir.extend(inc_dirs) # Create the desired build directory structure bin_path = join(build_path, toolchain.obj_path) mkdir(bin_path) tmp_path = join(build_path, '.temp', toolchain.obj_path) mkdir(tmp_path) # Copy Headers for resource in resources: toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path) dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs) # Compile Sources objects = [] for resource in resources: objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir)) needed_update = toolchain.build_library(objects, bin_path, name) if report != None and needed_update: end = time() cur_result["elapsed_time"] = end - start cur_result["output"] = toolchain.get_output() cur_result["result"] = "OK" add_result_to_report(report, cur_result) except Exception, e: if 
report != None: end = time() cur_result["result"] = "FAIL" cur_result["elapsed_time"] = end - start toolchain_output = toolchain.get_output() if toolchain_output: cur_result["output"] += toolchain_output add_result_to_report(report, cur_result) # Let Exception propagate raise e
def build_mbed_libs(target, toolchain_name, options=None, verbose=False,
                    clean=False, macros=None, notify=None, jobs=1, silent=False,
                    report=None, properties=None, extra_verbose=False):
    """Build the mbed SDK (CMSIS + HAL + common sources) for a target.

    Returns True if the library was built and False if building was
    skipped (toolchain not supported by the target).
    """
    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name, description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name, vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
        print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Toolchain
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
        toolchain.VERBOSE = verbose
        toolchain.jobs = jobs
        toolchain.build_all = clean

        # Source and Build Paths
        BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
        BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
        mkdir(BUILD_TOOLCHAIN)

        TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
        mkdir(TMP_PATH)

        # CMSIS: headers to the target dir, linker scripts/binaries to the
        # toolchain dir, objects compiled into the temp dir then copied over
        toolchain.info("Building library %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
        cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
        resources = toolchain.scan_resources(cmsis_src)

        toolchain.copy_files(resources.headers, BUILD_TARGET)
        toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
        toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)

        objects = toolchain.compile_sources(resources, TMP_PATH)
        toolchain.copy_files(objects, BUILD_TOOLCHAIN)

        # mbed
        toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
        toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)

        # Target specific sources
        HAL_SRC = join(MBED_TARGETS_PATH, "hal")
        hal_implementation = toolchain.scan_resources(HAL_SRC)
        toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, resources=hal_implementation)
        incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
        objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)

        # Common Sources
        mbed_resources = toolchain.scan_resources(MBED_COMMON)
        objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)

        # A number of compiled files need to be copied as objects as opposed to
        # being part of the mbed library, for reasons that have to do with the way
        # the linker searches for symbols in archives. These are:
        #   - retarget.o: to make sure that the C standard lib symbols get overridden
        #   - board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various weak SDK functions
        separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []

        for o in objects:
            for name in separate_names:
                if o.endswith(name):
                    separate_objects.append(o)

        for o in separate_objects:
            objects.remove(o)

        toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")

        for o in separate_objects:
            toolchain.copy_files(o, BUILD_TOOLCHAIN)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"

            add_result_to_report(report, cur_result)

        return True

    except Exception, e:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(e)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise e
def build_project(src_path, build_path, target, toolchain_name,
                  libraries_paths=None, options=None, linker_script=None,
                  clean=False, notify=None, verbose=False, name=None,
                  macros=None, inc_dirs=None, jobs=1, silent=False,
                  report=None, properties=None, project_id=None,
                  project_description=None, extra_verbose=False, config=None):
    """ This function builds project. Project can be for example one test / UT

    src_path may be a single path or a list of paths; the first entry names
    the project.  Returns the result of toolchain.link_program() (the built
    binary path).  When `report` is given, a result entry is always appended,
    both on success and on failure.
    """

    # Convert src_path to a list if needed
    src_paths = [src_path] if type(src_path) != ListType else src_path

    # We need to remove all paths which are repeated to avoid
    # multiple compilations and linking with the same objects
    src_paths = [src_paths[0]] + list(set(src_paths[1:]))
    first_src_path = src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd()
    abs_path = abspath(first_src_path)
    project_name = basename(normpath(abs_path))

    # If the configuration object was not yet created, create it now
    config = config or Config(target, src_paths)

    # If the 'target' argument is a string, convert it to a target instance
    if isinstance(target, str):
        try:
            target = TARGET_MAP[target]
        except KeyError:
            raise KeyError("Target '%s' not found" % target)

    # Toolchain instance
    try:
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
    except KeyError:
        # Unknown toolchain name: re-raise with a clearer message
        raise KeyError("Toolchain %s not supported" % toolchain_name)

    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs
    toolchain.build_all = clean

    if name is None:
        # We will use default project name based on project folder name
        name = project_name
        toolchain.info("Building project %s (%s, %s)" % (project_name, target.name, toolchain_name))
    else:
        # User used custom global project name to have the same name for the output binary
        toolchain.info("Building project %s to %s (%s, %s)" % (project_name, name, target.name, toolchain_name))

    if report != None:
        start = time()

        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name, description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name, vendor_label)

    try:
        # Scan src_path and libraries_paths for resources
        resources = toolchain.scan_resources(src_paths[0])
        for path in src_paths[1:]:
            resources.add(toolchain.scan_resources(path))
        if libraries_paths is not None:
            src_paths.extend(libraries_paths)
            for path in libraries_paths:
                resources.add(toolchain.scan_resources(path))

        if linker_script is not None:
            resources.linker_script = linker_script

        # Build Directory
        if clean:
            if exists(build_path):
                rmtree(build_path)
        mkdir(build_path)

        # We need to add if necessary additional include directories
        if inc_dirs:
            if type(inc_dirs) == ListType:
                resources.inc_dirs.extend(inc_dirs)
            else:
                resources.inc_dirs.append(inc_dirs)

        # Update the configuration with any .json files found while scanning
        config.add_config_files(resources.json_files)

        # And add the configuration macros to the toolchain
        toolchain.add_macros(config.get_config_data_macros())

        # Compile Sources
        for path in src_paths:
            src = toolchain.scan_resources(path)
            objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
            resources.objects.extend(objects)

        # Link Program
        res, _ = toolchain.link_program(resources, build_path, name)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)

        return res

    except Exception as e:
        if report != None:
            end = time()
            if isinstance(e, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start
            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output
            add_result_to_report(report, cur_result)
        # Let Exception propagate; bare raise preserves the original traceback
        raise
def attack(self):
    """Run the configured attack end-to-end.

    Generates adversarial perturbations for the targeted model, dumps the
    pristine/adversarial feature vectors and labels to the configured
    directories, optionally rebuilds executable adversarial APKs, and logs
    perturbation-scale (l0/l1/l2) and accuracy statistics.

    Returns 0 on success, 1 if perturbation generation fails, 2 if APK
    modification fails.
    """
    # Per-attack output directory comes from the [attack] config section.
    save_dir = cfg.config.get('attack', self.attack_method_name)
    if not os.path.exists(save_dir):
        utils.mkdir(save_dir)
    perturbations = None
    pristine_feature_vec = None
    adv_feature_vec = None
    labels = self.gt_labels
    try:
        # generate_perturbations() presumably returns aligned arrays of
        # pristine features, adversarial features and labels — TODO confirm.
        pristine_feature_vec, adv_feature_vec, labels = self.generate_perturbations()
        # Persist the pristine features, tagged with the attack's norm order.
        save_path = os.path.join(save_dir, "pristine_{}.data".format(method_params_dict[self.attack_method_name].get('ord', '')))
        utils.dumpdata_np(pristine_feature_vec, save_path)
        save_path = cfg.config.get('attack', 'advX')
        utils.dumpdata_np(adv_feature_vec, save_path)
        # backup
        save_path = os.path.join(save_dir, "{}_{}.data".format(self.attack_method_name, method_params_dict[self.attack_method_name].get('ord', '')))
        utils.dumpdata_np(adv_feature_vec, save_path)
        save_path = os.path.join(save_dir, "{}_{}.label".format(self.attack_method_name, method_params_dict[self.attack_method_name].get('ord', '')))
        utils.dumpdata_np(labels, save_path)
        # Compute perturbations in the un-normalized feature space when a
        # normalizer was used, otherwise directly in feature space.
        if self.feature_reverser.normalizer is not None:
            perturbations = utils.normalize_inverse(adv_feature_vec, self.feature_reverser.normalizer) - \
                            utils.normalize_inverse(pristine_feature_vec, self.feature_reverser.normalizer)
        else:
            perturbations = adv_feature_vec - pristine_feature_vec
    except Exception as ex:
        logger.exception(ex)
        logger.error(str(ex))
        logger.error("Failed to generate perturbations.")
        return 1

    if perturbations is None:
        # Fallback: reload a previously dumped snapshot of the adversarial
        # and pristine features from disk.
        adv_feat_save_dir = cfg.config.get('attack', self.attack_method_name)
        adv_data_path = os.path.join(adv_feat_save_dir, '{}_{}.data'.format(self.attack_method_name, method_params_dict[self.attack_method_name].get('ord', '')))
        pris_data_path = os.path.join(adv_feat_save_dir, "pristine_{}.data".format(method_params_dict[self.attack_method_name].get('ord', '')))
        if os.path.exists(adv_data_path) and os.path.exists(pris_data_path):
            adv_feature_vec = utils.readdata_np(adv_data_path)
            pristine_feature_vec = utils.readdata_np(pris_data_path)
        else:
            raise ValueError("No perturbations.")
        if self.feature_reverser.normalizer is not None:
            perturbations = utils.normalize_inverse(adv_feature_vec, self.feature_reverser.normalizer) - \
                            utils.normalize_inverse(pristine_feature_vec, self.feature_reverser.normalizer)
        else:
            perturbations = adv_feature_vec - pristine_feature_vec
        # NOTE(review): logger.warn is deprecated in favor of logger.warning.
        logger.warn("Perturbations generated from snapshot with degree {:.5f}".format(np.mean(np.sum(np.abs(perturbations), axis=1))))

    # NOTE(review): attribute name 'is_smaple_level' looks like a typo of
    # 'is_sample_level' — it must match its definition elsewhere; verify.
    if not self.is_smaple_level:
        # collect info.
        # (1) scale of perturbations
        # l0: mean count of changed features (|delta| > 1e-6 tolerance);
        # l1/l2: mean per-sample norms of the perturbation vectors.
        perturbations_amount_l0 = np.mean(np.sum(np.abs(perturbations) > 1e-6, axis=1))
        perturbations_amount_l1 = np.mean(np.sum(np.abs(perturbations), axis=1))
        perturbations_amount_l2 = np.mean(np.sqrt(np.sum(np.square(perturbations), axis=1)))
        msg = "Average scale of perturbations on adversarial feature vector measured by l0 norm {:.5f}, l1 norm {:.5f}, l2 norm {:.5f}"
        print(msg.format(perturbations_amount_l0, perturbations_amount_l1, perturbations_amount_l2))
        logger.info(msg.format(perturbations_amount_l0, perturbations_amount_l1, perturbations_amount_l2))
        # (2) accuracy on pristine feature vector and perturbed feature vector
        acc_prist = self.targeted_model.test_rpst(pristine_feature_vec, self.gt_labels, is_single_class=True)
        print("Accuracy on pristine features:", acc_prist)
        logger.info("Accuracy on pristine features:{:.5f}".format(acc_prist))
        acc_pert = self.targeted_model.test_rpst(adv_feature_vec, labels, is_single_class=True)
        print("Accuracy on perturbed features:", acc_pert)
        logger.info("Accuracy on perturbed features:{:.5f}".format(acc_pert))
    else:
        # Sample-level attack: rebuild actual adversarial APKs, then
        # re-evaluate on the features extracted from them.
        try:
            save_dir = os.path.join(save_dir, 'adv_apks')
            adv_features, perturbations = \
                self.generate_exc_malware_sample(perturbations, save_dir)
            test_adv_dir = cfg.config.get('attack', 'adv_sample_dir')
            if os.path.exists(test_adv_dir):
                shutil.rmtree(test_adv_dir, ignore_errors=True)
            shutil.copytree(save_dir, cfg.config.get('attack', 'adv_sample_dir'))
        except Exception as ex:
            logger.error(str(ex))
            logger.exception(ex)
            logger.error("Failed to modify the APKs.")
            return 2

        # we dump the apk information here.
        # If the malicious functionality should be checked, please run ./oracle/run_oracle.py
        # self.estimate_functionality(save_dir)  # todo

        # collect info.
        # (1) scale of perturbations
        perturbations_amount_l0 = np.mean(np.sum(np.abs(perturbations) > 1e-6, axis=1))
        perturbations_amount_l1 = np.mean(np.sum(np.abs(perturbations), axis=1))
        perturbations_amount_l2 = np.mean(np.sqrt(np.sum(np.square(perturbations), axis=1)))
        msg = "Average scale of perturbations on adversarial feature vector measured by l0 norm {:.5f}, l1 norm {:.5f}, l2 norm {:.5f}"
        print(msg.format(perturbations_amount_l0, perturbations_amount_l1, perturbations_amount_l2))
        logger.info(msg.format(perturbations_amount_l0, perturbations_amount_l1, perturbations_amount_l2))
        # (2) accuracy on pristine feature vector and perturbed feature vector
        acc_prinst = self.targeted_model.test_rpst(pristine_feature_vec, self.gt_labels, is_single_class=True)
        print("Accuracy on pristine features:", acc_prinst)
        logger.info("Accuracy on pristine features:{:.5f}".format(acc_prinst))
        acc_pert = self.targeted_model.test_rpst(adv_feature_vec, labels, is_single_class=True)
        print("Accuracy on perturbed features:", acc_pert)
        logger.info("Accuracy on perturbed features:{:.5f}".format(acc_pert))
        # (3) perturbations and accuracy on adversarial malware samples
        if adv_features is None:
            # Features were not returned by the sample generator; extract
            # them from the APK files written to save_dir instead.
            adv_apk_names = os.listdir(save_dir)
            adv_apk_paths = [os.path.join(save_dir, name) for name in adv_apk_names]
            adv_features = self.targeted_model.feature_extraction(adv_apk_paths)
        utils.dumpdata_np(adv_features, cfg.config.get('attack', 'radvX'))
        # Recompute perturbations against the features of the real samples.
        perturbations = adv_features - pristine_feature_vec
        perturbations_amount_l0 = np.mean(np.sum(np.abs(perturbations) > 1e-6, axis=1))
        perturbations_amount_l1 = np.mean(np.sum(np.abs(perturbations), axis=1))
        perturbations_amount_l2 = np.mean(np.sqrt(np.sum(np.square(perturbations), axis=1)))
        msg = "Average scale of perturbations on adversarial malware measured by l0 norm {:.5f}, l1 norm {:.5f}, l2 norm {:.5f}"
        print(msg.format(perturbations_amount_l0, perturbations_amount_l1, perturbations_amount_l2))
        logger.info(msg.format(perturbations_amount_l0, perturbations_amount_l1, perturbations_amount_l2))
        acc_adv_mal = self.targeted_model.test_rpst(adv_features, self.gt_labels, is_single_class=True)
        print("Accuracy on adversarial malware samples:", acc_adv_mal)
        logger.info("Accuracy on adversarial malware samples:{:.5f}".format(acc_adv_mal))
    return 0
def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False): """ Legacy method for building mbed libraries Function builds library in proper directory using all dependencies and macros defined by user. """ lib = Library(lib_id) if not lib.is_supported(target, toolchain_name): print 'Library "%s" is not yet supported on target %s with toolchain %s' % ( lib_id, target.name, toolchain) return False # We need to combine macros from parameter list with macros from library definition MACROS = lib.macros if lib.macros else [] if macros: macros.extend(MACROS) else: macros = MACROS src_paths = lib.source_dir build_path = lib.build_dir dependencies_paths = lib.dependencies inc_dirs = lib.inc_dirs inc_dirs_ext = lib.inc_dirs_ext """ src_path: the path of the source directory build_path: the path of the build directory target: ['LPC1768', 'LPC11U24', 'LPC2368'] toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR'] library_paths: List of paths to additional libraries clean: Rebuild everything if True notify: Notify function for logs verbose: Write the actual tools command lines if True inc_dirs: additional include directories which should be included in build inc_dirs_ext: additional include directories which should be copied to library directory """ if type(src_paths) != ListType: src_paths = [src_paths] # The first path will give the name to the library name = basename(src_paths[0]) if report != None: start = time() id_name = name.upper() description = name vendor_label = target.extra_labels[0] cur_result = None prep_report(report, target.name, toolchain_name, id_name) cur_result = create_result(target.name, toolchain_name, id_name, description) if properties != None: prep_properties(properties, target.name, toolchain_name, vendor_label) for src_path in src_paths: if not exists(src_path): error_msg = "The library source folder does not exist: %s", src_path 
if report != None: cur_result["output"] = error_msg cur_result["result"] = "FAIL" add_result_to_report(report, cur_result) raise Exception(error_msg) try: # Toolchain instance toolchain = TOOLCHAIN_CLASSES[toolchain_name]( target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose) toolchain.VERBOSE = verbose toolchain.jobs = jobs toolchain.build_all = clean toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name)) # Scan Resources resources = [] for src_path in src_paths: resources.append(toolchain.scan_resources(src_path)) # Add extra include directories / files which are required by library # This files usually are not in the same directory as source files so # previous scan will not include them if inc_dirs_ext is not None: for inc_ext in inc_dirs_ext: resources.append(toolchain.scan_resources(inc_ext)) # Dependencies Include Paths dependencies_include_dir = [] if dependencies_paths is not None: for path in dependencies_paths: lib_resources = toolchain.scan_resources(path) dependencies_include_dir.extend(lib_resources.inc_dirs) if inc_dirs: dependencies_include_dir.extend(inc_dirs) # Create the desired build directory structure bin_path = join(build_path, toolchain.obj_path) mkdir(bin_path) tmp_path = join(build_path, '.temp', toolchain.obj_path) mkdir(tmp_path) # Copy Headers for resource in resources: toolchain.copy_files(resource.headers, build_path, resources=resource) dependencies_include_dir.extend( toolchain.scan_resources(build_path).inc_dirs) # Compile Sources objects = [] for resource in resources: objects.extend( toolchain.compile_sources(resource, tmp_path, dependencies_include_dir)) needed_update = toolchain.build_library(objects, bin_path, name) if report != None and needed_update: end = time() cur_result["elapsed_time"] = end - start cur_result["output"] = toolchain.get_output() cur_result["result"] = "OK" add_result_to_report(report, cur_result) return True except Exception, e: if 
report != None: end = time() cur_result["result"] = "FAIL" cur_result["elapsed_time"] = end - start toolchain_output = toolchain.get_output() if toolchain_output: cur_result["output"] += toolchain_output add_result_to_report(report, cur_result) # Let Exception propagate raise e
def build_project(src_paths, build_path, target, toolchain_name, libraries_paths=None, linker_script=None, clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None, jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None, extra_verbose=False, config=None, app_config=None, build_profile=None): """ Build a project. A project may be a test or a user program. Positional arguments: src_paths - a path or list of paths that contain all files needed to build the project build_path - the directory where all of the object files will be placed target - the MCU or board that the project will compile for toolchain_name - the name of the build tools Keyword arguments: libraries_paths - The location of libraries to include when linking linker_script - the file that drives the linker to do it's job clean - Rebuild everything if True notify - Notify function for logs verbose - Write the actual tools command lines used if True name - the name of the project macros - additional macros inc_dirs - additional directories where include files may be found jobs - how many compilers we can run at once silent - suppress printing of progress indicators report - a dict where a result may be appended properties - UUUUHHHHH beats me project_id - the name put in the report project_description - the human-readable version of what this thing does extra_verbose - even more output! 
config - a Config object to use instead of creating one app_config - location of a chosen mbed_app.json file build_profile - a dict of flags that will be passed to the compiler """ # Convert src_path to a list if needed if type(src_paths) != ListType: src_paths = [src_paths] # Extend src_paths wiht libraries_paths if libraries_paths is not None: src_paths.extend(libraries_paths) inc_dirs.extend(map(dirname, libraries_paths)) # Build Directory if clean and exists(build_path): rmtree(build_path) mkdir(build_path) # Pass all params to the unified prepare_toolchain() toolchain = prepare_toolchain( src_paths, build_path, target, toolchain_name, macros=macros, clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose, config=config, app_config=app_config, build_profile=build_profile) # The first path will give the name to the library if name is None: name = basename(normpath(abspath(src_paths[0]))) toolchain.info("Building project %s (%s, %s)" % (name, toolchain.target.name, toolchain_name)) # Initialize reporting if report != None: start = time() # If project_id is specified, use that over the default name id_name = project_id.upper() if project_id else name.upper() description = project_description if project_description else name vendor_label = toolchain.target.extra_labels[0] prep_report(report, toolchain.target.name, toolchain_name, id_name) cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description) if properties != None: prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label) try: # Call unified scan_resources resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs) # Change linker script if specified if linker_script is not None: resources.linker_script = linker_script # Compile Sources objects = toolchain.compile_sources(resources, resources.inc_dirs) resources.objects.extend(objects) # Link Program if toolchain.config.has_regions: res, _ = 
toolchain.link_program(resources, build_path, name + "_application") region_list = list(toolchain.config.regions) region_list = [r._replace(filename=res) if r.active else r for r in region_list] res = join(build_path, name) + ".bin" merge_region_list(region_list, res) else: res, _ = toolchain.link_program(resources, build_path, name) memap_instance = getattr(toolchain, 'memap_instance', None) memap_table = '' if memap_instance: # Write output to stdout in text (pretty table) format memap_table = memap_instance.generate_output('table') if not silent: print memap_table # Write output to file in JSON format map_out = join(build_path, name + "_map.json") memap_instance.generate_output('json', map_out) # Write output to file in CSV format for the CI map_csv = join(build_path, name + "_map.csv") memap_instance.generate_output('csv-ci', map_csv) resources.detect_duplicates(toolchain) if report != None: end = time() cur_result["elapsed_time"] = end - start cur_result["output"] = toolchain.get_output() + memap_table cur_result["result"] = "OK" cur_result["memory_usage"] = toolchain.map_outputs add_result_to_report(report, cur_result) return res except Exception as exc: if report != None: end = time() if isinstance(exc, NotSupportedException): cur_result["result"] = "NOT_SUPPORTED" else: cur_result["result"] = "FAIL" cur_result["elapsed_time"] = end - start toolchain_output = toolchain.get_output() if toolchain_output: cur_result["output"] += toolchain_output add_result_to_report(report, cur_result) # Let Exception propagate raise
def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
                                 cppcheck_cmd, cppcheck_msg_format,
                                 dependencies_paths=None, options=None,
                                 name=None, clean=False, notify=None,
                                 verbose=False, macros=None, jobs=1,
                                 extra_verbose=False):
    """ Function scans library (or just some set of sources/headers) for
    staticly detectable defects.

    Copies the library headers into build_path, gathers include paths and
    toolchain macros, then invokes cppcheck (via cppcheck_cmd) with the
    collected C/C++ source list passed through a temporary --file-list file.

    NOTE(review): the 'name' and 'macros' parameters are overwritten below
    and the 'clean'/'notify' parameters are unused; the NamedTemporaryFile
    created with delete=False is never removed afterwards.
    """
    if type(src_paths) != ListType:
        src_paths = [src_paths]

    for src_path in src_paths:
        if not exists(src_path):
            raise Exception("The library source folder does not exist: %s", src_path)

    # Toolchain instance
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs

    # The first path will give the name to the library
    name = basename(src_paths[0])

    toolchain.info("Static analysis for library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))

    # Scan Resources
    resources = []
    for src_path in src_paths:
        resources.append(toolchain.scan_resources(src_path))

    # Dependencies Include Paths
    dependencies_include_dir = []
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            dependencies_include_dir.extend(lib_resources.inc_dirs)

    # Create the desired build directory structure
    bin_path = join(build_path, toolchain.obj_path)
    mkdir(bin_path)
    tmp_path = join(build_path, '.temp', toolchain.obj_path)
    mkdir(tmp_path)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
    includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
    c_sources = " "
    cpp_sources = " "
    # Rebinds the 'macros' parameter into cppcheck -D flags
    macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]

    # Copy Headers
    for resource in resources:
        toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
        includes += ["-I%s" % i for i in resource.inc_dirs]
        c_sources += " ".join(resource.c_sources) + " "
        cpp_sources += " ".join(resource.cpp_sources) + " "

    dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)

    includes = map(str.strip, includes)
    macros = map(str.strip, macros)

    # Assemble the full cppcheck command line as a list of arguments
    check_cmd = cppcheck_cmd
    check_cmd += cppcheck_msg_format
    check_cmd += includes
    check_cmd += macros

    # We need to pass some parameters via file to avoid "command line too long in some OSs"
    # Temporary file is created to store e.g. cppcheck list of files for command line
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s"% tmp_file.name]

    # This will allow us to grab result from both stdio and stderr outputs (so we can show them)
    # We assume static code analysis tool is outputting defects on STDERR
    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr
def build_library(src_paths, build_path, target, toolchain_name,
                  dependencies_paths=None, name=None, clean=False,
                  archive=True, notify=None, verbose=False, macros=None,
                  inc_dirs=None, jobs=1, silent=False, report=None,
                  properties=None, extra_verbose=False, project_id=None,
                  remove_config_header_file=False, app_config=None,
                  build_profile=None):
    """ Build a library

    Positional arguments:
    src_paths - a path or list of paths that contain all files needed to build
                the library
    build_path - the directory where all of the object files will be placed
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    dependencies_paths - The location of libraries to include when linking
    name - the name of the library
    clean - Rebuild everything if True
    archive - whether the library will create an archive file
    notify - Notify function for logs
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    inc_dirs - additional directories where include files may be found
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    project_id - the name that goes in the report
    remove_config_header_file - delete config header file when done building
    app_config - location of a chosen mbed_app.json file
    build_profile - a dict of flags that will be passed to the compiler

    Returns True on success; raises on failure (after recording a report
    entry when 'report' is given).
    """

    # Convert src_path to a list if needed
    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # Build path
    if archive:
        # Use temp path when building archive
        tmp_path = join(build_path, '.temp')
        mkdir(tmp_path)
    else:
        tmp_path = build_path

    # Clean the build directory
    if clean and exists(tmp_path):
        rmtree(tmp_path)
    mkdir(tmp_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        src_paths, build_path, target, toolchain_name, macros=macros,
        clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose,
        extra_verbose=extra_verbose, app_config=app_config,
        build_profile=build_profile)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))

    toolchain.info("Building library %s (%s, %s)" %
                   (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report != None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name,
                                   id_name, description)
        if properties != None:
            prep_properties(properties, toolchain.target.name, toolchain_name,
                            vendor_label)

    # Validate all source folders before doing any work
    for src_path in src_paths:
        if not exists(src_path):
            error_msg = "The library source folder does not exist: %s", src_path
            if report != None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain,
                                   dependencies_paths=dependencies_paths,
                                   inc_dirs=inc_dirs)

        # Copy headers, objects and static libraries - all files needed for
        # static lib
        toolchain.copy_files(resources.headers, build_path, resources=resources)
        toolchain.copy_files(resources.objects, build_path, resources=resources)
        toolchain.copy_files(resources.libraries, build_path, resources=resources)
        toolchain.copy_files(resources.json_files, build_path, resources=resources)
        if resources.linker_script:
            toolchain.copy_files(resources.linker_script, build_path, resources=resources)

        if resources.hex_files:
            toolchain.copy_files(resources.hex_files, build_path, resources=resources)

        # Compile Sources
        objects = toolchain.compile_sources(resources, resources.inc_dirs)
        resources.objects.extend(objects)

        if archive:
            toolchain.build_library(objects, build_path, name)

        if remove_config_header_file:
            config_header_path = toolchain.get_config_header()
            if config_header_path:
                remove(config_header_path)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)
        return True

    except Exception as exc:
        if report != None:
            end = time()
            # NOTE(review): exceptions other than ToolException /
            # NotSupportedException leave cur_result["result"] unset here —
            # presumably create_result() provides a default; verify.
            if isinstance(exc, ToolException):
                cur_result["result"] = "FAIL"
            elif isinstance(exc, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            cur_result["elapsed_time"] = end - start
            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output
            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
def build_project(src_paths, build_path, target, toolchain_name,
                  libraries_paths=None, options=None, linker_script=None,
                  clean=False, notify=None, verbose=False, name=None,
                  macros=None, inc_dirs=None, jobs=1, silent=False,
                  report=None, properties=None, project_id=None,
                  project_description=None, extra_verbose=False, config=None):
    """ This function builds project. Project can be for example one test / UT

    Scans src_paths (plus any libraries_paths) via the unified
    scan_resources(), compiles into build_path with the toolchain built by
    prepare_toolchain(), links, and returns the linked program path.
    When 'report' is given a result entry is always appended, on success
    and on failure.
    """

    # Convert src_path to a list if needed
    if type(src_paths) != ListType:
        src_paths = [src_paths]
    # Extend src_paths wiht libraries_paths
    if libraries_paths is not None:
        src_paths.extend(libraries_paths)

    # Build Directory
    if clean:
        if exists(build_path):
            rmtree(build_path)
    mkdir(build_path)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(src_paths, target, toolchain_name,
                                  macros=macros, options=options, clean=clean,
                                  jobs=jobs, notify=notify, silent=silent,
                                  verbose=verbose,
                                  extra_verbose=extra_verbose, config=config)

    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))

    toolchain.info("Building project %s (%s, %s)" % (name, toolchain.target.name, toolchain_name))

    # Initialize reporting
    if report != None:
        start = time()
        # If project_id is specified, use that over the default name
        id_name = project_id.upper() if project_id else name.upper()
        description = project_description if project_description else name
        vendor_label = toolchain.target.extra_labels[0]
        prep_report(report, toolchain.target.name, toolchain_name, id_name)
        cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description)
        if properties != None:
            prep_properties(properties, toolchain.target.name, toolchain_name, vendor_label)

    try:
        # Call unified scan_resources
        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)

        # Change linker script if specified
        if linker_script is not None:
            resources.linker_script = linker_script

        # Compile Sources
        objects = toolchain.compile_sources(resources, build_path, resources.inc_dirs)
        resources.objects.extend(objects)

        # Link Program
        res, _ = toolchain.link_program(resources, build_path, name)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"
            cur_result["memory_usage"] = toolchain.map_outputs
            add_result_to_report(report, cur_result)

        return res

    # Python 2-only except syntax, kept as-is to match this function's era
    except Exception, e:
        if report != None:
            end = time()
            if isinstance(e, NotSupportedException):
                cur_result["result"] = "NOT_SUPPORTED"
            else:
                cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start
            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output
            add_result_to_report(report, cur_result)
        # Let Exception propagate
        raise
def build_lib(lib_id, target, toolchain_name, verbose=False,
              clean=False, macros=None, notify=None, jobs=1, silent=False,
              report=None, properties=None, extra_verbose=False,
              build_profile=None):
    """ Legacy method for building mbed libraries

    Returns True when the library was built (or False when the
    target/toolchain pair is unsupported).  On any build error the
    exception is recorded in `report` (when given) and re-raised.

    Positional arguments:
    lib_id - the library's unique identifier
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    clean - Rebuild everything if True
    verbose - Write the actual tools command lines used if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """
    lib = Library(lib_id)
    if not lib.is_supported(target, toolchain_name):
        print('Library "%s" is not yet supported on target %s with toolchain %s'
              % (lib_id, target.name, toolchain_name))
        return False

    # We need to combine macros from parameter list with macros from library
    # definition
    # NOTE(review): when `macros` is passed, the caller's list is extended
    # in place.
    lib_macros = lib.macros if lib.macros else []
    if macros:
        macros.extend(lib_macros)
    else:
        macros = lib_macros

    src_paths = lib.source_dir
    build_path = lib.build_dir
    dependencies_paths = lib.dependencies
    inc_dirs = lib.inc_dirs
    inc_dirs_ext = lib.inc_dirs_ext

    if type(src_paths) != ListType:
        src_paths = [src_paths]

    # The first path will give the name to the library
    name = basename(src_paths[0])

    if report != None:
        start = time()
        id_name = name.upper()
        description = name
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)
        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    for src_path in src_paths:
        if not exists(src_path):
            # FIX: was `error_msg = "...: %s", src_path` which builds a
            # (format, value) tuple instead of a formatted message, so the
            # report and the raised exception carried a tuple.
            error_msg = "The library source folder does not exist: %s" % src_path
            if report != None:
                cur_result["output"] = error_msg
                cur_result["result"] = "FAIL"
                add_result_to_report(report, cur_result)
            raise Exception(error_msg)

    try:
        # Toolchain instance
        # Create the desired build directory structure
        bin_path = join(build_path, mbed2_obj_path(target.name, toolchain_name))
        mkdir(bin_path)
        tmp_path = join(build_path, '.temp',
                        mbed2_obj_path(target.name, toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            src_paths, tmp_path, target, toolchain_name, macros=macros,
            notify=notify, silent=silent, extra_verbose=extra_verbose,
            build_profile=build_profile, jobs=jobs, clean=clean)

        toolchain.info("Building library %s (%s, %s)" %
                       (name.upper(), target.name, toolchain_name))

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])

        # Scan Resources
        resources = []
        for src_path in src_paths:
            resources.append(toolchain.scan_resources(src_path))

        # Add extra include directories / files which are required by library
        # This files usually are not in the same directory as source files so
        # previous scan will not include them
        if inc_dirs_ext is not None:
            for inc_ext in inc_dirs_ext:
                resources.append(toolchain.scan_resources(inc_ext))

        # Dependencies Include Paths
        dependencies_include_dir = []
        if dependencies_paths is not None:
            for path in dependencies_paths:
                lib_resources = toolchain.scan_resources(path)
                dependencies_include_dir.extend(lib_resources.inc_dirs)
                dependencies_include_dir.extend(
                    map(dirname, lib_resources.inc_dirs))

        if inc_dirs:
            dependencies_include_dir.extend(inc_dirs)

        # Add other discovered configuration data to the configuration object
        for res in resources:
            config.load_resources(res)
        toolchain.set_config_data(toolchain.config.get_config_data())

        # Copy Headers
        for resource in resources:
            toolchain.copy_files(resource.headers, build_path,
                                 resources=resource)

        dependencies_include_dir.extend(
            toolchain.scan_resources(build_path).inc_dirs)

        # Compile Sources
        objects = []
        for resource in resources:
            objects.extend(toolchain.compile_sources(resource,
                                                     dependencies_include_dir))

        needed_update = toolchain.build_library(objects, bin_path, name)

        # Only report success when the archive was actually (re)built.
        if report != None and needed_update:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)
        return True

    except Exception:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            # NOTE(review): if the failure happened before `toolchain` was
            # assigned (e.g. in mkdir), this raises NameError -- confirm
            # whether that path is reachable in practice.
            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
zip = True clean = True # source_dir = use relative paths, otherwise sources are copied sources_relative = True if options.source_dir else False for mcu in mcus.split(','): # Program Number or name p, n, src, ide = options.program, options.program_name, options.source_dir, options.ide if src is not None: # --source is used to generate IDE files to toolchain directly in the source tree and doesn't generate zip file project_dir = options.source_dir project_name = n if n else "Unnamed_Project" project_temp = path.join(options.source_dir[0], 'projectfiles', ide) mkdir(project_temp) lib_symbols = [] if options.macros: lib_symbols += options.macros zip = False # don't create zip clean = False # don't cleanup because we use the actual source tree to generate IDE files else: if n is not None and p is not None: args_error(parser, "[ERROR] specify either '-n' or '-p', not both") if n: if not n in TEST_MAP.keys(): # Check if there is an alias for this in private_settings.py if getattr(ps, "test_alias", None) is not None: alias = ps.test_alias.get(n, "") if not alias in TEST_MAP.keys(): args_error(parser, "[ERROR] Program with name '%s' not found" % n)
verbose=options.verbose, notify=notify, jobs=options.jobs, continue_on_build_fail=options.continue_on_build_fail, app_config=options.app_config, build_profile=profile) # If a path to a test spec is provided, write it to a file if options.test_spec: test_spec_data = test_spec_from_test_builds(test_build) # Create the target dir for the test spec if necessary # mkdir will not create the dir if it already exists test_spec_dir = os.path.dirname(options.test_spec) if test_spec_dir: mkdir(test_spec_dir) try: with open(options.test_spec, 'w') as f: f.write(json.dumps(test_spec_data, indent=2)) except IOError, e: print "[ERROR] Error writing test spec to file" print e # If a path to a JUnit build report spec is provided, write it to a file if options.build_report_junit: report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build") report_exporter.report_to_file(build_report, options.build_report_junit, test_suite_properties=build_properties) # Print memory map summary on screen if build_report:
def build_mbed_libs(target, toolchain_name, options=None, verbose=False,
                    clean=False, macros=None, notify=None, jobs=1, silent=False,
                    report=None, properties=None, extra_verbose=False):
    """ Build the mbed SDK (CMSIS + HAL + common sources) for one
    target/toolchain pair.

    Returns True if the library was built and False if building was skipped
    (unsupported toolchain).  Build errors are recorded in `report` (when
    given) and re-raised.
    """

    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name, description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name, vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
        print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Toolchain
        toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
        toolchain.VERBOSE = verbose
        toolchain.jobs = jobs
        toolchain.build_all = clean

        # Source and Build Paths
        BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
        BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
        mkdir(BUILD_TOOLCHAIN)

        TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
        mkdir(TMP_PATH)

        # CMSIS
        toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
        cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
        resources = toolchain.scan_resources(cmsis_src)

        toolchain.copy_files(resources.headers, BUILD_TARGET)
        toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
        toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)

        objects = toolchain.compile_sources(resources, TMP_PATH)
        toolchain.copy_files(objects, BUILD_TOOLCHAIN)

        # mbed
        toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
        toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)

        # Target specific sources
        HAL_SRC = join(MBED_TARGETS_PATH, "hal")
        hal_implementation = toolchain.scan_resources(HAL_SRC)
        toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
        incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
        objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)

        # Common Sources
        mbed_resources = toolchain.scan_resources(MBED_COMMON)
        objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)

        # A number of compiled files need to be copied as objects as opposed to
        # being part of the mbed library, for reasons that have to do with the way
        # the linker search for symbols in archives. These are:
        #   - retarget.o: to make sure that the C standard lib symbols get overridden
        #   - board.o: mbed_die is weak
        #   - mbed_overrides.o: this contains platform overrides of various weak SDK functions
        separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []

        for o in objects:
            for name in separate_names:
                if o.endswith(name):
                    separate_objects.append(o)

        # Remove the separated objects from the archive input list
        for o in separate_objects:
            objects.remove(o)

        toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")

        # Ship the separated objects next to the archive instead
        for o in separate_objects:
            toolchain.copy_files(o, BUILD_TOOLCHAIN)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)

        return True

    except Exception, e:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(e)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise e
def __gen_dir(self, dirname): settings = join(self.export_dir, dirname) mkdir(settings)
def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
    """Run cppcheck over the CMSIS sources and then over the mbed SDK
    (HAL + common sources) for the given target/toolchain pair, printing
    the tool output when verbose is set."""
    # Toolchain
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs
    toolchain.build_all = clean

    # Source and Build Paths
    BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
    BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
    mkdir(BUILD_TOOLCHAIN)

    TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
    mkdir(TMP_PATH)

    # CMSIS
    toolchain.info("Static analysis for %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
    cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
    resources = toolchain.scan_resources(cmsis_src)

    # Copy files before analysis
    toolchain.copy_files(resources.headers, BUILD_TARGET)
    toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
    includes = ["-I%s"% i for i in resources.inc_dirs]
    includes.append("-I%s"% str(BUILD_TARGET))
    c_sources = " ".join(resources.c_sources)
    cpp_sources = " ".join(resources.cpp_sources)
    macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]

    includes = map(str.strip, includes)
    macros = map(str.strip, macros)

    check_cmd = CPPCHECK_CMD
    check_cmd += CPPCHECK_MSG_FORMAT
    check_cmd += includes
    check_cmd += macros

    # We need to pass some params via file to avoid "command line too long in some OSs"
    # NOTE(review): delete=False and the file is never unlinked afterwards --
    # each scan leaks one temp file; confirm whether that is intentional.
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s"% tmp_file.name]

    _stdout, _stderr, _rc = run_cmd(check_cmd)
    if verbose:
        print _stdout
        print _stderr

    # =========================================================================

    # MBED
    toolchain.info("Static analysis for %s (%s, %s)" % ('MBED', target.name, toolchain_name))

    # Common Headers
    toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
    toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)

    # Target specific sources
    HAL_SRC = join(MBED_TARGETS_PATH, "hal")
    hal_implementation = toolchain.scan_resources(HAL_SRC)

    # Copy files before analysis
    toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files, BUILD_TARGET, HAL_SRC)
    incdirs = toolchain.scan_resources(BUILD_TARGET)

    target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
    target_includes.append("-I%s"% str(BUILD_TARGET))
    target_includes.append("-I%s"% str(HAL_SRC))
    target_c_sources = " ".join(incdirs.c_sources)
    target_cpp_sources = " ".join(incdirs.cpp_sources)
    target_macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]

    # Common Sources
    mbed_resources = toolchain.scan_resources(MBED_COMMON)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
    mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
    mbed_includes.append("-I%s"% str(BUILD_TARGET))
    mbed_includes.append("-I%s"% str(MBED_COMMON))
    mbed_includes.append("-I%s"% str(MBED_API))
    mbed_includes.append("-I%s"% str(MBED_HAL))
    mbed_c_sources = " ".join(mbed_resources.c_sources)
    mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)

    target_includes = map(str.strip, target_includes)
    mbed_includes = map(str.strip, mbed_includes)
    target_macros = map(str.strip, target_macros)

    check_cmd = CPPCHECK_CMD
    check_cmd += CPPCHECK_MSG_FORMAT
    check_cmd += target_includes
    check_cmd += mbed_includes
    check_cmd += target_macros

    # We need to pass some parameters via file to avoid "command line too long in some OSs"
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in target_c_sources.split())
    tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s"% tmp_file.name]

    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr
macros=options.macros, verbose=options.verbose, notify=notify, jobs=options.jobs, continue_on_build_fail=options.continue_on_build_fail, app_config=options.app_config) # If a path to a test spec is provided, write it to a file if options.test_spec: test_spec_data = test_spec_from_test_builds(test_build) # Create the target dir for the test spec if necessary # mkdir will not create the dir if it already exists test_spec_dir = os.path.dirname(options.test_spec) if test_spec_dir: mkdir(test_spec_dir) try: with open(options.test_spec, 'w') as f: f.write(json.dumps(test_spec_data, indent=2)) except IOError, e: print "[ERROR] Error writing test spec to file" print e # If a path to a JUnit build report spec is provided, write it to a file if options.build_report_junit: report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build") report_exporter.report_to_file( build_report, options.build_report_junit,
def build_mbed_libs(target, toolchain_name, verbose=False,
                    clean=False, macros=None, notify=None, jobs=1, silent=False,
                    report=None, properties=None, extra_verbose=False,
                    build_profile=None):
    """ Function returns True if the library was built and False if building
    was skipped (unsupported toolchain)

    Positional arguments:
    target - the MCU or board that the project will compile for
    toolchain_name - the name of the build tools

    Keyword arguments:
    verbose - Write the actual tools command lines used if True
    clean - Rebuild everything if True
    macros - additional macros
    notify - Notify function for logs
    jobs - how many compilers we can run at once
    silent - suppress printing of progress indicators
    report - a dict where a result may be appended
    properties - UUUUHHHHH beats me
    extra_verbose - even more output!
    build_profile - a dict of flags that will be passed to the compiler
    """

    if report != None:
        start = time()
        id_name = "MBED"
        description = "mbed SDK"
        vendor_label = target.extra_labels[0]
        cur_result = None
        prep_report(report, target.name, toolchain_name, id_name)
        cur_result = create_result(target.name, toolchain_name, id_name,
                                   description)

        if properties != None:
            prep_properties(properties, target.name, toolchain_name,
                            vendor_label)

    # Check toolchain support
    if toolchain_name not in target.supported_toolchains:
        supported_toolchains_text = ", ".join(target.supported_toolchains)
        print('%s target is not yet supported by toolchain %s' % (target.name, toolchain_name))
        print('%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else ''))

        if report != None:
            cur_result["result"] = "SKIP"
            add_result_to_report(report, cur_result)

        return False

    try:
        # Source and Build Paths
        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
        build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
        mkdir(build_toolchain)

        # Toolchain
        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
        mkdir(tmp_path)

        toolchain = prepare_toolchain(
            [""], tmp_path, target, toolchain_name, macros=macros,
            notify=notify, silent=silent, extra_verbose=extra_verbose,
            build_profile=build_profile, jobs=jobs, clean=clean)

        # Take into account the library configuration (MBED_CONFIG_FILE)
        config = toolchain.config
        config.add_config_files([MBED_CONFIG_FILE])
        toolchain.set_config_data(toolchain.config.get_config_data())

        # CMSIS
        toolchain.info("Building library %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
        cmsis_src = MBED_CMSIS_PATH
        resources = toolchain.scan_resources(cmsis_src)

        toolchain.copy_files(resources.headers, build_target)
        toolchain.copy_files(resources.linker_script, build_toolchain)
        toolchain.copy_files(resources.bin_files, build_toolchain)

        objects = toolchain.compile_sources(resources, tmp_path)
        toolchain.copy_files(objects, build_toolchain)

        # mbed
        toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))

        # Common Headers
        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
        library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

        for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                          (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                          (MBED_HAL, MBED_LIBRARIES_HAL)]:
            resources = toolchain.scan_resources(dir)
            toolchain.copy_files(resources.headers, dest)
            library_incdirs.append(dest)

        # Target specific sources
        hal_src = MBED_TARGETS_PATH
        hal_implementation = toolchain.scan_resources(hal_src)
        toolchain.copy_files(hal_implementation.headers +
                             hal_implementation.hex_files +
                             hal_implementation.libraries +
                             [MBED_CONFIG_FILE],
                             build_target, resources=hal_implementation)
        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
        incdirs = toolchain.scan_resources(build_target).inc_dirs
        objects = toolchain.compile_sources(hal_implementation,
                                            library_incdirs + incdirs)
        toolchain.copy_files(objects, build_toolchain)

        # Common Sources
        # NOTE(review): starting from None and using += relies on the
        # Resources type accepting None in its reflected add -- confirm
        # against the Resources class before touching this.
        mbed_resources = None
        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
            mbed_resources += toolchain.scan_resources(dir)

        objects = toolchain.compile_sources(mbed_resources,
                                            library_incdirs + incdirs)

        # A number of compiled files need to be copied as objects as opposed
        # to being part of the mbed library, for reasons that have to do with
        # the way the linker search for symbols in archives. These are:
        # - retarget.o: to make sure that the C standard lib symbols get
        #   overridden
        # - board.o: mbed_die is weak
        # - mbed_overrides.o: this contains platform overrides of various
        #   weak SDK functions
        separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []

        for obj in objects:
            for name in separate_names:
                if obj.endswith(name):
                    separate_objects.append(obj)

        # Remove the separated objects from the archive input list
        for obj in separate_objects:
            objects.remove(obj)

        toolchain.build_library(objects, build_toolchain, "mbed")

        # Ship the separated objects next to the archive instead
        for obj in separate_objects:
            toolchain.copy_files(obj, build_toolchain)

        if report != None:
            end = time()
            cur_result["elapsed_time"] = end - start
            cur_result["output"] = toolchain.get_output()
            cur_result["result"] = "OK"
            add_result_to_report(report, cur_result)

        return True

    except Exception as exc:
        if report != None:
            end = time()
            cur_result["result"] = "FAIL"
            cur_result["elapsed_time"] = end - start

            toolchain_output = toolchain.get_output()
            if toolchain_output:
                cur_result["output"] += toolchain_output

            cur_result["output"] += str(exc)

            add_result_to_report(report, cur_result)

        # Let Exception propagate
        raise
line_acc_arr.append(line_acc) min_dis += min_dis_ total_dis += total_dis_ if i % 100 == 0: batch100_loss = np.mean(loss_arr) print( f'\nepoch:{epoch} step:{i} loss:{batch100_loss} time:{time.time()-start_time} \ line_acc:{np.mean(line_acc_arr)} acc:{min_dis}/{total_dis} {(total_dis-min_dis)/total_dis if not total_dis == 0 else 0}' ) print(f'pred:{pred_text[0]}') print(f'true:{text[0]}') start_time = time.time() loss_arr = [] line_acc_arr = [] min_dis = 0 total_dis = 0 if i % train_cfg['save_step'] == 0 and i > 0: model.eval() test_loader_dict = dataloader.get_test_loader(data_cfg, char2index) Eval.valid(model, model_cfg, test_loader_dict, index2char, device) model.train() save_path = os.path.join(train_cfg['workdir'], 'checkpoints') utils.mkdir(save_path) torch.save(model.state_dict(), f'{save_path}/{model_cfg["method"]}_{epoch}_{i}.pth') if i % train_cfg['decay_steps'] == 0: adjust_learning_rate(optimizer, global_step)
def __gen_dir(self, dirname): settings = join(self.inputDir, dirname) mkdir(settings)
def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD,
                         CPPCHECK_MSG_FORMAT, options=None, verbose=False,
                         clean=False, macros=None, notify=None, jobs=1,
                         extra_verbose=False):
    """Run cppcheck over the CMSIS sources and then over the mbed SDK
    (HAL + common sources) for the given target/toolchain pair, printing
    the tool output when verbose is set."""
    # Toolchain
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options,
                                                  macros=macros, notify=notify,
                                                  extra_verbose=extra_verbose)
    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs
    toolchain.build_all = clean

    # Source and Build Paths
    BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
    BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
    mkdir(BUILD_TOOLCHAIN)

    TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
    mkdir(TMP_PATH)

    # CMSIS
    toolchain.info("Static analysis for %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
    cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
    resources = toolchain.scan_resources(cmsis_src)

    # Copy files before analysis
    toolchain.copy_files(resources.headers, BUILD_TARGET)
    toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
    includes = ["-I%s" % i for i in resources.inc_dirs]
    includes.append("-I%s" % str(BUILD_TARGET))
    c_sources = " ".join(resources.c_sources)
    cpp_sources = " ".join(resources.cpp_sources)
    macros = ["-D%s" % s for s in toolchain.get_symbols() + toolchain.macros]

    includes = map(str.strip, includes)
    macros = map(str.strip, macros)

    check_cmd = CPPCHECK_CMD
    check_cmd += CPPCHECK_MSG_FORMAT
    check_cmd += includes
    check_cmd += macros

    # We need to pass some params via file to avoid "command line too long in some OSs"
    # NOTE(review): delete=False and the file is never unlinked afterwards --
    # each scan leaks one temp file; confirm whether that is intentional.
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    _stdout, _stderr, _rc = run_cmd(check_cmd)
    if verbose:
        print _stdout
        print _stderr

    # =========================================================================

    # MBED
    toolchain.info("Static analysis for %s (%s, %s)" % ('MBED', target.name, toolchain_name))

    # Common Headers
    toolchain.copy_files(
        toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
    toolchain.copy_files(
        toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)

    # Target specific sources
    HAL_SRC = join(MBED_TARGETS_PATH, "hal")
    hal_implementation = toolchain.scan_resources(HAL_SRC)

    # Copy files before analysis
    toolchain.copy_files(hal_implementation.headers +
                         hal_implementation.hex_files, BUILD_TARGET,
                         resources=hal_implementation)
    incdirs = toolchain.scan_resources(BUILD_TARGET)

    target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
    target_includes.append("-I%s" % str(BUILD_TARGET))
    target_includes.append("-I%s" % str(HAL_SRC))
    target_c_sources = " ".join(incdirs.c_sources)
    target_cpp_sources = " ".join(incdirs.cpp_sources)
    target_macros = [
        "-D%s" % s for s in toolchain.get_symbols() + toolchain.macros
    ]

    # Common Sources
    mbed_resources = toolchain.scan_resources(MBED_COMMON)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
    mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
    mbed_includes.append("-I%s" % str(BUILD_TARGET))
    mbed_includes.append("-I%s" % str(MBED_COMMON))
    mbed_includes.append("-I%s" % str(MBED_API))
    mbed_includes.append("-I%s" % str(MBED_HAL))
    mbed_c_sources = " ".join(mbed_resources.c_sources)
    mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)

    target_includes = map(str.strip, target_includes)
    mbed_includes = map(str.strip, mbed_includes)
    target_macros = map(str.strip, target_macros)

    check_cmd = CPPCHECK_CMD
    check_cmd += CPPCHECK_MSG_FORMAT
    check_cmd += target_includes
    check_cmd += mbed_includes
    check_cmd += target_macros

    # We need to pass some parameters via file to avoid "command line too long in some OSs"
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in target_c_sources.split())
    tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
    tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr
def __gen_dir(self, dir_name): """ Method that creates directory """ settings = join(self.export_dir, dir_name) mkdir(settings)
def static_analysis_scan_library(src_paths, build_path, target, toolchain_name,
                                 cppcheck_cmd, cppcheck_msg_format,
                                 dependencies_paths=None, options=None,
                                 name=None, clean=False, notify=None,
                                 verbose=False, macros=None, jobs=1,
                                 extra_verbose=False):
    """ Function scans library (or just some set of sources/headers) for
    staticly detectable defects using cppcheck; tool output is printed when
    verbose is set. """
    if type(src_paths) != ListType:
        src_paths = [src_paths]

    for src_path in src_paths:
        if not exists(src_path):
            # NOTE(review): passing two args to Exception makes str() of the
            # exception render as a tuple rather than a formatted message.
            raise Exception("The library source folder does not exist: %s", src_path)

    # Toolchain instance
    toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
    toolchain.VERBOSE = verbose
    toolchain.jobs = jobs

    # The first path will give the name to the library
    # (the `name` parameter is overwritten here)
    name = basename(src_paths[0])
    toolchain.info("Static analysis for library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))

    # Scan Resources
    resources = []
    for src_path in src_paths:
        resources.append(toolchain.scan_resources(src_path))

    # Dependencies Include Paths
    dependencies_include_dir = []
    if dependencies_paths is not None:
        for path in dependencies_paths:
            lib_resources = toolchain.scan_resources(path)
            dependencies_include_dir.extend(lib_resources.inc_dirs)

    # Create the desired build directory structure
    bin_path = join(build_path, toolchain.obj_path)
    mkdir(bin_path)
    tmp_path = join(build_path, '.temp', toolchain.obj_path)
    mkdir(tmp_path)

    # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
    includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
    c_sources = " "
    cpp_sources = " "
    macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]

    # Copy Headers
    for resource in resources:
        toolchain.copy_files(resource.headers, build_path, resources=resource)
        includes += ["-I%s" % i for i in resource.inc_dirs]
        c_sources += " ".join(resource.c_sources) + " "
        cpp_sources += " ".join(resource.cpp_sources) + " "

    dependencies_include_dir.extend(
        toolchain.scan_resources(build_path).inc_dirs)

    includes = map(str.strip, includes)
    macros = map(str.strip, macros)

    check_cmd = cppcheck_cmd
    check_cmd += cppcheck_msg_format
    check_cmd += includes
    check_cmd += macros

    # We need to pass some parameters via file to avoid "command line too long in some OSs"
    # Temporary file is created to store e.g. cppcheck list of files for command line
    # NOTE(review): delete=False and the file is never unlinked afterwards --
    # confirm whether the leak is intentional.
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    tmp_file.writelines(line + '\n' for line in c_sources.split())
    tmp_file.writelines(line + '\n' for line in cpp_sources.split())
    tmp_file.close()
    check_cmd += ["--file-list=%s" % tmp_file.name]

    # This will allow us to grab result from both stdio and stderr outputs (so we can show them)
    # We assume static code analysis tool is outputting defects on STDERR
    _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
    if verbose:
        print _stdout
        print _stderr
def relative_object_path(self, build_path, base_dir, source): source_dir, name, _ = split_path(source) obj_dir = join(build_path, relpath(source_dir, base_dir)) mkdir(obj_dir) return join(obj_dir, name + ".o")
def build_library(src_paths, build_path, target, toolchain_name, dependencies_paths=None, options=None, name=None, clean=False, archive=True, notify=None, verbose=False, macros=None, inc_dirs=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False, project_id=None): """ Prepares resource related objects - toolchain, target, config src_paths: the paths to source directories build_path: the path of the build directory target: ['LPC1768', 'LPC11U24', 'LPC2368'] toolchain_name: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR'] clean: Rebuild everything if True notify: Notify function for logs verbose: Write the actual tools command lines if True inc_dirs: additional include directories which should be included in build """ # Convert src_path to a list if needed if type(src_paths) != ListType: src_paths = [src_paths] # Build path if archive: # Use temp path when building archive tmp_path = join(build_path, '.temp') mkdir(tmp_path) else: tmp_path = build_path # Clean the build directory if clean: if exists(tmp_path): rmtree(tmp_path) mkdir(tmp_path) # Pass all params to the unified prepare_toolchain() toolchain = prepare_toolchain(src_paths, target, toolchain_name, macros=macros, options=options, clean=clean, jobs=jobs, notify=notify, silent=silent, verbose=verbose, extra_verbose=extra_verbose) # The first path will give the name to the library if name is None: name = basename(normpath(abspath(src_paths[0]))) toolchain.info("Building library %s (%s, %s)" % (name, toolchain.target.name, toolchain_name)) # Initialize reporting if report != None: start = time() # If project_id is specified, use that over the default name id_name = project_id.upper() if project_id else name.upper() description = name vendor_label = toolchain.target.extra_labels[0] prep_report(report, toolchain.target.name, toolchain_name, id_name) cur_result = create_result(toolchain.target.name, toolchain_name, id_name, description) if properties != None: prep_properties(properties, 
toolchain.target.name, toolchain_name, vendor_label) for src_path in src_paths: if not exists(src_path): error_msg = "The library source folder does not exist: %s", src_path if report != None: cur_result["output"] = error_msg cur_result["result"] = "FAIL" add_result_to_report(report, cur_result) raise Exception(error_msg) try: # Call unified scan_resources resources = scan_resources(src_paths, toolchain, dependencies_paths=dependencies_paths, inc_dirs=inc_dirs) # Copy headers, objects and static libraries - all files needed for static lib toolchain.copy_files(resources.headers, build_path, resources=resources) toolchain.copy_files(resources.objects, build_path, resources=resources) toolchain.copy_files(resources.libraries, build_path, resources=resources) if resources.linker_script: toolchain.copy_files(resources.linker_script, build_path, resources=resources) if resources.hex_files: toolchain.copy_files(resources.hex_files, build_path, resources=resources) # Compile Sources objects = toolchain.compile_sources(resources, abspath(tmp_path), resources.inc_dirs) resources.objects.extend(objects) if archive: toolchain.build_library(objects, build_path, name) if report != None: end = time() cur_result["elapsed_time"] = end - start cur_result["output"] = toolchain.get_output() cur_result["result"] = "OK" add_result_to_report(report, cur_result) return True except Exception, e: if report != None: end = time() if isinstance(e, ToolException): cur_result["result"] = "FAIL" elif isinstance(e, NotSupportedException): cur_result["result"] = "NOT_SUPPORTED" cur_result["elapsed_time"] = end - start toolchain_output = toolchain.get_output() if toolchain_output: cur_result["output"] += toolchain_output add_result_to_report(report, cur_result) # Let Exception propagate raise e
def parse_args(**kwargs):
    """Build the training/testing configuration from command-line arguments.

    Keyword arguments act as base config values; the parsed command-line
    options are merged on top and the combined dict is returned as an edict.
    Side effects: creates the snapshot directory (when one is specified).
    """
    cfg = kwargs
    parser = argparse.ArgumentParser(description='Training With Pytorch')
    parser.add_argument('--mode', default='train', type=str,
                        choices=['prune', 'train', 'test'])
    parser.add_argument('--gray', default='false', type=str2bool)
    parser.add_argument('--channel', default=3, type=int)
    parser.add_argument('--num-classes', default=62, type=int)
    parser.add_argument('--cutmix', default='false', type=str2bool)
    parser.add_argument('--teacher_arch', default='', type=str, choices=[
        '', 'mobilenet_025', 'mobilenet_05', 'mobilenet_075', 'mobilenet_1',
        'mobilenet_2', 'resnet18_cbam', 'resnet34_cbam', 'resnet50_cbam',
        'resnet101_cbam', 'resnet152_cbam', 'ghostnet', 'peelenet',
        'tsing_net', 'vovnet27_slim', 'vovnet39', 'vovnet57'
    ])
    parser.add_argument('--teacher_resume', default='', type=str,
                        metavar='PATH')
    parser.add_argument('--teacher_T', default=-1, type=int)
    parser.add_argument('--teacher_alpha', default=0.9, type=float)
    parser.add_argument('--arch', default='mobilenet_05', type=str, choices=[
        'mobilenet_025', 'mobilenet_05', 'mobilenet_075', 'mobilenet_1',
        'mobilenet_2', 'resnet18_cbam', 'resnet34_cbam', 'resnet50_cbam',
        'resnet101_cbam', 'resnet152_cbam', 'ghostnet', 'peelenet',
        'tsing_net', 'vovnet27_slim', 'vovnet39', 'vovnet57'
    ])
    parser.add_argument('--resume', default='', type=str, metavar='PATH')
    parser.add_argument('--prune_s', default=0.001, type=float)
    parser.add_argument('--prune_mode', default='constant', type=str,
                        choices=['', 'constant', 'global', 'locality'])
    parser.add_argument('--prune_percent', default=0.5, type=float)
    parser.add_argument('--binary_model', default='normal', type=str,
                        choices=['normal', 'bnn', 'bwn', 'xnor', 'bireal'])
    parser.add_argument('--start-epoch', default=1, type=int)
    parser.add_argument('--save_epochs', default=5, type=int)
    parser.add_argument('--snapshot', default='', type=str)
    parser.add_argument('--warmup', default=-1, type=int)
    parser.add_argument('--devices-id', default='0,1', type=str)
    parser.add_argument('-j', '--workers', default=6, type=int)
    parser.add_argument('--epochs', default=40, type=int)
    parser.add_argument('--milestones', default='15,25,30', type=str)
    parser.add_argument('-b', '--batch-size', default=128, type=int)
    parser.add_argument('-vb', '--val-batch-size', default=512, type=int)
    parser.add_argument('--base-lr', '--learning-rate', default=0.001,
                        type=float)
    parser.add_argument('--adjust_lr', default='normal', type=str,
                        choices=['normal', 'cosine', 'finetune'])
    parser.add_argument('--optimizer_type', default='sgd', type=str,
                        choices=['sgd', 'adam'])
    parser.add_argument('--size-average', default='mean', type=str)
    parser.add_argument('--resample-num', default=132, type=int)
    parser.add_argument('--print-freq', '-p', default=20, type=int)
    parser.add_argument('--data_type', default='regresion', type=str,
                        choices=["regresion", "recognation", "detector"])
    # BUGFIX: the default was 'mesLoss', a typo absent from the choices
    # list, so the implicit default was an invalid value downstream.
    parser.add_argument('--loss_type', default='mseLoss', type=str,
                        choices=['mseLoss', 'wingloss', 'awingloss',
                                 'labelSmooth', 'focalLoss', 'lsoftmax',
                                 'arcface', 'sphereface', 'cosface'])
    # regresion data or recognation data
    parser.add_argument('--train_file', default='', type=str)
    parser.add_argument('--test_file', default='', type=str)
    parser.add_argument('--root', default='')
    # detector data
    parser.add_argument('--dataset_type', default='VOC',
                        choices=['VOC', 'COCO'], type=str, help='VOC or COCO')
    parser.add_argument('--dataset_root', default='data/VOCdevkit/',
                        help='Dataset root directory path')
    parser.add_argument('--log-file', default='output.log', type=str)
    parser.add_argument('--log-mode', default='w', type=str)
    args = parser.parse_args()

    # some other operations
    args.devices_id = [int(d) for d in args.devices_id.split(',')]
    args.milestones = [int(m) for m in args.milestones.split(',')]
    # NOTE: any explicit --channel value is overridden here; the channel
    # count always follows --gray (1 for grayscale, 3 otherwise).
    args.channel = 1 if args.gray else 3

    snapshot_dir = osp.split(args.snapshot)[0]
    # BUGFIX: only create the snapshot directory when one was actually
    # given; previously mkdir('') could fail when --snapshot was empty or
    # a bare filename.
    if snapshot_dir:
        mkdir(snapshot_dir)

    cfg.update(vars(args))
    return edict(cfg)