def __init__(self, target):
    target_info = self.check_supported(target)
    if not target_info:
        raise TargetNotSupportedException("Target not supported in CMSIS pack")
    self.url = target_info['pdsc_file']
    self.pdsc_url, self.pdsc_id, _ = split_path(self.url)
    self.pack_url, self.pack_id, _ = split_path(target_info['pack_file'])
    self.dname = target_info["_cpu_name"]
    self.core = target_info["_core"]
    self.dfpu = target_info['processor']['fpu']
    self.debug, self.dvendor = self.vendor_debug(target_info['vendor'])
    self.dendian = target_info['processor'].get('endianness', 'Little-endian')
    self.debug_svd = target_info.get('debug', '')
    self.compile_header = target_info['compile']['header']
    self.target_info = target_info
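# A minimal sketch of the target_info dict shape this constructor expects,
# inferred from the keys accessed above. The concrete values and URLs are
# hypothetical and only illustrate what check_supported() might return.
example_target_info = {
    'pdsc_file': 'https://example.com/packs/Vendor.Device_DFP.pdsc',  # assumed URL
    'pack_file': 'https://example.com/packs/Vendor.Device_DFP.pack',  # assumed URL
    '_cpu_name': 'EXAMPLE_DEVICE',
    '_core': 'Cortex-M4',
    'processor': {'fpu': 'FPU', 'endianness': 'Little-endian'},
    'vendor': 'ExampleVendor:0',
    'debug': 'EXAMPLE_DEVICE.svd',
    'compile': {'header': 'device.h'},
}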
def relative_object_path(self, build_path, base_dir, source):
    source_dir, name, _ = split_path(source)
    obj_dir = join(build_path, relpath(source_dir, base_dir))
    # Only create the output directory when it differs from the last one made
    if obj_dir != self.prev_dir:
        self.prev_dir = obj_dir
        mkdir(obj_dir)
    return join(obj_dir, name + '.o')
def __init__(self, target):
    target_info = self.check_supported(target)
    if not target_info:
        raise TargetNotSupportedException(
            "Target not supported in CMSIS pack")
    self.url = target_info['pdsc_file']
    self.pdsc_url, self.pdsc_id, _ = split_path(self.url)
    self.pack_url, self.pack_id, _ = split_path(target_info['pack_file'])
    self.dname = target_info["_cpu_name"]
    self.core = target_info["_core"]
    self.dfpu = target_info['processor']['fpu']
    self.debug, self.dvendor = self.vendor_debug(target_info['vendor'])
    self.dendian = target_info['processor'].get('endianness', 'Little-endian')
    self.debug_svd = target_info.get('debug', '')
    self.compile_header = target_info['compile']['header']
    self.target_info = target_info
def compile_sources(self, resources, build_path, inc_dirs=None):
    # Web IDE progress bar for project build
    files_to_compile = (
        resources.s_sources + resources.c_sources + resources.cpp_sources)
    self.to_be_compiled = len(files_to_compile)
    self.compiled = 0

    inc_paths = resources.inc_dirs
    if inc_dirs is not None:
        inc_paths.extend(inc_dirs)
    # De-duplicate include paths
    inc_paths = set(inc_paths)
    # Sort include paths for consistency
    inc_paths = sorted(set(inc_paths))
    # Unique id of all include paths
    self.inc_md5 = md5(" ".join(inc_paths)).hexdigest()

    # Where to store response files
    self.build_dir = build_path

    objects = []
    queue = []
    prev_dir = None

    # Sort compile queue for consistency
    files_to_compile.sort()
    work_dir = getcwd()

    for source in files_to_compile:
        _, name, _ = split_path(source)
        object = self.relative_object_path(
            build_path, resources.file_basepath[source], source)

        # Queue mode (multiprocessing)
        commands = self.compile_command(source, object, inc_paths)
        if commands is not None:
            queue.append({
                "source": source,
                "object": object,
                "commands": commands,
                "work_dir": work_dir,
                "chroot": self.CHROOT,
            })
        else:
            objects.append(object)

    # Use queues/multiprocessing if cpu count is higher than setting
    jobs = self.jobs if self.jobs else cpu_count()
    if jobs > CPU_COUNT_MIN and len(queue) > jobs:
        return self.compile_queue(queue, objects)
    else:
        return self.compile_seq(queue, objects)
def compile_sources(self, resources, build_path, inc_dirs=None):
    # Web IDE progress bar for project build
    files_to_compile = (
        resources.s_sources + resources.c_sources + resources.cpp_sources)
    self.to_be_compiled = len(files_to_compile)
    self.compiled = 0

    inc_paths = resources.inc_dirs
    if inc_dirs is not None:
        inc_paths.extend(inc_dirs)
    # De-duplicate include paths
    inc_paths = set(inc_paths)
    # Sort include paths for consistency
    inc_paths = sorted(set(inc_paths))
    # Unique id of all include paths
    self.inc_md5 = md5(' '.join(inc_paths)).hexdigest()

    # Where to store response files
    self.build_dir = build_path

    objects = []
    queue = []
    prev_dir = None

    # The dependency checking for C/C++ is delegated to the compiler
    base_path = resources.base_path

    # Sort compile queue for consistency
    files_to_compile.sort()
    work_dir = getcwd()

    for source in files_to_compile:
        _, name, _ = split_path(source)
        object = self.relative_object_path(build_path, base_path, source)

        # Queue mode (multiprocessing)
        commands = self.compile_command(source, object, inc_paths)
        if commands is not None:
            queue.append({
                'source': source,
                'object': object,
                'commands': commands,
                'work_dir': work_dir,
                'chroot': self.CHROOT
            })
        else:
            objects.append(object)

    # Use queues/multiprocessing if cpu count is higher than setting
    jobs = self.jobs if self.jobs else cpu_count()
    if jobs > CPU_COUNT_MIN and len(queue) > jobs:
        return self.compile_queue(queue, objects)
    else:
        return self.compile_seq(queue, objects)
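# A minimal, self-contained sketch of the dispatch decision made at the end of
# compile_sources: parallel compilation is only used when the effective job
# count exceeds the project minimum and there are more queued sources than
# jobs. CPU_COUNT_MIN below is an assumed placeholder for the real constant.
from multiprocessing import cpu_count

CPU_COUNT_MIN = 1  # assumption: stand-in for the project-wide constant

def use_parallel_build(jobs_setting, queue_length):
    """Return True when the queued sources should be compiled in parallel."""
    jobs = jobs_setting if jobs_setting else cpu_count()
    return jobs > CPU_COUNT_MIN and queue_length > jobs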
def relative_object_path(self, build_path, base_dir, source):
    source_dir, name, _ = split_path(source)
    obj_dir = join(build_path, relpath(source_dir, base_dir))
    mkdir(obj_dir)
    return join(obj_dir, name + ".o")
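# A self-contained sketch of the same path mapping relative_object_path
# performs, without the mkdir side effect; the example paths below are
# hypothetical and only show where an object file lands.
from os.path import basename, dirname, join, relpath, splitext

def object_path_for(build_path, base_dir, source):
    """Mirror the source tree under build_path and swap the extension to .o."""
    source_dir = dirname(source)
    name = splitext(basename(source))[0]
    return join(build_path, relpath(source_dir, base_dir), name + ".o")

# object_path_for("BUILD/GCC_ARM", ".", "./drivers/serial_api.c")
# -> "BUILD/GCC_ARM/drivers/serial_api.o"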
from argparse import ArgumentParser
from os.path import join, normpath

from tools.regions import merge_region_list, UPDATE_WHITELIST
from tools.notifier.term import TerminalNotifier
from tools.config import Region
from tools.utils import split_path, run_cmd_ext, generate_update_filename

if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("toolchain_path", help="Path to the Keil folder")
    parser.add_argument("linker_output", help="Path to the built axf file")
    options = parser.parse_args()

    axf_file = normpath(options.linker_output)
    output_directory, output_name, output_ext = split_path(axf_file)
    hex_file = join(output_directory, output_name + ".hex")
    combined_hex_file = join(output_directory, output_name + "_combined.hex")

    # Convert the linker's axf output to Intel hex with Keil's fromelf tool
    command = [
        join(normpath(options.toolchain_path), "ARM/ARMCC/bin/fromelf.exe"),
        "--i32",
        "--output",
        hex_file,
        axf_file,
    ]
    stdout, stderr, retcode = run_cmd_ext(command)

    if retcode:
        err_msg = ("Failed to convert axf to hex.\r\n"
                   "Command: {}\r\n"
                   "retcode: {}\r\n"
                   "stdout: {}\r\n"
                   "stderr: {}").format(command, retcode, stdout, stderr)
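# A hypothetical invocation of this post-build helper; the script name and
# both paths are assumptions used only for illustration.
#
#   python combine_hex.py "C:/Keil_v5" BUILD/project.axf
#
# fromelf.exe then writes BUILD/project.hex, and the *_combined.hex path set
# above is presumably filled in by the remainder of the script (e.g. via
# merge_region_list), which is not shown in this excerpt.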