def load_plugin(name, config, plugin_config):
    """Load an installed plugin module by name.

    :param name: plugin name, optionally suffixed with ':<version>' to pin an
        exact version; the '-plugin' suffix is appended when missing
    :param config: global configuration (not used directly here)
    :param plugin_config: arguments exposed to the plugin module as ``plugin.args``
    :raises PluginException: if the plugin is not installed, the installed
        version does not match the requested one, or the module fails to import
    """
    log.info(f"load plugin '{name}'")
    search_name = name
    search_version = None
    if ':' in name:
        # split only on the first ':' so a malformed extra colon cannot crash
        search_name, search_version = name.split(':', 1)
    if not search_name.endswith("-plugin"):
        search_name = f"{search_name}-plugin"
    package = load_installed_package(search_name)
    if package is None:
        # (the original had an unreachable `return` after this raise — removed)
        raise PluginException(f"{name}: plugin not installed")
    if search_version and package.version != Version(search_version):
        raise PluginException(
            f"{name}: require version '{search_version}' but '{package.version}' found"
        )
    try:
        # import the plugin package's __init__.py as a standalone module
        spec = importlib.util.spec_from_file_location(
            "plugin", os.path.join(package.path, '__init__.py'))
        plugin = importlib.util.module_from_spec(spec)
        # inject the plugin arguments before executing the module body so the
        # plugin can read them at import time
        plugin.args = plugin_config
        spec.loader.exec_module(plugin)
    except Exception as e:
        print_stack()
        log.debug(e)
        raise PluginException(f"{name}: failed to load plugin")
def load_project(path='army.toml'):
    """Load and validate the project manifest at `path`.

    :param path: path to the project's army.toml (may contain '~')
    :return: a checked :class:`Project` instance
    :raises ProjectException: if the file is missing or cannot be parsed
    """
    # TODO find a way to add line to error message
    file = os.path.expanduser(path)
    if not os.path.exists(file):
        raise ProjectException(f"{file}: file not found")
    content = {}
    try:
        log.info(f"Load project '{file}'")
        content = toml.load(file)
        log.debug(f"content: {content}")
    except Exception as e:
        # the original had two identical handlers (TomlDecodeError and
        # Exception) — merged, since both reported the same way
        print_stack()
        log.debug(e)
        raise ProjectException(f"{format(e)}")
    project = Project(data=content)
    project.check()
    return project
def package(ctx, **kwargs):
    """CLI command: build a distributable archive of the current project."""
    log.info(f"package")
    config = ctx.parent.config
    # load project configuration when an army.toml is present
    project = None
    if os.path.exists('army.toml'):
        try:
            project = load_project()
        except Exception as e:
            print_stack()
            log.debug(e)
    if project is None:
        log.info(f"no project loaded")
        exit(1)
    # build the archive into the 'output' folder
    try:
        archive = project.package(os.getcwd(), 'output')
    except Exception as e:
        print_stack()
        log.debug(e)
        print(f"packaging failed: {e}")
        exit(1)
    print(f"{os.path.relpath(archive, os.getcwd())} generated")
def repos(ctx, **kwargs):
    """CLI command: print the configured repositories as an aligned table."""
    log.info(f"repos")
    config = ctx.parent.config
    # build repositories list
    repositories = load_repositories(config, prefix)
    if len(repositories) == 0:
        print("no repository configured")
        return
    # each column starts with its header row
    column_name = ['name'] + [r.name for r in repositories]
    column_type = ['type'] + [r.type for r in repositories]
    column_uri = ['uri'] + [r.uri for r in repositories]
    # column widths = longest entry of each column
    max_name = len(max(column_name, key=len))
    max_type = len(max(column_type, key=len))
    max_uri = len(max(column_uri, key=len))
    # (removed a dead `if len(column_name) > 0` check: the header row makes
    # the list non-empty by construction)
    for name, type_, uri in zip(column_name, column_type, column_uri):
        print(f"{name.ljust(max_name)} | ", end='')
        print(f"{type_.ljust(max_type)} | ", end='')
        print(f"{uri.ljust(max_uri)}", end='')
        print()
def rtt_console(ctx, speed, detach, viewer, **kwargs):
    """CLI command: open a serial console (picocom) on the target's tty.

    BUG(review): the body references `echo`, `tty` and `baud`, which are not
    parameters of the current signature (ctx, speed, detach, viewer) — see the
    commented-out previous signature below. As written this raises NameError
    at runtime unless those names exist at module level; confirm the intended
    signature before fixing.
    """
    # def rtt_console(ctx, tty, baud, echo, detach, **kwargs):
    log.info(f"rtt-console")
    opts = []
    if echo == True:
        # picocom local echo
        opts.append("-c")
    command = []
    jlinkexe = locate_jlink()
    log.debug(f"jlink path: {jlinkexe}")
    try:
        # build the picocom command line: device, baud rate, line mappings
        command += [
            "picocom", f"/dev/{tty}",
            "-b", f"{baud}",
            "-l",
            "--imap=lfcrlf",
            "--omap=crlf",
            "--escape=a"
        ]
        command += opts
        if detach == True:
            # NOTE(review): appending "&" to the argv list does not background
            # the process (no shell involved) — picocom receives it as a
            # literal argument; confirm intent
            command += ["&"]
        # TODO add check picocom is installed
        subprocess.check_call(command)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
def list(ctx, **kwargs):
    """CLI command: print installed packages as an aligned table."""
    log.info(f"list")
    # load configuration
    config = ctx.parent.config
    packages = load_installed_packages(prefix=prefix)
    if len(packages) == 0:
        print('no package found', file=sys.stderr)
        return
    # each column begins with its header row
    names = ['package'] + [p.name for p in packages]
    versions = ['version'] + [str(p.version) for p in packages]
    descriptions = ['description'] + [p.description for p in packages]
    repos = ['repository'] + [p.repository.name for p in packages]
    # column widths = longest entry of each column
    width_package = max(len(s) for s in names)
    width_version = max(len(s) for s in versions)
    width_description = max(len(s) for s in descriptions)
    width_repo = max(len(s) for s in repos)
    for pkg, ver, desc, repo in zip(names, versions, descriptions, repos):
        print(f"{pkg.ljust(width_package)} | ", end='')
        print(f"{ver.ljust(width_version)} | ", end='')
        print(f"{desc.ljust(width_description)} | ", end='')
        print(f"{repo.ljust(width_repo)}", end='')
        print()
def load_project_packages(project, target):
    """Resolve the installed packages a project depends on.

    Walks the project-level and target-level dependencies breadth-first,
    loading each installed package exactly once and queueing its own
    dependencies in turn.

    :raises PackageException: if any dependency is not installed
    """
    seen = []
    # seed the work queue with (name, version_range) pairs from the project
    # and from the selected target
    queue = [(dep, project.dependencies[dep]) for dep in project.dependencies]
    queue += [(dep, project.target[target].dependencies[dep])
              for dep in project.target[target].dependencies]
    resolved = []
    while queue:
        name, version_range = queue.pop(0)
        if name in seen:
            log.info(f"{name} already loaded, skip")
            continue
        installed = load_installed_package(name, version_range=version_range)
        if installed is None:
            raise PackageException(f"{name}: package not installed")
        resolved.append(installed)
        # enqueue transitive dependencies
        queue += [(sub, installed.dependencies[sub])
                  for sub in installed.dependencies]
        seen.append(name)
    return resolved
def logout(ctx, name, **kwargs):
    """CLI command: drop stored credentials for repository `name`.

    Exits with status 1 when the repository is unknown or logout fails.
    """
    log.info(f"logout {name}")
    config = ctx.parent.config
    # build repositories list
    repositories = load_repositories(config, prefix)
    repo = None
    for repository in repositories:
        if repository.name == name:
            repo = repository
    if repo is None:
        print(f"{name}: repository not found", file=sys.stderr)
        exit(1)
    # (removed an unused local `service_id = f"army.{name}"` — never read)
    try:
        repo.logout()
    except Exception as e:
        print_stack()
        log.debug(e)
        print(f"{name}: {e}", file=sys.stderr)
        exit(1)
    print("logged out")
def publish(ctx, name, force, **kwargs):
    """CLI command: package the current project and push it to repository `name`.

    Requires an army.toml in the working directory and stored credentials for
    the target repository; exits with status 1 on any failure.
    """
    log.info(f"publish")
    config = ctx.parent.config
    # load the project configuration when an army.toml is present
    project = None
    if os.path.exists('army.toml'):
        try:
            project = load_project()
        except Exception as e:
            print_stack()
            log.debug(e)
    if project is None:
        log.info(f"no project loaded")
        exit(1)
    # locate the destination repository
    repositories = load_repositories(config, prefix)
    repo = None
    for candidate in repositories:
        if candidate.name == name:
            repo = candidate
    if repo is None:
        print(f"{name}: repository not found", file=sys.stderr)
        exit(1)
    if repo.load_credentials() == False:
        print(f"{name}: no credentials found", file=sys.stderr)
        exit(1)
    # package
    try:
        archive = project.package(os.getcwd(), 'output')
        print(f"{os.path.relpath(archive, os.getcwd())} generated")
    except Exception as e:
        print_stack()
        log.debug(e)
        print(f"packaging failed: {e}")
        exit(1)
    # TODO check version is tagged and files are commited and pushed
    # publish
    try:
        repo.publish(project, archive, overwrite=force)
    except Exception as e:
        print_stack()
        log.debug(e)
        print(f"publishing failed: {e}")
        exit(1)
    print(f"{os.path.relpath(archive, os.getcwd())} published")
def init(ctx, **kwargs):
    """CLI command: initialize a project (currently a placeholder)."""
    log.info(f"init")
    # load configuration
    config = ctx.parent.config
    project_config = None
    try:
        # load project configuration
        project_config = load_project(config)
    except Exception as e:
        print_stack()
        log.debug(e)
        log.info(f"no project loaded")
    else:
        # on success the project configuration becomes the active one
        config = project_config
    print("Not yet implemented")
def package(self, path, output_path):
    """Build a zip archive of this project.

    Runs the optional pkg/prebuild script, stages the files listed in
    self.packaging.include (plus army.toml) into a temporary directory,
    zips that staging directory into '<name>-<version>.zip' under
    `output_path`, then runs the optional pkg/postbuild script.

    :param path: project root directory (may contain '~')
    :param output_path: folder (relative to `path`) receiving the archive
    :return: full path of the generated zip file
    :raises ProjectException: if an included item is missing or a copy fails
    """
    # execute prebuild step
    if os.path.exists(os.path.expanduser(os.path.join(path, 'pkg', 'prebuild'))):
        log.info("execute prebuild script")
        subprocess.check_call([os.path.join(os.path.expanduser(path), 'pkg', 'prebuild')])
    # create temporary folder (auto-removed when `d` is garbage collected)
    d = tempfile.TemporaryDirectory()
    files = []
    for include in self.packaging.include:
        files.append(include)
    # the manifest always ships with the package
    files.append('army.toml')
    # copy files into the staging directory
    for include in files:
        source = os.path.join(os.path.expanduser(path), include)
        dest = d.name
        if os.path.exists(source)==False:
            raise ProjectException(f"{include}: package item does not exists")
        try:
            if os.path.isfile(source):
                shutil.copy(source, dest)
            else:
                # directories are copied recursively under their own name
                shutil.copytree(source, os.path.join(dest, os.path.basename(source)), dirs_exist_ok=True)
        except Exception as e:
            print_stack()
            log.debug(e)
            raise ProjectException(f"{e}")
    # TODO add exclude
    # create zip file, replacing any previous archive of the same version
    pkg_name = f"{self.name}-{self.version}.zip"
    pkg_path = os.path.join(os.path.expanduser(path), output_path, pkg_name)
    if os.path.exists(pkg_path):
        log.info(f"remove existing file {pkg_path}")
        os.remove(pkg_path)
    if os.path.exists(output_path)==False:
        os.mkdir(output_path)
    log.info(f"create file {pkg_path}")
    with zipfile.ZipFile(pkg_path, 'w', zipfile.ZIP_DEFLATED) as zf:
        # archive entries are stored relative to the staging root
        # (note: `files` here shadows the include list above — intentional
        # reuse of the name inside os.walk)
        for root, dirs, files in os.walk(d.name):
            for file in files:
                zf.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), d.name))
    # execute postbuild step
    if os.path.exists(os.path.expanduser(os.path.join(path, 'pkg', 'postbuild'))):
        log.info("execute postbuild script")
        subprocess.check_call([os.path.join(os.path.expanduser(path), 'pkg', 'postbuild')])
    return pkg_path
def cli(ctx, target, version, **kwargs):
    """Root CLI group: publish config/project/target state on the context.

    Exits with status 1 when `target` names a target missing from the project.
    """
    global config, project, target_name, default_target
    # propagate module-level state to the click context
    ctx.config = config
    ctx.project = project
    ctx.target = default_target
    ctx.target_name = target_name
    if target is not None:
        if target not in project.target:
            print(f"{target}: target not defined in project", file=sys.stderr)
            exit(1)
        # an explicit --target overrides the default
        ctx.target = project.target[target]
        ctx.target_name = target
    log.info(f"current target: {target}")
def search(ctx, name, **kwargs):
    """CLI command: search `name` in every configured repository and print
    matches as an aligned table."""
    log.info(f"search {name}")
    # load configuration
    config = ctx.parent.config
    # build repositories list
    repositories = load_repositories(config, prefix)
    packages = []
    for r in repositories:
        res = r.search(name)
        for pkg in res:
            packages.append(res[pkg])
    if len(packages) == 0:
        print(f'No matches found for "{name}"', file=sys.stderr)
        return
    # each column starts with its header row
    column_repo = ['repository']
    column_package = ['package']
    column_version = ['version']
    column_description = ['description']
    for package in packages:
        column_repo.append(package.repository.name)
        column_package.append(package.name)
        column_version.append(str(package.version))
        column_description.append(package.description)
    max_repo = len(max(column_repo, key=len))
    max_package = len(max(column_package, key=len))
    max_version = len(max(column_version, key=len))
    max_description = len(max(column_description, key=len))
    for i in range(len(column_repo)):
        print(f"{column_repo[i].ljust(max_repo)} | ", end='')
        print(f"{column_package[i].ljust(max_package)} | ", end='')
        print(f"{column_version[i].ljust(max_version)} | ", end='')
        # BUG FIX: description was padded with max_version, misaligning the
        # last column (compare with the `list` command)
        print(f"{column_description[i].ljust(max_description)}", end='')
        print()
def clean(ctx, **kwargs):
    """CLI command: remove the build output folder for the current arch."""
    log.info(f"clean")
    # load configuration
    config = ctx.config
    # load profile
    profile = ctx.profile
    # set code build path
    output_path = 'output'
    # load project
    project = ctx.project
    if project is None:
        # BUG FIX: the stream must be passed as `file=`; the original printed
        # the stderr object as a positional value
        print(f"no project found", file=sys.stderr)
        exit(1)
    # load dependencies
    try:
        # NOTE(review): load_project_packages is defined elsewhere with a
        # (project, target) signature — TODO confirm whether a target
        # argument is missing here
        dependencies = load_project_packages(project)
        log.debug(f"dependencies: {dependencies}")
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        clean_exit()
    # get arch from profile
    arch, arch_package = get_arch(profile, project, dependencies)
    # build path lives under the mpu subfolder when one is defined
    if arch.mpu is None:
        build_path = os.path.join(output_path, arch.cpu)
    else:
        build_path = os.path.join(output_path, arch.mpu)
    log.info(f"clean path: {build_path}")
    if os.path.exists(build_path):
        shutil.rmtree(build_path)
    print(f"cleaned")
def _load_installed_package(path):
    """Load and validate the army.toml manifest of an installed package.

    :param path: package installation directory (may contain '~')
    :return: a checked InstalledPackage
    :raises PackageException: if the manifest is missing or unreadable
    """
    # TODO find a way to add line to error message
    manifest = os.path.expanduser(os.path.join(path, 'army.toml'))
    if not os.path.exists(manifest):
        raise PackageException(f"{manifest}: file not found")
    content = {}
    try:
        log.info(f"load installed package '{manifest}'")
        content = toml.load(manifest)
        log.debug(f"content: {content}")
    except Exception as e:
        print_stack()
        log.debug(e)
        raise PackageException(f"{format(e)}")
    package = InstalledPackage(data=content, path=path)
    package.check()
    return package
def update(ctx, **kwargs):
    """CLI command: refresh the index of every configured repository.

    A failing repository is reported on stderr but does not stop the loop.
    """
    log.info(f"update")
    config = ctx.parent.config
    # build repositories list
    repositories = load_repositories(config, prefix)
    if not repositories:
        print("no repository configured")
        return
    for repository in repositories:
        print(f"update {repository.name}")
        try:
            repository.update()
        except Exception as e:
            print_stack()
            log.debug(f"{type(e)} {e}")
            print(f"{repository.name}: {e}", file=sys.stderr)
    print("updated")
def load(self):
    """Load the army.toml manifest of this local git repository.

    :raises LocalGitRepositoryException: if the file is missing or unreadable
    """
    # TODO find a way to add line to error message
    manifest = os.path.expanduser(
        os.path.join(prefix or "", self.uri, 'army.toml'))
    if not os.path.exists(manifest):
        raise LocalGitRepositoryException(f"{manifest}: file not found")
    content = {}
    try:
        log.info(f"Load git repository '{manifest}'")
        content = toml.load(manifest)
        log.debug(f"content: {content}")
    except Exception as e:
        print_stack()
        log.debug(e)
        raise LocalGitRepositoryException(f"{manifest}: {format(e)}")
    self._project = RepositoryPackage(data=content, repository=self)
    self._project.check()
def login(ctx, name, token, **kwargs):
    """CLI command: store credentials for repository `name`.

    With --token, prompts for a token; otherwise prompts for user/password.
    Exits with status 1 when the repository is unknown or login fails.

    NOTE(review): the user/password branch of this function was corrupted in
    the source (the credential-handling code was scrubbed to '******'); it
    has been reconstructed to mirror the token branch — confirm against
    version-control history.
    """
    log.info(f"login {name}")
    config = ctx.parent.config
    # build repositories list
    repositories = load_repositories(config, prefix)
    repo = None
    for repository in repositories:
        if repository.name == name:
            repo = repository
    if repo is None:
        print(f"{name}: repository not found", file=sys.stderr)
        exit(1)
    if token == True:
        # token authentication: prompt without echo
        token = getpass.getpass(prompt='token: ', stream=None)
        try:
            repo.login(token=token)
        except Exception as e:
            print_stack()
            log.debug(e)
            print(f"{name}: {e}", file=sys.stderr)
            exit(1)
    else:
        # user/password authentication (reconstructed, see note above)
        user = input("login: ")
        password = getpass.getpass(prompt='password: ', stream=None)
        try:
            repo.login(user=user, password=password)
        except Exception as e:
            print_stack()
            log.debug(e)
            print(f"{name}: {e}", file=sys.stderr)
            exit(1)
    print("logged in")
def uninstall(ctx, name, **kwargs):
    """CLI command: uninstall the named packages.

    All names are resolved before any removal starts, so an unknown package
    aborts the whole operation with status 1.
    """
    log.info(f"uninstall {name} {kwargs}")
    config = ctx.parent.config
    if not name:
        print("nothing to uninstall", file=sys.stderr)
        exit(1)
    # build repositories list
    repositories = load_repositories(config, prefix)
    # resolve every name first
    packages = []
    for pkg_name in name:
        installed = load_installed_package(pkg_name, prefix=prefix)
        if installed is None:
            print(f"{pkg_name}: package not installed", file=sys.stderr)
            exit(1)
        packages.append(installed)
    # then remove them
    for installed in packages:
        installed.uninstall()
def load_configuration_repository_file(path, parent=None):
    """Parse a repository configuration file into an ArmyConfigRepository.

    :param path: configuration file path (may contain '~')
    :param parent: parent configuration for chained lookups
    :raises ConfigException: if the file is missing, unparsable, or invalid
    """
    # TODO find a way to add line to error message
    expanded = os.path.expanduser(path)
    if not os.path.exists(expanded):
        raise ConfigException(f"{expanded}: file not found")
    config = {}
    try:
        log.info(f"Load config '{path}'")
        config = toml.load(expanded)
        log.debug(f"content: {config}")
    except Exception as e:
        print_stack()
        log.debug(e)
        raise ConfigException(f"{format(e)}")
    # wrap the raw toml content; validation errors surface as ConfigException
    try:
        repository_config = ArmyConfigRepository(value=config, parent=parent)
    except Exception as e:
        print_stack()
        log.debug(e)
        raise ConfigException(f"{format(e)}")
    return repository_config
def load(self):
    """Download this package's release archive from GitHub and load its manifest.

    Finds the release titled '<name>-<version>' in the organization repo
    matching self.name, downloads the '<name>-<version>.zip' asset into a
    temp file, extracts it to a temp directory (kept in self._source_path)
    and parses the contained army.toml into self._data.

    :raises GithubRepositoryException: on any API, download or parse failure
    """
    # Download package
    try:
        uri, org = self.repository._decompose_uri()
        g = github.Github(self.repository._user, self.repository._password)
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GithubRepositoryException(f"{e}")
    try:
        organization = g.get_organization(org)
    except UnknownObjectException as e:
        # unknown organization gets a more precise message
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GithubRepositoryException(f"{org}: not found")
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GithubRepositoryException(f"{e}")
    try:
        # get repo
        repo = organization.get_repo(self.name)
        # locate release by its title '<name>-<version>'
        release = None
        for r in repo.get_releases():
            if r.title == f"{self.name}-{self.version}":
                release = r
                break
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GithubRepositoryException(f"{e}")
    if release is None:
        raise GithubRepositoryException(
            f"{self.name}-{self.version}: no release found from github repository"
        )
    try:
        # search zip file among the release assets
        asset = None
        for a in release.get_assets():
            if a.name == f"{self.name}-{self.version}.zip":
                asset = a
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GithubRepositoryException(f"{e}")
    if asset is None:
        raise GithubRepositoryException(
            f"{self.name}-{self.version}.zip: not found from github repository"
        )
    try:
        # download unzip package
        # NOTE(review): tempfile.mktemp() is deprecated and racy —
        # mkstemp() would be safer; left unchanged here
        tmpd = tempfile.mkdtemp()
        tmpf = tempfile.mktemp()
        # TODO: add credentials for private repository
        r = requests.get(asset.browser_download_url, allow_redirects=True)
        with open(tmpf, mode="wb") as f:
            f.write(r.content)
        file = zipfile.ZipFile(tmpf)
        file.extractall(path=tmpd, members=file.namelist())
        self._source_path = tmpd
    except Exception as e:
        # drop the partially downloaded archive before propagating
        os.remove(tmpf)
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GithubRepositoryException(f"{e}")
    # load army.toml
    try:
        file = os.path.join(tmpd, "army.toml")
        log.info(f"Load package '{file}'")
        self._data = toml.load(file)
        log.debug(f"content: {self._data}")
    except toml.decoder.TomlDecodeError as e:
        os.remove(tmpf)
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GithubRepositoryException(f"{e}")
    # archive no longer needed once extracted and parsed
    os.remove(tmpf)
def load(self):
    """Download this package's release archive from GitLab and load its manifest.

    Locates the project named self.name inside the repository group, finds
    the release tagged 'v<version>', downloads the '<name>-<version>.zip'
    asset link into a temp file, extracts it (path kept in
    self._source_path) and parses the contained army.toml into self._data.

    :raises GitlabRepositoryException: on any API, download or parse failure
    """
    # Download package
    try:
        uri, groupuri = self.repository._decompose_uri()
        g = gitlab.Gitlab(uri, private_token=self.repository._token)
        g.auth()
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    try:
        group = g.groups.get(groupuri)
        # group.projects returns lightweight objects; re-fetch the full
        # project by id once the name matches
        project = None
        for p in group.projects.list(all=True):
            if p.name==self.name:
                project = g.projects.get(p.id)
                break
        # NOTE(review): this raise is itself caught by the `except Exception`
        # below and re-wrapped — message is preserved but type chaining is lost
        if project is None:
            raise GitlabRepositoryException(f"{self.name}: project not found inside repository {self.repository.name}")
    except gitlab.exceptions.GitlabGetError as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{self.name}: project not found inside repository {self.repository.name}")
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    try:
        # locate release by tag 'v<version>'
        release = None
        for r in project.releases.list():
            if r.tag_name==f"v{self.version}":
                release = r
                break
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    if release is None:
        # NOTE(review): message says "github" in gitlab code — runtime string
        # left unchanged here; likely a copy-paste slip worth fixing upstream
        raise GitlabRepositoryException(f"{self.name}-{self.version}: no release found from github repository")
    try:
        # search zip file among the release asset links
        asset = None
        for link in release.assets['links']:
            if link['name']==f"{self.name}-{self.version}.zip":
                asset = link
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    if asset is None:
        raise GitlabRepositoryException(f"{self.name}-{self.version}.zip: not found from github repository")
    try:
        # download unzip package
        # NOTE(review): tempfile.mktemp() is deprecated and racy — mkstemp()
        # would be safer; left unchanged here
        tmpd = tempfile.mkdtemp()
        tmpf = tempfile.mktemp()
        # TODO: need this bug to be corrected to make this working
        # https://gitlab.com/gitlab-org/gitlab/-/issues/28978
        headers = {'Private-Token': self.repository._token}
        r = requests.get(asset['url'], headers=headers, allow_redirects=True)
        with open(tmpf, mode="wb") as f:
            f.write(r.content)
        file = zipfile.ZipFile(tmpf)
        file.extractall(path=tmpd,
                        members=file.namelist())
        self._source_path = tmpd
    except Exception as e:
        # drop the partially downloaded archive before propagating
        os.remove(tmpf)
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    # load army.toml
    try:
        file = os.path.join(tmpd, "army.toml")
        log.info(f"Load package '{file}'")
        self._data = toml.load(file)
        log.debug(f"content: {self._data}")
    except toml.decoder.TomlDecodeError as e:
        os.remove(tmpf)
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    # archive no longer needed once extracted and parsed
    os.remove(tmpf)
def publish(self, package, file, overwrite=False):
    """Publish a package archive to this GitLab repository as a release.

    Creates the tag 'v<version>' on master and a matching release, uploads
    `file` as '<name>-<version>.zip' and attaches it as a release link.
    Existing tag/release of the same version are deleted first when
    `overwrite` is True, otherwise publishing fails.

    :param package: the package being published (provides name/version)
    :param file: path of the zip archive to upload
    :param overwrite: replace an existing release/tag of the same version
    :raises GitlabRepositoryException: on any API failure or version conflict
    """
    try:
        uri, groupuri = self._decompose_uri()
        g = gitlab.Gitlab(uri, private_token=self._token)
        g.auth()
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    try:
        group = g.groups.get(groupuri)
        # group.projects returns lightweight objects; re-fetch the full
        # project by id once the name matches
        project = None
        for p in group.projects.list(all=True):
            if p.name==package.name:
                project = g.projects.get(p.id)
                break
        if project is None:
            raise GitlabRepositoryException(f"{package.name}: project not found inside repository {self._name}")
    except gitlab.exceptions.GitlabGetError as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{package.name}: project not found inside repository {self._name}")
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{e}")
    release = None
    tag = None
    try:
        # check if release already exists
        for r in project.releases.list():
            if r.tag_name==f"v{package.version}":
                release = r
        # check if tag already exists
        for t in project.tags.list():
            if t.name==f"v{package.version}":
                tag = t
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{package.name}: {e}")
    # remove release
    if release is not None:
        if overwrite==True:
            log.info(f"remove release v{package.version}")
            try:
                project.releases.delete(f"v{package.version}")
            except Exception as e:
                print_stack()
                log.debug(f"{type(e)} {e}")
                raise GitlabRepositoryException(f"{package.name}: {e}")
        else:
            raise GitlabRepositoryException(f"release v{package.version} already exists")
    # remove tag
    if tag is not None:
        if overwrite==True:
            log.info(f"remove tag v{package.version}")
            try:
                project.tags.delete(f"v{package.version}")
            except Exception as e:
                print_stack()
                log.debug(f"{type(e)} {e}")
                raise GitlabRepositoryException(f"{package.name}: {e}")
        else:
            raise GitlabRepositoryException(f"tag v{package.version} already exists")
    try:
        # create tag
        # NOTE(review): the ref is hard-coded to 'master' — repos whose
        # default branch is 'main' would need this configurable
        tag = project.tags.create({'tag_name': f"v{package.version}", 'ref': 'master'})
        # create release
        release = project.releases.create({'name': f"{package.name}-{package.version}", 'tag_name': f"v{package.version}", 'description': ''})
        # upload the archive and attach it to the release as a link
        asset = project.upload(f"{package.name}-{package.version}.zip", filepath=file)
        url = f"{self._uri}/{project.name}{asset['url']}"
        release.add_link(name=f"{package.name}-{package.version}.zip", url=url, type='package')
    except Exception as e:
        print_stack()
        log.debug(f"{type(e)} {e}")
        raise GitlabRepositoryException(f"{package.name}: {e}")
def add_cmake_files(build_path, dependencies, arch, arch_package, target): global tools_path # # build list of includes # includes = get_cmake_target_includes(target) # includes += get_cmake_includes(dependencies) # copy army.cmake try: shutil.copy( os.path.join(os.path.expanduser(tools_path), "cmake", "army.cmake"), build_path) except Exception as e: print_stack() log.error(f"{e}") exit(1) with open(os.path.join(build_path, "army.cmake"), "a") as fa: print("\n# dependencies section definition", file=fa) with open(os.path.join(build_path, "dependencies.cmake"), "w") as fd: # add target print("\n# target definition", file=fa) if target is not None: # add pre definitions if 'pre' in target: for pre in target['pre']: print(f'include_army_package_file(_ {pre})', file=fd) # add definition print(f'include_army_package_file(_ {target["definition"]})', file=fd) # add post definitions if 'post' in target: for post in target['post']: print(f'include_army_package_file(_ {post})', file=fd) for dependency in dependencies: if 'cmake' in dependency.definition: print(f'set({dependency.name}_path "{dependency.path}")', file=fa) print( f'set({dependency.name}_definition "{os.path.join(dependency.path, dependency.definition["cmake"])}")', file=fa) print(f"include_army_package({dependency.name})", file=fd) os.putenv(f"package_{dependency.name}_path", dependency.path) os.putenv(f"package_{dependency.name}_definition", dependency.definition["cmake"]) log.info(f"Adding dependency: {dependency}") # add arch print("\n# arch definition", file=fa) if arch.mpu_definition is not None: if arch_package is None: print( f'include_army_package_file(_ {arch.mpu_definition})', file=fd) else: os.putenv(f"package_{arch_package.name}_path", arch_package.path) print( f'include_army_package_file({arch_package.name} {arch.mpu_definition})', file=fd)
def compile(ctx, debug, instrument, jobs, **kwargs):
    """CLI command: configure with cmake and build the project with make.

    Resolves dependencies, derives the build folder from the profile's arch,
    exports the tool paths as environment variables for the cmake scripts,
    runs cmake, then runs make -j<jobs> inside the build folder.

    :param debug: build with CMAKE_BUILD_TYPE=Debug
    :param instrument: build with CMAKE_BUILD_TYPE=RelWithDebInfo
        (mutually exclusive with `debug`)
    :param jobs: parallel make job count
    """
    log.info(f"compile")
    # load configuration
    config = ctx.config
    # load profile
    profile = ctx.profile
    # load project
    project = ctx.project
    if project is None:
        # NOTE(review): `file=` keyword is missing — this prints the stream
        # object instead of writing to stderr; worth fixing
        print(f"no project found", sys.stderr)
        exit(1)
    cmake_opts = []
    make_opts = []
    # set code build path
    output_path = 'output'
    # set home directory
    cmake_opts.append("-H.")
    # load dependencies
    try:
        # NOTE(review): load_project_packages is defined elsewhere with a
        # (project, target) signature — confirm whether a target argument is
        # missing here
        dependencies = load_project_packages(project)
        log.debug(f"dependencies: {dependencies}")
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        clean_exit()
    # add toolchain from the profile; abort when the profile has none
    try:
        toolchain_name = profile.data["/tools/toolchain/name"]
        toolchain_definition = profile.data["/tools/toolchain/definition"]
        toolchain_path = profile.data["/tools/toolchain/path"]
        cmake_opts.append(f"-DCMAKE_TOOLCHAIN_FILE='{toolchain_definition}'")
    except Exception as e:
        print_stack()
        log.error(e)
        print("No toolchain definition provided by profile", file=sys.stderr)
        exit(1)
    # get arch from profile
    arch, arch_package = get_arch(profile, project, dependencies)
    # get target from profile
    target = get_target(profile)
    if debug == True and instrument == True:
        print(f"debug and instrument can not be used simultaneously", file=sys.stderr)
        exit(1)
    if debug == True:
        cmake_opts.append("-DCMAKE_BUILD_TYPE=Debug")
    elif instrument == True:
        cmake_opts.append("-DCMAKE_BUILD_TYPE=RelWithDebInfo")
    else:
        cmake_opts.append("-DCMAKE_BUILD_TYPE=Release")
    # verbose makefiles unless logging is fully quiet
    if get_log_level() != "fatal":
        cmake_opts.append("-DCMAKE_VERBOSE_MAKEFILE=ON")
    else:
        cmake_opts.append("-DCMAKE_VERBOSE_MAKEFILE=OFF")
    cmake_opts.append("-DCMAKE_COLOR_MAKEFILE=ON")
    # Suppress developer warnings. Suppress warnings that are meant for the
    # author of the CMakeLists.txt files
    cmake_opts.append("-Wno-dev")
    # search for toolchain binaries
    locate_tools(profile)
    # set build path: mpu subfolder when available, else cpu subfolder
    if arch.mpu is None:
        build_path = os.path.join(output_path, arch.cpu)
        print(f"Build using toolchain {toolchain_name} for arch {arch.cpu}")
    else:
        build_path = os.path.join(output_path, arch.mpu)
        print(f"Build using toolchain {toolchain_name} for mpu {arch.mpu}")
    log.info(f"build_path: {build_path}")
    cmake_opts.append(f"-B{build_path}")
    # for ccache
    os.putenv("CCACHE_LOGFILE", os.path.abspath(os.path.join(build_path, "ccache.log")))
    # export tool locations for the cmake scripts
    os.putenv("tools_path", os.path.abspath(tools_path))
    os.putenv("toolchain_path", os.path.abspath(toolchain_path))
    os.putenv("project_path", os.path.abspath(os.getcwd()))
    os.putenv("c_path", profile.data['/tools/c/path'])
    os.putenv("cxx_path", profile.data['/tools/c++/path'])
    os.putenv("asm_path", profile.data['/tools/asm/path'])
    os.putenv("ar_path", profile.data['/tools/ar/path'])
    os.putenv("ld_path", profile.data['/tools/ld/path'])
    os.putenv("objcopy_path", profile.data['/tools/objcopy/path'])
    os.putenv("objdump_path", profile.data['/tools/objdump/path'])
    os.putenv("size_path", profile.data['/tools/size/path'])
    os.putenv("nm_path", profile.data['/tools/nm/path'])
    # add arch vars
    os.putenv("cpu", arch.cpu)
    # NOTE(review): putenv requires a string — this line fails when arch.mpu
    # is None (the cpu-only branch above); confirm
    os.putenv("mpu", arch.mpu)
    if arch_package is None:
        os.putenv("arch_package", "_")
    else:
        os.putenv("arch_package", arch_package.name)
    os.putenv("arch_path", arch.cpu_definition)
    try:
        log.info(f"cmake options: {' '.join(cmake_opts)}")
        # # create output folder
        os.makedirs(build_path, exist_ok=True)
        # add smake files
        add_cmake_files(build_path, dependencies, arch, arch_package, target)
        # TODO force rebuild elf file even if not changed
        # find ${PROJECT_PATH}/output -name "*.elf" -exec rm -f {} \; 2>/dev/null
        if get_log_level() == 'debug':
            os.system("env")
        # NOTE(review): SystemExit is used here only as a holder for the
        # return code of _program (res.code); the exception is never raised
        SystemExit(_program('cmake', ['--version']))
        # generate cmake files
        res = SystemExit(_program('cmake', cmake_opts))
        if res.code > 0:
            log.error(f"Build failed")
            exit(1)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        clean_exit()
    make_opts.append(f"-j{jobs}")
    # # enable color output
    # os.putenv("GCC_COLORS", 'error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01')
    cwd = os.getcwd()
    try:
        log.info(f"make options: {' '.join(make_opts)}")
        # build now, from inside the build folder
        os.chdir(build_path)
        subprocess.check_call(['make'] + make_opts)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        # restore the working directory even on failure
        os.chdir(cwd)
        clean_exit()
    os.chdir(cwd)
def flash(ctx, timeout, **kwargs):
    """CLI command: flash the built firmware onto the target MPU via J-Link.

    Derives the binary path from the profile's arch (output/<mpu>/bin/
    firmware.bin), then erases, programs and resets the device.

    :param timeout: timeout passed to each JLink operation
    """
    log.info(f"flash")
    # load configuration
    config = ctx.config
    # load profile
    profile = ctx.profile
    # load project
    project = ctx.project
    if project is None:
        # BUG FIX: `file=` keyword was missing; stderr was printed as a value
        print(f"no project found", file=sys.stderr)
        exit(1)
    # load dependencies
    try:
        # NOTE(review): load_project_packages is defined elsewhere with a
        # (project, target) signature — TODO confirm whether a target
        # argument is missing here
        dependencies = load_project_packages(project)
        log.debug(f"dependencies: {dependencies}")
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        clean_exit()
    # get arch from profile
    arch, arch_package = get_arch(profile, project, dependencies)
    # get target from profile
    target = get_target(profile)
    if arch.mpu is None:
        print("Missing mpu informations from arch", file=sys.stderr)
        exit(1)
    # set code build path
    output_path = 'output'
    build_path = os.path.join(output_path, arch.mpu)
    log.info(f"build_path: {build_path}")
    device = arch.mpu
    # J-Link device names drop the 'AT' prefix for Atmel SAMD parts
    if device.startswith("ATSAMD"):
        device = device.replace("ATSAMD", "SAMD")
    log.info(f"Flash {device} with JLink")
    binfile = os.path.join(build_path, "bin/firmware.bin")
    jlinkexe = locate_jlink(profile)
    log.debug(f"jlink path: {jlinkexe}")
    # TODO: en cas d'immpossibilité de programmation il y a probablement une mauvaise configuration du proc
    # voir http://forum.segger.com/index.php?page=Thread&postID=11854, avec Ozone changer la zone mémoire 00804000
    # 0x00804000 contains the calibration data AUX0-NVM User
    # 0x00804000 = FF C7 E0 D8 5D FC FF FF FF FF FF FF FF FF FF FF
    if not os.path.exists('/etc/udev/rules.d/99-jlink.rules'):
        print(
            f"Can not execute jlink with current user, add '{os.path.join(tools_path, 'jlink/99-jlink.rules')}' inside '/etc/udev/rules.d/'",
            file=sys.stderr)
        exit(1)
    try:
        jlink = JLink(f"AT{device}")
        jlink.open(timeout)
        jlink.connect(timeout)
        jlink.erase(timeout)
        # BUG FIX: flash the freshly built binary instead of the hard-coded
        # developer path '/home/seb/git/bootloader/.../firmware.bin'
        jlink.flash_file(binfile, power_on=True, timeout=timeout)
        jlink.reset(timeout, halt=False)
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        exit(1)
def install(ctx, name, link, reinstall, **kwargs):
    """Resolve and install packages (and their transitive dependencies/plugins).

    With no explicit ``name``, installs everything declared by the current
    project (and its target, if any); otherwise installs the named packages,
    each optionally suffixed with ``@<version>``.

    :param ctx: CLI context carrying parent config/project/target
    :param name: tuple of package names to install; empty means "from project"
    :param link: prefer dev repositories and symlink instead of copying
    :param reinstall: force reinstallation of already-installed packages
    :param kwargs: also carries the 'global' flag (install under the user home)
    """
    log.info(f"install {name} {kwargs}")

    _global = kwargs['global']  # not in parameters due to conflict with global keywoard

    # load configuration
    config = ctx.parent.config

    # load project (may legitimately be absent when installing by name)
    project = ctx.parent.project
    if project is None:
        log.info(f"no project loaded")

    # nothing requested and no project to read dependencies from
    if len(name)==0 and project is None:
        print("nothing to install", file=sys.stderr)
        exit(1)

    # build repositories list
    repositories = load_repositories(config, prefix)
    for repository in repositories:
        # best-effort: missing credentials only risks hitting rate limits
        if repository.load_credentials()==False:
            print(f"{repository.name}: warning: load credentials failed, update may fail due to rate limitation", file=sys.stderr)

    # packages holds PackageDependency items still to be expanded
    packages = []

    if len(name)==0:
        # install everything the project (and optional target) declares
        if project is None:
            log.error(f"{os.getcwd()}: army.toml not found")
            exit(1)

        # get target config
        target = ctx.parent.target
        # if target is None:
        #     print(f"no target specified", file=sys.stderr)
        #     exit(1)

        for package in project.dependencies:
            pkg, repo = _find_package(package, project.dependencies[package], repositories, priority_dev=link)
            packages.append(PackageDependency(package=pkg, repository=repo))

        if target is not None:
            for package in target.dependencies:
                pkg, repo = _find_package(package, target.dependencies[package], repositories, priority_dev=link)
                packages.append(PackageDependency(package=pkg, repository=repo))

        for plugin in project.plugins:
            pkg, repo = _find_package(plugin, project.plugins[plugin], repositories, plugin=True, priority_dev=link)
            packages.append(PackageDependency(package=pkg, repository=repo))

        if target is not None:
            for plugin in target.plugins:
                pkg, repo = _find_package(plugin, target.plugins[plugin], repositories, plugin=True, priority_dev=link)
                packages.append(PackageDependency(package=pkg, repository=repo))
    else:
        # install the packages given on the command line; accepted forms are
        # "pkg", "pkg@version" and "scope@pkg@version" (three '@'-chunks)
        for package in name:
            if '@' in package:
                chunks = package.split('@')
                if len(chunks)==3:
                    package = f"{chunks[0]}@{chunks[1]}"
                    version = chunks[2]
                elif len(chunks)==2:
                    try:
                        # check if version is valid
                        test_version = VersionRange(chunks[1], ["0.0.0"])
                        package, version = chunks
                    except:
                        # not a parsable version range: treat the whole string
                        # as a package name and fall back to 'latest'
                        version = 'latest'
                else:
                    print(f"{package}: naming error", file=sys.stderr)
                    exit(1)
            else:
                version = 'latest'
            pkg, repo = _find_package(package, version, repositories, priority_dev=link)
            packages.append(PackageDependency(package=pkg, repository=repo))

    # locate install folder
    if _global:
        # NOTE(review): joining prefix with '~/.army/dist/' — presumably the
        # path is expanded later; verify against Package.install
        path = os.path.join(prefix or "", "~/.army/dist/")
    else:
        path = "dist"

    force = False
    if reinstall:
        force = True

    # breadth-first expansion: pop a package, record it, queue its own
    # dependencies and plugins until the work list is empty
    dependencies = []
    while(len(packages)>0):
        # get dependencies from top level package to end level
        package_dep = packages.pop(0)
        package = package_dep.package

        # dependency treated ok, append to list
        dependencies.append(package_dep)

        # append dependencies to list
        for dependency in package.dependencies:
            pkg, repo = _find_package(dependency, package.dependencies[dependency], repositories, priority_dev=link)
            dep_pkg = PackageDependency(package=pkg, repository=repo, from_package=package)
            packages.append(dep_pkg)

        # append plugins to list
        for plugin in package.plugins:
            pkg, repo = _find_package(plugin, package.plugins[plugin], repositories, priority_dev=link)
            dep_pkg = PackageDependency(package=pkg, repository=repo, from_package=package)
            packages.append(dep_pkg)

    # treat dependencies first (deepest packages get installed before the
    # packages that require them)
    dependencies.reverse()

    log.debug(f"packages: {dependencies}")

    # TODO checks
    _check_dependency_version_conflict(dependencies)
    _check_installed_version_conflict(dependencies)

    # clean dependency duplicates to avoid installing several times same package
    dependencies = _remove_duplicates(dependencies)

    # install
    for dependency in dependencies:
        install = False
        installed_package = load_installed_package(dependency.package.name, prefix=prefix)
        if installed_package:
            if force==True:
                print(f"reinstall {dependency.package}")
                install = True
            else:
                print(f"package {dependency.package} already installed", file=sys.stderr)
                install = False
        else:
            install = True
            print(f"install package {dependency.package}")

        if install==True:
            # link mode is only honoured for dev (local) repositories
            if link==True and dependency.repository.DEV==False:
                print(f"{dependency.package.name}: repository is not local, link not applied", file=sys.stderr)
            if dependency.repository.DEV==True:
                dependency.package.install(path=os.path.join(path, dependency.package.name), link=link)
            else:
                # link mode is only possible with repository DEV
                dependency.package.install(path=os.path.join(path, dependency.package.name), link=False)
def debug(ctx, **kwargs):
    """Launch the SEGGER Ozone debugger for the current project.

    Resolves the architecture from the profile, optionally generates an Ozone
    project file, then starts Ozone in the background. Exits with status 1 on
    any failure.

    :param ctx: CLI context carrying ``config``, ``profile`` and ``project``
    :param kwargs: extra CLI options (unused here)
    """
    log.info(f"debug")

    # load configuration
    config = ctx.config

    # load profile
    profile = ctx.profile

    # load project
    project = ctx.project
    if project is None:
        # fix: 'file=' keyword was missing, so sys.stderr was printed as a
        # second positional value instead of redirecting the message
        print(f"no project found", file=sys.stderr)
        exit(1)

    # load dependencies
    try:
        dependencies = load_project_packages(project)
        log.debug(f"dependencies: {dependencies}")
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        clean_exit()

    # get arch from profile
    arch, arch_package = get_arch(profile, project, dependencies)

    # get target from profile
    target = get_target(profile)

    if arch.mpu is None:
        print("Missing mpu informations from arch", file=sys.stderr)
        exit(1)

    # set code build path
    output_path = 'output'
    build_path = os.path.join(output_path, arch.mpu)
    log.info(f"build_path: {build_path}")

    # Ozone device names use the SAMD prefix, not Atmel's ATSAMD part number
    device = arch.mpu
    if device.startswith("ATSAMD"):
        device = device.replace("ATSAMD", "SAMD")

    log.info(f"Debug {device} with Ozone")

    binfile = os.path.join(build_path, "bin/firmware.bin")

    ozoneexe = locate_ozone(profile)
    log.debug(f"ozone path: {ozoneexe}")

    try:
        commandline = [
            f"{os.path.join(tools_path, ozoneexe)}",
        ]

        # point Ozone at a generated project file when one can be produced
        if add_project_file(arch):
            commandline += ['-project', f'project.jdebug']

        # '&' detaches Ozone; this relies on os.system spawning a shell
        commandline += ['&']
        log.info(" ".join(commandline))
        os.system(" ".join(commandline))
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        exit(1)
def console(ctx, tty, baud, echo, detach, **kwargs):
    """Open a serial console on /dev/<tty> through picocom.

    In detached mode the console is spawned inside a terminator window via the
    plugin's daemon.sh wrapper; otherwise picocom runs in the foreground.

    :param ctx: CLI context (unused here)
    :param tty: device name under /dev (e.g. "ttyUSB0")
    :param baud: serial baud rate
    :param echo: enable local echo (picocom -c)
    :param detach: run the console in a separate terminal window
    :param kwargs: extra CLI options (unused here)
    """
    log.info(f"console")

    print("Use ctrl-a to send content to serial")

    # fix: 'if echo == True' replaced with idiomatic truth test; also dropped
    # the unnecessary 'global tools_path' (the name is only read, not assigned)
    opts = []
    if echo:
        opts.append("-c")

    picocom = shutil.which("picocom")
    if picocom is None:
        print(
            f"picocom: not found, you can install it with 'sudo apt-get install picocom'"
        )
        return

    picocom_command = []
    try:
        picocom_command += [
            "picocom",
            f"/dev/{tty}",
            "-b", f"{baud}",
            "-l",
            "--imap=lfcrlf",
            "--omap=crlf",
            "--escape=a"
        ]
        picocom_command += opts

        if detach:
            terminator = shutil.which("terminator")
            if terminator is None:
                print(
                    f"terminator: not found, you can install it with 'sudo apt-get install terminator'"
                )
                return

            # daemon.sh keeps the console alive inside the spawned terminal
            command = [
                "terminator",
                "--no-dbus",
                "--command",
                ' '.join([f'{tools_path}/plugin/daemon.sh'] + picocom_command),
            ]

            log.debug(" ".join(command))
            subprocess.Popen(command,
                             start_new_session=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
            # give the terminal a moment to come up before returning
            sleep(1)
        else:
            log.debug(" ".join(picocom_command))
            subprocess.check_call(picocom_command)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        exit(1)
def main():
    """CLI entry point: load configuration, project, target and plugins, then
    hand control to the command-line parser.

    Populates the module-level globals (prefix, config, project, default_target,
    target_name) that the individual commands read through their context.
    """
    global prefix
    global config
    global project
    global default_target
    global target_name

    try:
        # cli_init will initialize the logger only, everything else is ignored at this point
        # we need to load the plugins before showing any help
        cli_init()
    except Exception:
        # fix: narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are no longer swallowed; init failures remain deliberately ignored
        pass

    global premature_exit
    if premature_exit:
        exit(1)

    # load army configuration files
    prefix = os.getenv('ARMY_PREFIX', None)
    if prefix is not None:
        log.debug(f"using {prefix} as path prefix")

    try:
        config = load_configuration(parent=root_config, prefix=prefix)
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        exit(1)

    # load internal plugins
    import army.plugin.repository
    import army.plugin.dependency
    import army.plugin.package
    # import army.plugin.build

    # load plugins
    # TODO load plugins from installed packages
    if os.path.exists('army.toml'):
        try:
            project = load_project()
        except Exception as e:
            print_stack()
            print(f"army.toml: {e}", file=sys.stderr)
            exit(1)

    # load default target if exists
    if project is not None:
        # get target config
        default_target = None
        if target_name is None and project.default_target:
            target_name = project.default_target
        if target_name is not None:
            if target_name in project.target:
                default_target = project.target[target_name]
            else:
                print(f"{target_name}: target not defined in project", file=sys.stderr)
                exit(1)
            log.info(f"current target: {target_name}")

    if project is not None:
        # load plugins at project level
        for plugin in project.plugins:
            plugin_config = None

            # search for plugin configuration in project
            if plugin in project.plugin:
                plugin_config = project.plugin[plugin]

            # search plugin configuration in target
            # fix: guard against default_target being None — previously this
            # raised AttributeError when the project declared plugins but no
            # target was selected
            if default_target is not None and plugin in default_target.plugin:
                plugin_config = default_target.plugin[plugin]

            try:
                load_plugin(plugin, config, plugin_config)
            except Exception as e:
                print_stack()
                print(f"{e}")

    if default_target is not None:
        # load plugins at target level
        for plugin in default_target.plugins:
            plugin_config = None

            # search plugin configuration in target
            if plugin in default_target.plugin:
                plugin_config = default_target.plugin[plugin]

            try:
                load_plugin(plugin, config, plugin_config)
            except Exception as e:
                print_stack()
                print(f"{e}")

    # parse command line
    cli()