Example #1
def rtt_console(ctx, tty, baud, echo, detach, **kwargs):
    log.info("rtt-console")

    opts = []
    if echo:
        opts.append("-c")

    command = []

    jlinkexe = locate_jlink()
    log.debug(f"jlink path: {jlinkexe}")

    try:
        command += [
            "picocom", f"/dev/{tty}", "-b", f"{baud}", "-l", "--imap=lfcrlf",
            "--omap=crlf", "--escape=a"
        ]

        command += opts

        # TODO add check picocom is installed
        if detach:
            # a literal "&" in an argv list is passed to picocom as an argument,
            # not interpreted by a shell; detach with Popen instead
            subprocess.Popen(command, start_new_session=True)
        else:
            subprocess.check_call(command)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
Example #2
    def update(self):
        logged = self.load_credentials()
        if not logged:
            print(f"{self.name}: warning: load credentials failed, update may be incomplete when using private repository", file=sys.stderr)
        try:
            uri, groupuri = self._decompose_uri()
            if logged:
                # no need to login for public repo but needed for private repo
                g = gitlab.Gitlab(uri, private_token=self._token)
                g.auth()
            else:
                g = gitlab.Gitlab(uri)
        except Exception as e:
            print_stack()
            log.debug(f"{type(e)} {e}")
            raise GitlabRepositoryException(f"{e}")

        try:
            group = None
            for ig in g.groups.list():
                if ig.name == groupuri:
                    group = ig
        except gitlab.exceptions.GitlabGetError as e:
            print_stack()
            log.debug(f"{type(e)} {e}")
            raise GitlabRepositoryException(f"{groupuri}: group not found")
        except Exception as e:
            print_stack()
            log.debug(f"{type(e)} {e}")
            raise GitlabRepositoryException(f"{e}")
        if group is None:
            raise GitlabRepositoryException(f"{groupuri}: group not found")

        try:
            # index the visible projects by name
            projects = {}
            for p in g.projects.list():
                projects[p.name] = p
        except Exception as e:
            print_stack()
            log.debug(f"{type(e)} {e}")
            raise GitlabRepositoryException(f"{e}")
        
        try:
            for p in group.projects.list(all=True):
                log.debug(f"update repo {p.name}")
                if p.name not in projects:
                    log.error(f"{p.name}: update failed")
                    continue # TODO raise error?
                project = projects[p.name]
                for release in project.releases.list():
                    if release.tag_name.startswith("v"):
                        self._index_package(project.name, release.tag_name[1:], project.description)
        except Exception as e:
            print_stack()
            log.debug(f"{type(e)} {e}")
            raise GitlabRepositoryException(f"{e}")
    
        super(GitlabRepository, self).update()
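
For reference, a minimal standalone sketch of the same python-gitlab calls used above; the URL, token and group name are placeholders, not values taken from the project:

import gitlab

gl = gitlab.Gitlab("https://gitlab.com", private_token="<token>")  # placeholder credentials
gl.auth()
group = gl.groups.list(search="my-group")[0]  # assumes the group exists and is visible
for gp in group.projects.list(all=True):
    project = gl.projects.get(gp.id)  # group listings do not expose releases directly
    for release in project.releases.list():
        if release.tag_name.startswith("v"):
            print(project.name, release.tag_name[1:], project.description)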
Example #3
def locate_jlink():
    global tools_path

    # search for jlink folder
    jlink_path = os.path.join('jlink', 'JLinkExe')
    if not os.path.exists(os.path.join(tools_path, jlink_path)):
        log.error(
            f"jlink was not found inside '{tools_path}', check plugin installation"
        )
        exit(1)

    return jlink_path
Example #4
    def _search_dir(path):
        if not os.path.exists(os.path.expanduser(path)):
            return []

        res = []
        for package in os.listdir(os.path.expanduser(path)):
            try:
                pkg = _load_installed_package(os.path.join(path, package))
                res.append(pkg)
            except Exception as e:
                print_stack()
                log.debug(e)
                log.error(
                    f"{os.path.join(path, package)}: not a valid package")

        return res
Example #5
    def _search_dir(path, version_range):
        if not os.path.exists(os.path.expanduser(path)):
            return None

        for package in os.listdir(os.path.expanduser(path)):
            if name == package:
                try:
                    pkg = _load_installed_package(os.path.join(path, package))
                    if version_range is None:
                        return pkg
                    else:
                        version_range = VersionRange(version_range,
                                                     [pkg.version])
                        if version_range.match(Version(pkg.version)):
                            return pkg
                except Exception as e:
                    print_stack()
                    log.debug(e)
                    log.error(
                        f"{os.path.join(path, package)}: not a valid package")

        return None
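
VersionRange and Version here are the project's own matcher classes; as a rough illustration of the same idea with the separate packaging library (not what this code uses), a version can be tested against a specifier set like this:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=1.2,<2.0")   # hypothetical range
print(Version("1.4.0") in spec)     # True
print(Version("2.1.0") in spec)     # False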
Example #6
def get_arch(profile, project, dependencies):
    # add arch
    try:
        arch = profile.data["/arch"]
        arch_name = profile.data["/arch/name"]
    except Exception as e:
        print_stack()
        log.error(e)
        print("No arch definition provided by profile", file=sys.stderr)
        exit(1)

    if 'name' not in arch:
        print("Arch name missing", file=sys.stderr)
        exit(1)

    package = None
    res_package = None
    if 'package' in arch:
        if 'version' in arch:
            package_version = arch['version']
        else:
            package_version = 'latest'
        package_name = arch['package']
        package = load_installed_package(package_name, package_version)
        res_package = package

    if package is None:
        package = project

    # search arch in found package
    archs = package.archs
    arch = next((arch for arch in archs if arch.name == arch_name), None)
    if arch is None:
        print(f"Arch {arch_name} not found in {package}", file=sys.stderr)
        exit(1)

    return arch, res_package
Example #7
def locate_tools(profile):
    tools = {
        "c": "c compiler",
        "c++": "c++ compiler",
        "asm": "assembler compiler",
        "ld": "linker",
        "ar": "archive tool",
        "objcopy": "object file copy tool",
        "objdump": "object file content dump tool",
        "size": "object file size tool",
        "nm": "symbols export tool",
    }
    for tool, name in tools.items():
        try:
            tool_path = profile.data[f"/tools/{tool}/path"]
            if not os.path.exists(tool_path):
                print(f"{tool_path}: path not found for {name}",
                      file=sys.stderr)
                exit(1)
        except Exception as e:
            print_stack()
            log.error(e)
            print(f"No {name} defined", file=sys.stderr)
            exit(1)
Example #8
def console(ctx, tty, baud, echo, detach, **kwargs):
    log.info(f"console")

    global tools_path

    print("Use ctrl-a to send content to serial")

    opts = []
    if echo:
        opts.append("-c")

    picocom_command = []

    picocom = shutil.which("picocom")
    if picocom is None:
        print(
            f"picocom: not found, you can install it with 'sudo apt-get install picocom'"
        )
        return

    try:
        picocom_command += [
            "picocom", f"/dev/{tty}", "-b", f"{baud}", "-l", "--imap=lfcrlf",
            "--omap=crlf", "--escape=a"
        ]

        picocom_command += opts

        if detach:
            #             xterm = shutil.which("xterm")
            #             if xterm is None:
            #                 print(f"xterm: not found, you can install it with 'sudo apt-get install xterm'")
            #                 return
            #             command += [
            #                 "nohup",
            #                 "xterm",
            #                 "-j",
            #                 "-rightbar",
            #                 "-sb",
            #                 "-si",
            #                 "-sk",
            #                 "-sl", "99999",
            #                 "-e"
            #             ]

            terminator = shutil.which("terminator")
            if terminator is None:
                print(
                    f"terminator: not found, you can install it with 'sudo apt-get install terminator'"
                )
                return
            command = [
                "terminator",
                "--no-dbus",
                "--command",
                ' '.join([f'{tools_path}/plugin/daemon.sh'] + picocom_command),
            ]

            log.debug(" ".join(command))
            subprocess.Popen(command,
                             start_new_session=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
            sleep(1)
        else:
            log.debug(" ".join(picocom_command))
            subprocess.check_call(picocom_command)

    except Exception as e:
        print_stack()
        log.error(f"{e}")
        exit(1)
Example #9
def debug(ctx, **kwargs):
    log.info(f"debug")

    # load configuration
    config = ctx.config

    # load profile
    profile = ctx.profile

    # load project
    project = ctx.project
    if project is None:
        print("no project found", file=sys.stderr)
        exit(1)

    # load dependencies
    try:
        dependencies = load_project_packages(project)
        log.debug(f"dependencies: {dependencies}")
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        clean_exit()

    # get arch from profile
    arch, arch_package = get_arch(profile, project, dependencies)

    # get target from profile
    target = get_target(profile)

    if arch.mpu is None:
        print("Missing mpu informations from arch", file=sys.stderr)
        exit(1)

    # set code build path
    output_path = 'output'
    build_path = os.path.join(output_path, arch.mpu)
    log.info(f"build_path: {build_path}")

    device = arch.mpu
    if device.startswith("ATSAMD"):
        device = device.replace("ATSAMD", "SAMD")

    log.info(f"Debug {device} with Ozone")
    #
    hex_file = os.path.join(build_path, "bin/firmware.hex")
    binfile = os.path.join(build_path, "bin/firmware.bin")
    #
    ozoneexe = locate_ozone(profile)
    log.debug(f"ozone path: {ozoneexe}")

    try:
        commandline = [
            f"{os.path.join(tools_path, ozoneexe)}",
        ]

        # generate the Ozone project file
        if add_project_file(arch):
            commandline += ['-project', f'project.jdebug']

        commandline += ['&']
        log.info(" ".join(commandline))
        #subprocess.check_call(commandline)
        os.system(" ".join(commandline))
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        exit(1)
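
Note that the trailing "&" only works here because the command is joined and run through os.system, i.e. a shell. A shell-free way to detach Ozone, similar to how the console command in Example #8 detaches terminator, could look like this (the executable path is illustrative):

import subprocess

# launch Ozone detached from the current session
subprocess.Popen(["/path/to/Ozone", "-project", "project.jdebug"],
                 start_new_session=True,
                 stdout=subprocess.DEVNULL,
                 stderr=subprocess.DEVNULL)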
Example #10
def add_project_file(arch):
    global tools_path

    project_load = [
        f'Project.AddPathSubstitute ("{os.path.abspath(os.getcwd())}", "$(ProjectDir)");',
    ]

    cpu = arch.cpu
    peripheral = arch.mpu

    cpu_map = {'cortex-m0plus': "Cortex-M0+", 'cortex-m0': "Cortex-M0"}
    cpu_svd = {'cortex-m0plus': "Cortex-M0", 'cortex-m0': "Cortex-M0"}
    cpu_freertos = {'cortex-m0': "CM0", 'cortex-m0plus': "CM0"}
    device_svd = {}

    #     if cpu not in cpu_map:
    #         print(f"No correspondence defined for device {cpu}", file=sys.stderr)
    #         return False
    if cpu in cpu_map:
        project_load.append(f'Project.SetDevice ("{cpu_map[cpu]}");')
    else:
        project_load.append(f'Project.SetDevice ("{cpu}");')

    project_load.append('Project.SetHostIF ("USB", "");')
    project_load.append('Project.SetTargetIF ("SWD");')
    project_load.append('Project.SetTIFSpeed ("12 MHz");')

    if cpu in cpu_svd:
        project_load.append(
            f'Project.AddSvdFile ("{os.path.abspath(os.path.expanduser(tools_path))}/ozone/Config/CPU/{cpu_svd[cpu]}.svd");'
        )
    else:
        project_load.append(
            f'Project.AddSvdFile ("{os.path.abspath(os.path.expanduser(tools_path))}/ozone/Config/CPU/{cpu}.svd");'
        )

    if peripheral in device_svd:
        svd_file = f"{os.path.abspath(os.path.expanduser(tools_path))}/ozone/Config/Peripherals/AT{device_svd[peripheral]}.svd"
    else:
        svd_file = f"{os.path.abspath(os.path.expanduser(tools_path))}/ozone/Config/Peripherals/AT{peripheral}.svd"
    if os.path.exists(svd_file):
        project_load.append(f'Project.AddSvdFile ("{svd_file}");')
    else:
        log.warning(f"No peripherals found for {peripheral}")

    if cpu in cpu_freertos:
        project_load.append(
            f'Project.SetOSPlugin("FreeRTOSPlugin_{cpu_freertos[cpu]}");')
    else:
        log.warning(f"No FreeRTOS correspondence defined for device {cpu}")

    project_load.append(
        f'File.Open ("$(ProjectDir)/output/{arch.mpu}/bin/firmware.elf");')

    # write project.jdebug from template
    try:
        loader = template.Loader(os.path.join(os.path.expanduser(tools_path),
                                              'template'),
                                 autoescape=None)
        content = loader.load("project.jdebug").generate(
            project_load="\n".join(project_load))
        with open("project.jdebug", "w") as f:
            f.write(content.decode("utf-8"))
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        exit(1)

    return True
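
The loader used above appears to be tornado's template module; a self-contained sketch of the same generate step, substituting an inline template for the plugin's project.jdebug template file:

from tornado import template

# inline stand-in for the project.jdebug template; the real file ships with the plugin
t = template.Template("// Ozone project file\n{{ project_load }}\n", autoescape=None)
content = t.generate(project_load='Project.SetTargetIF ("SWD");')
print(content.decode("utf-8"))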
Example #11
def install(ctx, name, link, reinstall, **kwargs):
    log.info(f"install {name} {kwargs}")
    
    _global = kwargs['global']  # not in parameters due to conflict with the global keyword
    
    # load configuration
    config = ctx.parent.config
    
    # load project
    project = ctx.parent.project
    if project is None:
        log.info(f"no project loaded")

    if len(name)==0 and project is None:
        print("nothing to install", file=sys.stderr)
        exit(1)
        
    # build repositories list
    repositories = load_repositories(config, prefix)
    
    for repository in repositories:
        if not repository.load_credentials():
            print(f"{repository.name}: warning: load credentials failed, update may fail due to rate limitation", file=sys.stderr)
        
    packages = []

    if len(name)==0:
        if project is None:
            log.error(f"{os.getcwd()}: army.toml not found")
            exit(1)

        # get target config
        target = ctx.parent.target
#         if target is None:
#             print(f"no target specified", file=sys.stderr)
#             exit(1)

        for package in project.dependencies:
            pkg, repo = _find_package(package, project.dependencies[package], repositories, priority_dev=link)
            packages.append(PackageDependency(package=pkg, repository=repo))

        if target is not None:
            for package in target.dependencies:
                pkg, repo = _find_package(package, target.dependencies[package], repositories, priority_dev=link)
                packages.append(PackageDependency(package=pkg, repository=repo))
            
        for plugin in project.plugins:
            pkg, repo = _find_package(plugin, project.plugins[plugin], repositories, plugin=True, priority_dev=link)
            packages.append(PackageDependency(package=pkg, repository=repo))
        
        if target is not None:
            for plugin in target.plugins:
                pkg, repo = _find_package(plugin, target.plugins[plugin], repositories, plugin=True, priority_dev=link)
                packages.append(PackageDependency(package=pkg, repository=repo))
    else:
        for package in name:
            if '@' in package:
                chunks = package.split('@')
                if len(chunks)==3:
                    package = f"{chunks[0]}@{chunks[1]}"
                    version = chunks[2]
                elif len(chunks)==2:
                    try:
                        # check if version is valid
                        test_version = VersionRange(chunks[1], ["0.0.0"])
                        package, version = chunks
                    except:
                        version = 'latest'
                else:
                    print(f"{package}: naming error", file=sys.stderr)
                    exit(1)
            else:
                version = 'latest'
            pkg, repo = _find_package(package, version, repositories, priority_dev=link)
            packages.append(PackageDependency(package=pkg, repository=repo))

    # locate install folder
    if _global:
        path = os.path.join(prefix or "", "~/.army/dist/")
    else:
        path = "dist"
    
    force = bool(reinstall)

    dependencies = []
    while packages:
        # get dependencies from top level package to end level
        package_dep = packages.pop(0)
        package = package_dep.package

        # dependency treated ok, append to list
        dependencies.append(package_dep)
        
        # append dependencies to list
        for dependency in package.dependencies:
            pkg, repo = _find_package(dependency, package.dependencies[dependency], repositories, priority_dev=link)
            dep_pkg = PackageDependency(package=pkg, repository=repo, from_package=package)
            packages.append(dep_pkg)

        # append plugins to list
        for plugin in package.plugins:
            pkg, repo = _find_package(plugin, package.plugins[plugin], repositories, priority_dev=link)
            dep_pkg = PackageDependency(package=pkg, repository=repo, from_package=package)
            packages.append(dep_pkg)

    # treat dependencies first
    dependencies.reverse()

    log.debug(f"packages: {dependencies}")
    
    # TODO checks
    _check_dependency_version_conflict(dependencies)
    _check_installed_version_conflict(dependencies)
    
    # clean dependency duplicates to avoid installing several times same package
    dependencies = _remove_duplicates(dependencies)

    # install
    for dependency in dependencies:
        install = False
        installed_package = load_installed_package(dependency.package.name, prefix=prefix)
        if installed_package:
            if force:
                print(f"reinstall {dependency.package}")
                install = True
            else:
                print(f"package {dependency.package} already installed", file=sys.stderr)
                install = False
        else:
            install = True
            print(f"install package {dependency.package}")
            
        if install:
            if link and not dependency.repository.DEV:
                print(f"{dependency.package.name}: repository is not local, link not applied", file=sys.stderr)
            if dependency.repository.DEV:
                dependency.package.install(path=os.path.join(path, dependency.package.name), link=link)
            else:
                # link mode is only possible with repository DEV
                dependency.package.install(path=os.path.join(path, dependency.package.name), link=False)
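
The '@' handling above keeps the first two chunks together as the package name when a spec has three parts. A condensed, hypothetical helper showing the same parsing rule (simplified: the real code also validates that the trailing chunk parses as a version range):

def parse_spec(spec):
    # "pkg", "pkg@1.2.3" or "scope@pkg@1.2.3" -> (package, version)
    chunks = spec.split("@")
    if len(chunks) == 3:
        return f"{chunks[0]}@{chunks[1]}", chunks[2]
    if len(chunks) == 2:
        return chunks[0], chunks[1]
    return spec, "latest"

print(parse_spec("cortex-m@1.0.0"))  # ('cortex-m', '1.0.0')
print(parse_spec("mylib"))           # ('mylib', 'latest')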
Example #12
def compile(ctx, debug, instrument, jobs, **kwargs):
    log.info(f"compile")

    # load configuration
    config = ctx.config

    # load profile
    profile = ctx.profile

    # load project
    project = ctx.project
    if project is None:
        print("no project found", file=sys.stderr)
        exit(1)

    cmake_opts = []
    make_opts = []

    # set code build path
    output_path = 'output'

    # set home directory
    cmake_opts.append("-H.")

    # load dependencies
    try:
        dependencies = load_project_packages(project)
        log.debug(f"dependencies: {dependencies}")
    except Exception as e:
        print_stack()
        print(f"{e}", file=sys.stderr)
        clean_exit()

    # add toolchain
    try:
        toolchain_name = profile.data["/tools/toolchain/name"]
        toolchain_definition = profile.data["/tools/toolchain/definition"]
        toolchain_path = profile.data["/tools/toolchain/path"]
        cmake_opts.append(f"-DCMAKE_TOOLCHAIN_FILE='{toolchain_definition}'")
    except Exception as e:
        print_stack()
        log.error(e)
        print("No toolchain definition provided by profile", file=sys.stderr)
        exit(1)

    # get arch from profile
    arch, arch_package = get_arch(profile, project, dependencies)

    # get target from profile
    target = get_target(profile)

    if debug and instrument:
        print("debug and instrument can not be used simultaneously",
              file=sys.stderr)
        exit(1)

    if debug:
        cmake_opts.append("-DCMAKE_BUILD_TYPE=Debug")
    elif instrument:
        cmake_opts.append("-DCMAKE_BUILD_TYPE=RelWithDebInfo")
    else:
        cmake_opts.append("-DCMAKE_BUILD_TYPE=Release")

    if get_log_level() != "fatal":
        cmake_opts.append("-DCMAKE_VERBOSE_MAKEFILE=ON")
    else:
        cmake_opts.append("-DCMAKE_VERBOSE_MAKEFILE=OFF")

    cmake_opts.append("-DCMAKE_COLOR_MAKEFILE=ON")

    #  Suppress developer warnings. Suppress warnings that are meant for the author of the CMakeLists.txt files
    cmake_opts.append("-Wno-dev")

    # search for toolchain binaries
    locate_tools(profile)

    # set build path
    if arch.mpu is None:
        build_path = os.path.join(output_path, arch.cpu)
        print(f"Build using toolchain {toolchain_name} for arch {arch.cpu}")
    else:
        build_path = os.path.join(output_path, arch.mpu)
        print(f"Build using toolchain {toolchain_name} for mpu {arch.mpu}")

    log.info(f"build_path: {build_path}")
    cmake_opts.append(f"-B{build_path}")

    # for ccache
    os.putenv("CCACHE_LOGFILE",
              os.path.abspath(os.path.join(build_path, "ccache.log")))

    # add path
    os.putenv("tools_path", os.path.abspath(tools_path))
    os.putenv("toolchain_path", os.path.abspath(toolchain_path))
    os.putenv("project_path", os.path.abspath(os.getcwd()))

    os.putenv("c_path", profile.data['/tools/c/path'])
    os.putenv("cxx_path", profile.data['/tools/c++/path'])
    os.putenv("asm_path", profile.data['/tools/asm/path'])
    os.putenv("ar_path", profile.data['/tools/ar/path'])
    os.putenv("ld_path", profile.data['/tools/ld/path'])
    os.putenv("objcopy_path", profile.data['/tools/objcopy/path'])
    os.putenv("objdump_path", profile.data['/tools/objdump/path'])
    os.putenv("size_path", profile.data['/tools/size/path'])
    os.putenv("nm_path", profile.data['/tools/nm/path'])

    # add arch vars
    os.putenv("cpu", arch.cpu)
    os.putenv("mpu", arch.mpu)
    if arch_package is None:
        os.putenv("arch_package", "_")
    else:
        os.putenv("arch_package", arch_package.name)
    os.putenv("arch_path", arch.cpu_definition)

    try:
        log.info(f"cmake options: {' '.join(cmake_opts)}")
        #
        # create output folder
        os.makedirs(build_path, exist_ok=True)

        # add cmake files
        add_cmake_files(build_path, dependencies, arch, arch_package, target)

        # TODO force rebuild elf file even if not changed
        # find ${PROJECT_PATH}/output -name "*.elf" -exec rm -f {} \; 2>/dev/null

        if get_log_level() == 'debug':
            os.system("env")
            SystemExit(_program('cmake', ['--version']))

        # generate cmake files
        res = SystemExit(_program('cmake', cmake_opts))
        if res.code > 0:
            log.error(f"Build failed")
            exit(1)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        clean_exit()

    make_opts.append(f"-j{jobs}")

    #     # enable color output
    #     os.putenv("GCC_COLORS", 'error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01')

    cwd = os.getcwd()
    try:
        log.info(f"make options: {' '.join(make_opts)}")

        # build now
        os.chdir(build_path)
        subprocess.check_call(['make'] + make_opts)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        os.chdir(cwd)
        clean_exit()

    os.chdir(cwd)
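
The flow above generates with cmake and then runs make from inside the build directory. The same two steps can be driven through cmake's own build driver, which avoids changing directories; a minimal sketch with illustrative paths and options:

import subprocess

build_path = "output/samd21"  # illustrative
subprocess.check_call(["cmake", "-S", ".", "-B", build_path,
                       "-DCMAKE_BUILD_TYPE=Release"])
subprocess.check_call(["cmake", "--build", build_path, "--", "-j4"])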
Example #13
def add_cmake_files(build_path, dependencies, arch, arch_package, target):
    global tools_path
    #     # build list of includes
    #     includes = get_cmake_target_includes(target)
    #     includes += get_cmake_includes(dependencies)

    # copy army.cmake
    try:
        shutil.copy(
            os.path.join(os.path.expanduser(tools_path), "cmake",
                         "army.cmake"), build_path)
    except Exception as e:
        print_stack()
        log.error(f"{e}")
        exit(1)

    with open(os.path.join(build_path, "army.cmake"), "a") as fa:
        print("\n# dependencies section definition", file=fa)

        with open(os.path.join(build_path, "dependencies.cmake"), "w") as fd:

            # add target
            print("\n# target definition", file=fa)
            if target is not None:
                # add pre definitions
                if 'pre' in target:
                    for pre in target['pre']:
                        print(f'include_army_package_file(_ {pre})', file=fd)

                # add definition
                print(f'include_army_package_file(_ {target["definition"]})',
                      file=fd)

                # add post definitions
                if 'post' in target:
                    for post in target['post']:
                        print(f'include_army_package_file(_ {post})', file=fd)

            for dependency in dependencies:
                if 'cmake' in dependency.definition:
                    print(f'set({dependency.name}_path "{dependency.path}")',
                          file=fa)
                    print(
                        f'set({dependency.name}_definition "{os.path.join(dependency.path, dependency.definition["cmake"])}")',
                        file=fa)
                    print(f"include_army_package({dependency.name})", file=fd)

                    os.putenv(f"package_{dependency.name}_path",
                              dependency.path)
                    os.putenv(f"package_{dependency.name}_definition",
                              dependency.definition["cmake"])

                log.info(f"Adding dependency: {dependency}")

            # add arch
            print("\n# arch definition", file=fa)
            if arch.mpu_definition is not None:
                if arch_package is None:
                    print(
                        f'include_army_package_file(_ {arch.mpu_definition})',
                        file=fd)
                else:
                    os.putenv(f"package_{arch_package.name}_path",
                              arch_package.path)
                    print(
                        f'include_army_package_file({arch_package.name} {arch.mpu_definition})',
                        file=fd)