Esempio n. 1
0
    def load_extension_specs(self):
        # Populate self.extensions and self.extension_groups from the
        # manifest, dropping any extension command that would shadow a
        # built-in or reuse an already-registered extension name.
        if self.manifest is None:
            # "None" means "extensions could not be determined".
            # Leaving this an empty dict would mean "there are no
            # extensions", which is different.
            self.extensions = None
            return

        seen_names = set()

        for path, specs in extension_commands(manifest=self.manifest).items():
            # Keep only commands that neither shadow a built-in nor
            # collide with a name claimed by an earlier extension.
            kept = []
            for spec in specs:
                if spec.name in self.builtins:
                    log.wrn(f'ignoring project {spec.project.name} '
                            f'extension command "{spec.name}"; '
                            'this is a built in command')
                    continue
                if spec.name in seen_names:
                    log.wrn(f'ignoring project {spec.project.name} '
                            f'extension command "{spec.name}"; '
                            f'command "{spec.name}" is '
                            'already defined as extension command')
                    continue

                seen_names.add(spec.name)
                kept.append(spec)
                self.extensions[spec.name] = spec

            self.extension_groups[path] = kept
Esempio n. 2
0
def get_extension_commands():
    """Return a map from manifest path to lists of extension command
    specs, filtering out commands which shadow built-in commands or
    reuse a name already claimed by another extension."""
    extensions = extension_commands()
    extension_names = set()

    for path, specs in extensions.items():
        # Filter out attempts to shadow built-in commands as well as
        # commands which have names which are already used.
        filtered = []
        for spec in specs:
            if spec.name in BUILTIN_COMMAND_NAMES:
                # FIX: quote the command name, for consistency with
                # the duplicate-name warning below (the original
                # message left it unquoted).
                log.wrn(
                    'ignoring project {} extension command "{}";'.format(
                        spec.project.name, spec.name),
                    'this is a built in command')
                continue
            if spec.name in extension_names:
                log.wrn(
                    'ignoring project {} extension command "{}";'.format(
                        spec.project.name, spec.name),
                    'command "{}" already defined as extension command'.format(
                        spec.name))
                continue
            filtered.append(spec)
            extension_names.add(spec.name)
        # Rebinding an existing key during items() iteration is safe:
        # the dict's size does not change.
        extensions[path] = filtered

    return extensions
Esempio n. 3
0
    def create(cls, cfg, args):
        # Construct a PyOcdBinaryRunner from the parsed arguments,
        # honoring the deprecated PYOCD_DAPARG environment variable as
        # a fallback when --daparg was not given.
        env_daparg = os.environ.get('PYOCD_DAPARG')
        if env_daparg:
            log.wrn('Setting PYOCD_DAPARG in the environment is',
                    'deprecated; use the --daparg option instead.')
            if args.daparg is None:
                log.dbg('Missing --daparg set to {} from environment'.format(
                    env_daparg),
                        level=log.VERBOSE_VERY)
                args.daparg = env_daparg

        build_conf = BuildConfiguration(cfg.build_dir)
        flash_addr = cls.get_flash_address(args, build_conf)

        return PyOcdBinaryRunner(cfg, args.target,
                                 flashtool=args.flashtool,
                                 flash_addr=flash_addr,
                                 flashtool_opts=args.flashtool_opt,
                                 gdbserver=args.gdbserver,
                                 gdb_port=args.gdb_port,
                                 tui=args.tui,
                                 board_id=args.board_id,
                                 daparg=args.daparg,
                                 frequency=args.frequency)
Esempio n. 4
0
    def do_run(self, args, unknown_args):
        """Print environment variable assignments (KAFL_BIN_PATH, etc.)
        for using the kAFL projects in the current west workspace.

        Missing binaries produce a warning and an empty assignment.
        """
        env = Manifest.from_file()

        try:
            kafl_path = env.get_projects(['kafl'])[0].abspath
        except ValueError:
            # FIX: dropped the unused 'as e' binding. No project named
            # 'kafl'; fall back to the manifest repository, which may
            # itself be the kAFL checkout.
            kafl_path = env.get_projects(['manifest'])[0].abspath

        capstone_path = env.get_projects(['capstone'])[0].abspath
        libxdc_path = env.get_projects(['libxdc'])[0].abspath
        qemu_path = env.get_projects(['qemu'])[0].abspath

        kafl_bin_path = os.path.join(kafl_path, 'kafl_fuzz.py')
        if not os.path.exists(kafl_bin_path):
            log.wrn("Could not find kAFL-Fuzzer in %s" % kafl_path)
            kafl_bin_path = ""

        qemu_bin_path = os.path.join(qemu_path,
                                     'x86_64-softmmu/qemu-system-x86_64')
        if not os.path.exists(qemu_bin_path):
            log.wrn("Could not find kAFL Qemu binary in %s" % qemu_path)
            qemu_bin_path = ""

        # project executables
        print("KAFL_BIN_PATH=%s" % kafl_bin_path)
        print("KAFL_QEMU_PATH=%s" % qemu_bin_path)

        # project libraries/includes
        print("C_INCLUDE_PATH=%s/include:%s" % (capstone_path, libxdc_path))
        print("LIBRARY_PATH=%s:%s" % (capstone_path, libxdc_path))
        print("LD_LIBRARY_PATH=%s:%s" % (capstone_path, libxdc_path))
Esempio n. 5
0
    def do_run(self, args, extra_args):
        """Print the absolute path of each west project named in
        extra_args.

        A query for 'kafl' falls back to the manifest repository when
        it looks like a kAFL checkout (contains kafl_fuzz.py).
        """
        env = Manifest.from_file()

        for query in extra_args:
            try:
                prj = env.get_projects([query])
                print(prj[0].abspath)
            except ValueError:
                # FIX: dropped the two unused 'as e' bindings on the
                # ValueError handlers in this method.
                # check if `manifest` is the kAFL repo..
                if query != 'kafl':
                    log.err(
                        "Could not find %s in west projects. Try `west list`."
                        % query)
                    return
                try:
                    # check if manifest repo is kAFL
                    kafl_path = env.get_projects(['manifest'])[0].abspath
                    if os.path.exists(kafl_path + '/kafl_fuzz.py'):
                        log.wrn(
                            "Returning `manifest` repo path for query `%s`.." %
                            query)
                        print(kafl_path)
                except ValueError:
                    log.err(
                        "Could not find %s in west projects. Try `west list`."
                        % query)
            except Exception as e:
                # Best-effort: report any other error and keep going
                # with the remaining queries.
                log.err(str(e))
    def print_loot(self, name, project, z_project, args):
        # Print a list of out of tree outstanding patches in the given
        # project.
        #
        # name: project name
        # project: the west.manifest.Project instance in the NCS manifest
        # z_project: the Project instance in the upstream manifest
        # args: parsed command-line arguments (uses args.files and
        #       args.sha_only)
        name_path = _name_and_path(project)

        # Get the upstream revision of the project. The zephyr project
        # has to be treated as a special case.
        if name == 'zephyr':
            z_rev = self.zephyr_rev
        else:
            z_rev = z_project.revision

        # Make sure the downstream revision is available locally;
        # 'git cat-file -e' fails if the object is missing.
        try:
            nsha = project.sha(project.revision)
            project.git('cat-file -e ' + nsha)
        except subprocess.CalledProcessError:
            log.wrn(f"{name_path}: can't get loot; please run "
                    f'"west update" (need revision {project.revision})')
            return
        # Same availability check for the upstream revision.
        try:
            zsha = z_project.sha(z_rev)
            z_project.git('cat-file -e ' + zsha)
        except subprocess.CalledProcessError:
            log.wrn(f"{name_path}: can't get loot; please fetch upstream URL "
                    f'{z_project.url} (need revision {z_project.revision})')
            return

        try:
            analyzer = nwh.RepoAnalyzer(project, z_project, project.revision,
                                        z_rev)
        except nwh.InvalidRepositoryError as ire:
            log.die(f"{name_path}: {str(ire)}")

        try:
            loot = analyzer.downstream_outstanding
        except nwh.UnknownCommitsError as uce:
            log.die(f'{name_path}: unknown commits: {str(uce)}')

        if not loot and log.VERBOSE <= log.VERBOSE_NONE:
            # Don't print output if there's no loot unless verbose
            # mode is on.
            return

        log.banner(name_path)
        log.inf(f'NCS commit: {nsha}, upstream commit: {zsha}')
        log.inf('OOT patches: ' + (f'{len(loot)} total' if loot else 'none') +
                (', output limited by --file' if args.files else ''))
        for c in loot:
            # Honor the --file filter: skip commits that don't touch
            # any of the requested paths.
            if args.files and not nwh.commit_affects_files(c, args.files):
                log.dbg(f"skipping {c.oid}; it doesn't affect file filter",
                        level=log.VERBOSE_VERY)
                continue
            if args.sha_only:
                log.inf(str(c.oid))
            else:
                log.inf(f'- {c.oid} {nwh.commit_shortlog(c)}')
Esempio n. 7
0
    def __init__(self, name, url):
        # Warn about a trailing slash up front: West appends one
        # itself, so a user-supplied one is redundant.
        if url[-1:] == '/':
            log.wrn('Remote', name, 'URL', url, 'ends with a slash ("/");',
                    'these are automatically appended by West')

        self.name = name
        self.url = url
Esempio n. 8
0
    def do_run(self, args, remainder):
        # Entry point for the build command: parse arguments, decide
        # whether CMake needs to (re-)run, then drive the build.
        self.args = args        # Avoid having to pass them around
        self.config_board = config_get('board', None)
        log.dbg('args: {} remainder: {}'.format(args, remainder),
                level=log.VERBOSE_EXTREME)
        # Store legacy -s option locally
        source_dir = self.args.source_dir
        self._parse_remainder(remainder)
        # Parse testcase.yaml or sample.yaml files for additional options.
        if self.args.test_item:
            self._parse_test_item()
        if source_dir:
            # Giving the source directory both via -s and positionally
            # is ambiguous; refuse to guess.
            if self.args.source_dir:
                log.die("source directory specified twice:({} and {})".format(
                                            source_dir, self.args.source_dir))
            self.args.source_dir = source_dir
        log.dbg('source_dir: {} cmake_opts: {}'.format(self.args.source_dir,
                                                       self.args.cmake_opts),
                level=log.VERBOSE_EXTREME)
        self._sanity_precheck()
        self._setup_build_dir()

        if args.pristine is not None:
            # Command-line --pristine wins over the configuration value.
            pristine = args.pristine
        else:
            # Load the pristine={auto, always, never} configuration value
            pristine = config_get('pristine', 'never')
            if pristine not in ['auto', 'always', 'never']:
                log.wrn(
                    'treating unknown build.pristine value "{}" as "never"'.
                    format(pristine))
                pristine = 'never'
        self.auto_pristine = (pristine == 'auto')

        log.dbg('pristine: {} auto_pristine: {}'.format(pristine,
                                                        self.auto_pristine),
                level=log.VERBOSE_VERY)
        if is_zephyr_build(self.build_dir):
            if pristine == 'always':
                # Unconditionally wipe the build directory and force a
                # fresh CMake run.
                self._run_pristine()
                self.run_cmake = True
            else:
                self._update_cache()
                # Re-run CMake when explicitly requested or when new
                # CMake options were passed.
                if (self.args.cmake or self.args.cmake_opts or
                        self.args.cmake_only):
                    self.run_cmake = True
        else:
            # Not an existing Zephyr build directory: CMake must run.
            self.run_cmake = True
        self.source_dir = self._find_source_dir()
        self._sanity_check()

        board, origin = self._find_board()
        self._run_cmake(board, origin, self.args.cmake_opts)
        if args.cmake_only:
            return

        self._sanity_check()
        self._update_cache()

        self._run_build(args.target)
Esempio n. 9
0
    def do_run(self, args, user_args):
        # Update every project except the manifest repository,
        # collecting rebase failures so they can be summarized at the
        # end.
        if args.exclude_west:
            log.wrn('ignoring --exclude-west')

        failed_rebases = []

        for project in _projects(args, listed_must_be_cloned=False,
                                 exclude_manifest=True):
            _banner(project.format('updating {name_and_path}:'))

            if _update(project, args.rebase, args.keep_descendants):
                failed_rebases.append(project)
                log.err(project.format('{name_and_path} failed to rebase'))

        if not failed_rebases:
            return

        # Avoid printing this message if exactly one project
        # was specified on the command line.
        if len(args.projects) != 1:
            log.err(('The following project{} failed to rebase; '
                    'see above for details: {}').format(
                        's' if len(failed_rebases) > 1 else '',
                        ', '.join(p.format('{name_and_path}')
                                  for p in failed_rebases)))
        raise CommandError(1)
Esempio n. 10
0
    def do_run(self, args, ignored):
        """Print help for the requested command, falling back to
        top-level help for unknown or missing command names.

        Relies on main() having set up the west_parser attribute
        before calling run().
        """
        # FIX: removed the needless 'global BUILTIN_COMMANDS'
        # declaration; the name is only read here, never assigned, so
        # the global statement had no effect.
        if not args.command_name:
            self.west_parser.print_help(top_level=True)
            return
        elif args.command_name == 'help':
            self.parser.print_help()
            return
        elif args.command_name in BUILTIN_COMMANDS:
            BUILTIN_COMMANDS[args.command_name].parser.print_help()
            return
        elif self.west_parser.west_extensions is not None:
            extensions = self.west_parser.west_extensions.values()
            for spec in itertools.chain(*extensions):
                if spec.name == args.command_name:
                    # run_extension() does not return
                    run_extension(spec, self.topdir,
                                  [args.command_name, '--help'], self.manifest)

        # Unknown command: warn and show the top-level help instead.
        log.wrn('unknown command "{}"'.format(args.command_name))
        self.west_parser.print_help(top_level=True)
Esempio n. 11
0
    def load_manifest(self):
        # Try to parse the manifest. We'll save it if that works, so
        # it doesn't have to be re-parsed.

        if not self.topdir:
            # Not inside a workspace: there is no manifest to load.
            return

        try:
            self.manifest = Manifest.from_file(topdir=self.topdir)
        except (ManifestVersionError, MalformedManifest, MalformedConfig,
                FileNotFoundError, ManifestImportFailed) as e:
            # Defer exception handling to WestCommand.run(), which uses
            # handle_builtin_manifest_load_err() to decide what to do.
            #
            # Make sure to update that function if you change the
            # exceptions caught here. Unexpected exceptions should
            # propagate up and fail fast.
            #
            # This might be OK, e.g. if we're running 'west config
            # manifest.path foo' to fix the MalformedConfig error, but
            # there's no way to know until we've parsed the command
            # line arguments.
            if isinstance(e, _ManifestImportDepth):
                # Warn immediately for this case: it usually means the
                # manifest contains an import loop, which the deferred
                # handling can't diagnose as clearly.
                log.wrn('recursion depth exceeded during manifest resolution; '
                        'your manifest likely contains an import loop. '
                        'Run "west -v manifest --resolve" to debug.')
            # Saved for later inspection by the command machinery.
            self.mle = e
Esempio n. 12
0
def _post_checkout_help(project, branch, sha, is_ancestor):
    # Give the user helpful hints about a local branch that the
    # checkout we just performed may have left behind.

    if branch is None:
        # Nothing was checked out before, so there are no additional
        # diagnostics worth emitting.
        return

    rel = relpath(project.abspath)
    if not is_ancestor:
        # Tell the user how they could rebase by hand, and
        # point them at west update --rebase.
        log.wrn(project.format(
            'left behind {name} branch "{b}"; '
            'to rebase onto the new HEAD: git -C {rp} rebase {sh} {b}',
            b=branch, rp=rel, sh=sha))
        log.dbg('(To do this automatically in the future,',
                'use "west update --rebase".)')
    else:
        # The branch we just left behind is a descendant of the new
        # HEAD (e.g. a topic branch the user is working on while the
        # remote hasn't changed); print a message that makes it easy
        # to get back, no matter where in the installation
        # os.getcwd() is.
        log.wrn(project.format(
            'left behind {name} branch "{b}"; to switch '
            'back to it (fast forward), use: git -C {rp} checkout {b}',
            b=branch, rp=rel))
        log.dbg('(To do this automatically in the future,',
                'use "west update --keep-descendants".)')
Esempio n. 13
0
def makeSPDX(cfg):
    """Generate, scan, and write the SPDX documents described by cfg.

    Returns True on success, False if any walker or writer step fails.
    """
    # report any odd configuration settings
    if cfg.analyzeIncludes and not cfg.includeSDK:
        log.wrn("config: requested to analyze includes but not to generate SDK SPDX document;")
        log.wrn("config: will proceed but will discard detected includes for SDK header files")

    # set up walker configuration
    walkerCfg = WalkerConfig()
    walkerCfg.namespacePrefix = cfg.namespacePrefix
    walkerCfg.buildDir = cfg.buildDir
    walkerCfg.analyzeIncludes = cfg.analyzeIncludes
    walkerCfg.includeSDK = cfg.includeSDK

    # make and run the walker
    w = Walker(walkerCfg)
    retval = w.makeDocuments()
    if not retval:
        log.err("SPDX walker failed; bailing")
        return False

    # set up scanner configuration
    scannerCfg = ScannerConfig()

    # scan each document from walker
    if cfg.includeSDK:
        scanDocument(scannerCfg, w.docSDK)
    scanDocument(scannerCfg, w.docApp)
    scanDocument(scannerCfg, w.docZephyr)
    scanDocument(scannerCfg, w.docBuild)

    # write each document, in this particular order so that the
    # hashes for external references are calculated

    # write SDK document, if we made one
    if cfg.includeSDK:
        retval = writeSPDX(os.path.join(cfg.spdxDir, "sdk.spdx"), w.docSDK)
        if not retval:
            log.err("SPDX writer failed for SDK document; bailing")
            return False

    # write app document
    retval = writeSPDX(os.path.join(cfg.spdxDir, "app.spdx"), w.docApp)
    if not retval:
        log.err("SPDX writer failed for app document; bailing")
        return False

    # write zephyr document
    # BUG FIX: the original discarded writeSPDX()'s return value here
    # and for the build document below, so the following checks tested
    # the stale 'retval' from the app document and never caught a
    # zephyr/build writer failure.
    retval = writeSPDX(os.path.join(cfg.spdxDir, "zephyr.spdx"), w.docZephyr)
    if not retval:
        log.err("SPDX writer failed for zephyr document; bailing")
        return False

    # write build document
    retval = writeSPDX(os.path.join(cfg.spdxDir, "build.spdx"), w.docBuild)
    if not retval:
        log.err("SPDX writer failed for build document; bailing")
        return False

    return True
Esempio n. 14
0
    def do_run(self, args, unknown_args):
        # Compare nrf/west.yml with zephyr/west.yml and report which
        # upstream projects are present or missing in NCS, taking the
        # project blacklist into account.
        self.check_west_version()
        self.setup_upstream_downstream(args)

        log.inf('Comparing nrf/west.yml with zephyr/west.yml at revision {}{}'.
                format(self.zephyr_rev,
                       (', sha: ' + self.zephyr_sha
                        if self.zephyr_rev != self.zephyr_sha else '')))
        log.inf()

        # Partition upstream projects into four buckets based on
        # presence in the NCS project map and blacklist membership.
        present_blacklisted = []
        present_allowed = []
        missing_blacklisted = []
        missing_allowed = []
        for zn, zp in self.z_pmap.items():
            nn = self.to_ncs_name(zp)
            present = nn in self.ncs_pmap
            blacklisted = PurePath(zp.path) in _PROJECT_BLACKLIST
            if present:
                if blacklisted:
                    present_blacklisted.append(zp)
                else:
                    present_allowed.append(zp)
            else:
                if blacklisted:
                    missing_blacklisted.append(zp)
                else:
                    missing_allowed.append(zp)

        def print_lst(projects):
            # Helper: one small banner per project.
            for p in projects:
                log.small_banner(p.format('{name_and_path}'))

        # Blacklisted-and-missing is the expected state; only mention
        # it in verbose mode.
        if missing_blacklisted and log.VERBOSE >= log.VERBOSE_NORMAL:
            log.banner('blacklisted but missing in NCS (these are all OK):')
            print_lst(missing_blacklisted)

        if present_blacklisted:
            log.banner('blacklisted upstream projects that are in NCS:')
            log.wrn('these names should be removed from nrf/west.yml')
            print_lst(present_blacklisted)

        if missing_allowed:
            log.banner('non-blacklisted upstream projects missing in NCS:')
            log.wrn('these should be blacklisted or added to nrf/west.yml')
            # Print extra revision/URL details to make adding the
            # project to nrf/west.yml easier.
            for p in missing_allowed:
                log.small_banner(p.format('{name_and_path}'))
                log.inf(p.format('upstream revision: {revision}'))
                log.inf(p.format('upstream URL: {url}'))

        if present_allowed:
            log.banner('projects present in NCS:')
            for zp in present_allowed:
                # Do some extra checking on unmerged commits.
                self.allowed_project(zp)
Esempio n. 15
0
    def print_loot(self, name, project, z_project, args):
        # Print a list of out of tree outstanding patches in the given
        # project.
        #
        # name: project name
        # project: the west.manifest.Project instance in the NCS manifest
        # z_project: the Project instance in the upstream manifest
        msg = project.format('{name_and_path} outstanding downstream patches:')

        # Get the upstream revision of the project. The zephyr project
        # has to be treated as a special case.
        if name == 'zephyr':
            z_rev = self.zephyr_rev
            msg += ' NOTE: {} *must* be up to date'.format(z_rev)
        else:
            z_rev = z_project.revision

        log.banner(msg)

        # Make sure the downstream revision is available locally;
        # 'git cat-file -e' fails if the object is missing.
        try:
            nsha = project.sha(project.revision)
            project.git('cat-file -e ' + nsha)
        except subprocess.CalledProcessError:
            log.wrn(
                "can't get loot; please run \"west update {}\"".format(
                    project.name),
                '(need revision {})'.format(project.revision))
            return
        try:
            # BUG FIX: resolve the upstream SHA from z_rev, not
            # z_project.revision. For the zephyr project the two can
            # differ (z_rev is self.zephyr_rev), and the analyzer
            # below is constructed with z_rev, so the availability
            # check must match. The warning names z_rev for the same
            # reason.
            zsha = z_project.sha(z_rev)
            z_project.git('cat-file -e ' + zsha)
        except subprocess.CalledProcessError:
            log.wrn("can't get loot; please fetch upstream URL", z_project.url,
                    '(need revision {})'.format(z_rev))
            return

        try:
            analyzer = nwh.RepoAnalyzer(project, z_project, project.revision,
                                        z_rev)
        except nwh.InvalidRepositoryError as ire:
            log.die(str(ire))
        try:
            for c in analyzer.downstream_outstanding:
                # Honor the --file filter: skip commits outside it.
                if args.files and not nwh.commit_affects_files(c, args.files):
                    log.dbg(
                        'skipping {}; it does not affect file filter'.format(
                            c.oid),
                        level=log.VERBOSE_VERY)
                    continue
                if args.sha_only:
                    log.inf(str(c.oid))
                else:
                    log.inf('- {} {}'.format(c.oid, nwh.commit_shortlog(c)))
        except nwh.UnknownCommitsError as uce:
            log.die('unknown commits:', str(uce))
Esempio n. 16
0
def clean_up(path):
    # Run the generated pristine.cmake script; if CMake fails, fall
    # back to deleting the generated files we know about.
    try:
        run_cmake(['-P', str(path / 'pristine.cmake')],
                  capture_output=True)
    except CalledProcessError:
        # Do our best to clean up even though CMake failed.
        log.wrn(f'Failed to make {path} pristine; '
                'removing known generated files...')
        known_outputs = ('CMakeCache.txt', 'CMakeFiles', 'build.ninja',
                         'cmake_install.cmake', 'rules.ninja')
        for name in known_outputs:
            remove_if_exists(Path(path) / name)
Esempio n. 17
0
def set_zephyr_base(args):
    '''Ensure ZEPHYR_BASE is set, emitting warnings if that's not
    possible, or if the user is pointing it somewhere different than
    what the manifest expects.'''
    zb_env = os.environ.get('ZEPHYR_BASE')

    if args.zephyr_base:
        # The command line --zephyr-base takes precedence over
        # everything else.
        zb = os.path.abspath(args.zephyr_base)
        zb_origin = 'command line'
    else:
        # If the user doesn't specify it concretely, use the project
        # with path 'zephyr' if that exists, or the ZEPHYR_BASE value
        # in the calling environment.
        #
        # At some point, we need a more flexible way to set environment
        # variables based on manifest contents, but this is good enough
        # to get started with and to ask for wider testing.
        try:
            manifest = Manifest.from_file()
        except MalformedConfig as e:
            log.die('Parsing of manifest file failed during command',
                    args.command, ':', *e.args)
        for project in manifest.projects:
            if project.path == 'zephyr':
                zb = project.abspath
                zb_origin = 'manifest file {}'.format(manifest.path)
                break
        else:
            if zb_env is None:
                log.wrn('no --zephyr-base given, ZEPHYR_BASE is unset,',
                        'and no manifest project has path "zephyr"')
                zb = None
                zb_origin = None
            else:
                zb = zb_env
                zb_origin = 'environment'

    if zb is None:
        # BUG FIX: the original fell through to the os.environ
        # assignment below, which raises TypeError when zb is None.
        # The warning explaining the situation was already emitted
        # above, so just bail out.
        return

    if zb_env and os.path.abspath(zb) != os.path.abspath(zb_env):
        # The environment ZEPHYR_BASE takes precedence over either the
        # command line or the manifest, but in normal multi-repo
        # operation we shouldn't expect to need to set ZEPHYR_BASE to
        # point to some random place. In practice, this is probably
        # happening because zephyr-env.sh/cmd was run in some other
        # zephyr installation, and the user forgot about that.
        log.wrn('ZEPHYR_BASE={}'.format(zb_env),
                'in the calling environment, but has been set to', zb,
                'instead by the', zb_origin)

    os.environ['ZEPHYR_BASE'] = zb

    log.dbg('ZEPHYR_BASE={} (origin: {})'.format(zb, zb_origin))
Esempio n. 18
0
 def fetch_strategy(self, args):
     # Resolve the fetch strategy: the command line wins, then the
     # validated 'update.fetch' configuration value, then the 'smart'
     # default.
     cfg = config.get('update', 'fetch', fallback=None)
     if cfg is not None and cfg not in ('always', 'smart'):
         log.wrn(f'ignoring invalid config update.fetch={cfg}; '
                 'choices: always, smart')
         cfg = None
     return args.fetch_strategy or cfg or 'smart'
Esempio n. 19
0
    def do_run(self, args, user_args):
        # Dispatch to a full or partial workspace update.
        self.args = args
        if args.exclude_west:
            log.wrn('ignoring --exclude-west')

        # We can't blindly call self._projects() here: manifests with
        # imports are limited to plain 'west update', and cannot use
        # 'west update PROJECT [...]'.
        self.fs = self.fetch_strategy(args)
        if args.projects:
            self.update_some(args)
        else:
            self.update_all(args)
Esempio n. 20
0
def command_execute(*cmd_args: 'tuple[str|Path]',
                    cwd: 'str|Path|None' = None,
                    return_path: bool = False,
                    allow_stderr: bool = False) -> 'Path|str':
    '''Execute subprocess wrapper that handles errors and output redirections.

    cmd_args: the command and its arguments; each item is str()-converted.
    cwd: working directory for the subprocess (None inherits the
        current one).
    return_path: if True, return a Path to a file containing stdout
        (the temporary file is kept on disk: delete=False below);
        otherwise return the captured stdout as a string.
    allow_stderr: if True, stderr output is only warned about;
        otherwise it is treated as an error.

    Raises SbomException if the command cannot be started, exits with
    a non-zero code, or (with allow_stderr False) writes to stderr
    while exiting successfully.
    '''
    cmd_args = tuple(str(x) for x in cmd_args)
    if cwd is not None:
        cwd = str(cwd)
    with NamedTemporaryFile(delete=(not return_path), mode='w+') as out_file, \
            NamedTemporaryFile(mode='w+') as err_file:
        try:
            t = dbg_time(
                f'Starting {cmd_args} in {cwd or "current directory"}',
                level=log.VERBOSE_VERY)
            cp = subprocess.run(cmd_args,
                                stdout=out_file,
                                stderr=err_file,
                                cwd=cwd)
            log.dbg(f'Subprocess done in {t}s', level=log.VERBOSE_VERY)
        except Exception as e:
            # The process could not be started or run at all.
            log.err(f'Running command "{cmd_args[0]}" failed!')
            log.err(f'Arguments: { json.dumps(cmd_args) }, cwd: "{cwd}"')
            log.err(f'Details: {e}')
            raise SbomException('Command execution error.') from e
        out_file.seek(0)
        err_file.seek(0)

        err = err_file.read()

        if len(err.strip()) > 0:
            if allow_stderr:
                log.wrn(f'Command "{cmd_args[0]}" reported errors:\n{err}')
            else:
                log.err(f'Command "{cmd_args[0]}" reported errors:\n{err}')
                # Only raise here for a zero exit code; a non-zero
                # code raises (with the same message) further below.
                if cp.returncode == 0:
                    log.err(
                        f'Arguments: { json.dumps(cmd_args) }, cwd: "{cwd}"')
                    raise SbomException('Command execution error.')
        err_file.close()

        if cp.returncode != 0:
            log.err(
                f'Command "{cmd_args[0]}" exited with error code {cp.returncode}'
            )
            log.err(f'Arguments: { json.dumps(cmd_args) }, cwd: "{cwd}"')
            raise SbomException('Command execution error.')
        if return_path:
            # The caller is responsible for the file left on disk.
            return Path(out_file.name)
        else:
            return out_file.read()
Esempio n. 21
0
    def _run_cmake(self, board, origin, cmake_opts):
        # Run CMake (when self.run_cmake is set) to generate the build
        # system.
        #
        # board: board name or None; origin: where the board value
        # came from (e.g. 'CMakeCache.txt').
        #
        # NOTE(review): the cmake_opts parameter is rebound below
        # before it is ever read, so the value passed by the caller is
        # effectively ignored; self.args.cmake_opts is used instead.
        if board is None and config_getboolean('board_warn', True):
            log.wrn('This looks like a fresh build and BOARD is unknown;',
                    "so it probably won't work. To fix, use",
                    '--board=<your-board>.')
            log.inf('Note: to silence the above message, run',
                    "'west config build.board_warn false'")

        if not self.run_cmake:
            return

        _banner('generating a build system')

        if board is not None and origin != 'CMakeCache.txt':
            cmake_opts = ['-DBOARD={}'.format(board)]
        else:
            # Board already cached (or unknown): don't pass -DBOARD.
            cmake_opts = []
        if self.args.cmake_opts:
            cmake_opts.extend(self.args.cmake_opts)

        # Extra user-configured arguments (build.cmake-args).
        user_args = config_get('cmake-args', None)
        if user_args:
            cmake_opts.extend(shlex.split(user_args))

        config_sysbuild = config_getboolean('sysbuild', False)
        if self.args.sysbuild or (config_sysbuild
                                  and not self.args.no_sysbuild):
            # Sysbuild: point CMake at the sysbuild project and pass
            # the application directory via APP_DIR.
            cmake_opts.extend([
                '-S{}'.format(SYSBUILD_PROJ_DIR),
                '-DAPP_DIR:PATH={}'.format(self.source_dir)
            ])
        else:
            # self.args.no_sysbuild == True or config sysbuild False
            cmake_opts.extend(['-S{}'.format(self.source_dir)])

        # Invoke CMake from the current working directory using the
        # -S and -B options (officially introduced in CMake 3.13.0).
        # This is important because users expect invocations like this
        # to Just Work:
        #
        # west build -- -DOVERLAY_CONFIG=relative-path.conf
        final_cmake_args = [
            '-DWEST_PYTHON={}'.format(sys.executable),
            '-B{}'.format(self.build_dir),
            '-G{}'.format(config_get('generator', DEFAULT_CMAKE_GENERATOR))
        ]
        if cmake_opts:
            final_cmake_args.extend(cmake_opts)
        run_cmake(final_cmake_args, dry_run=self.args.dry_run)
Esempio n. 22
0
    def do_run(self, args, user_args):
        # Update every selected project, skipping the manifest
        # repository and remembering which updates failed for the
        # final report.
        if args.exclude_west:
            log.wrn('ignoring --exclude-west')
        strategy = self._fetch_strategy(args)

        failures = []
        for project in self._projects(args.projects):
            if isinstance(project, ManifestProject):
                continue
            log.banner(project.format('updating {name_and_path}:'))
            try:
                _update(project, strategy, args.rebase, args.keep_descendants)
            except subprocess.CalledProcessError:
                failures.append(project)
        self._handle_failed(args, failures)
Esempio n. 23
0
def scanDocument(cfg, doc):
    """
    Scan for licenses and calculate hashes for all Files and Packages
    in this Document.

    Arguments:
        - cfg: ScannerConfig
        - doc: Document
    """
    for pkg in doc.pkgs.values():
        log.inf(
            f"scanning files in package {pkg.cfg.name} in document {doc.cfg.name}"
        )

        # first, gather File data for this package
        for f in pkg.files.values():
            # set relpath based on package's relativeBaseDir
            f.relpath = os.path.relpath(f.abspath, pkg.cfg.relativeBaseDir)

            # get hashes for file
            hashes = getHashes(f.abspath)
            if not hashes:
                # BUG FIX: the original string lacked the f-prefix, so
                # "{f.abspath}" was logged literally instead of the
                # file path.
                log.wrn(f"unable to get hashes for file {f.abspath}; skipping")
                continue
            hSHA1, hSHA256, hMD5 = hashes
            f.sha1 = hSHA1
            if cfg.doSHA256:
                f.sha256 = hSHA256
            if cfg.doMD5:
                f.md5 = hMD5

            # get licenses for file
            expression = getExpressionData(f.abspath, cfg.numLinesScanned)
            if expression:
                if cfg.shouldConcludeFileLicenses:
                    f.concludedLicense = expression
                f.licenseInfoInFile = splitExpression(expression)

            # check if any custom license IDs should be flagged for document
            for lic in f.licenseInfoInFile:
                checkLicenseValid(lic, doc)

        # now, assemble the Package data
        licsConcluded, licsFromFiles = getPackageLicenses(pkg)
        if cfg.shouldConcludePackageLicense:
            pkg.concludedLicense = normalizeExpression(licsConcluded)
        pkg.licenseInfoFromFiles = licsFromFiles
        pkg.verificationCode = calculateVerificationCode(pkg)