def main(argv=None):
    # Makes ANSI color escapes work on Windows, and strips them when
    # stdout/stderr isn't a terminal
    colorama.init()

    if argv is None:
        argv = sys.argv[1:]
    args, unknown = parse_args(argv)

    for_stack_trace = 'run as "west -v ... {} ..." for a stack trace'.format(
        args.command)
    try:
        args.handler(args, unknown)
    except WestUpdated:
        # West has been automatically updated. Restart ourselves to run the
        # latest version, with the same arguments that we were given.
        os.execv(sys.executable, [sys.executable] + sys.argv)
    except KeyboardInterrupt:
        sys.exit(0)
    except CalledProcessError as cpe:
        log.err('command exited with status {}: {}'.format(
            cpe.args[0], quote_sh_list(cpe.args[1])))
        if args.verbose:
            raise
        else:
            log.inf(for_stack_trace)
    except CommandContextError as cce:
        log.die('command', args.command, 'cannot be run in this context:',
                *cce.args)
    except Exception as exc:
        log.err(*exc.args, fatal=True)
        if args.verbose:
            raise
        else:
            log.inf(for_stack_trace)
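main() assumes a parse_args() helper that returns the parsed namespace together with any unrecognized arguments. A minimal sketch of that contract (hypothetical; west's real parser also wires up subcommands and their handlers):

import argparse

def parse_args(argv):
    parser = argparse.ArgumentParser(prog='west')
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('command')
    # parse_known_args() returns (namespace, leftover), so unrecognized
    # arguments can be forwarded to the selected subcommand's handler.
    return parser.parse_known_args(argv)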
Example #2
def _all_projects(args):
    # Parses the manifest file, returning a list of Project instances.
    #
    # Before the manifest is parsed, it is validated against a pykwalify
    # schema. An error is raised on validation errors.

    manifest_path = _manifest_path(args)

    _validate_manifest(manifest_path)

    with open(manifest_path) as f:
        manifest = yaml.safe_load(f)['manifest']

    projects = []
    # Manifest "defaults" keys whose values get copied to each project
    # that doesn't specify its own value.
    project_defaults = ('remote', 'revision')

    # mp = manifest project (dictionary with values parsed from the manifest)
    for mp in manifest['projects']:
        # Fill in any missing fields in 'mp' with values from the 'defaults'
        # dictionary
        if 'defaults' in manifest:
            for key, val in manifest['defaults'].items():
                if key in project_defaults:
                    mp.setdefault(key, val)

        # Add the repository URL to 'mp'
        for remote in manifest['remotes']:
            if remote['name'] == mp['remote']:
                mp['url'] = remote['url'] + '/' + mp['name']
                break
        else:
            log.die('Remote {} not defined in {}'.format(
                mp['remote'], manifest_path))

        # If no clone path is specified, the project's name is used
        clone_path = mp.get('path', mp['name'])

        # Use named tuples to store project information. That gives nicer
        # syntax compared to a dict (project.name instead of project['name'],
        # etc.)
        projects.append(
            Project(
                mp['name'],
                mp['url'],
                # If no revision is specified, 'master' is used
                mp.get('revision', 'master'),
                clone_path,
                # Absolute clone path
                os.path.join(util.west_topdir(), clone_path),
                # If no clone depth is specified, we fetch the entire history
                mp.get('clone-depth', None)))

    return projects
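For reference, the Project type instantiated above takes six positional fields; a compatible definition would be a namedtuple along these lines (field names are assumed from the arguments and from attribute accesses like project.name elsewhere):

from collections import namedtuple

# Field order matches the positional arguments above (names are assumed).
Project = namedtuple('Project', 'name url revision path abspath clone_depth')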
Example #3
def _validate_manifest(manifest_path):
    # Validates the manifest with pykwalify. schema.yml holds the schema.

    schema_path = os.path.join(os.path.dirname(__file__), "schema.yml")

    try:
        pykwalify.core.Core(source_file=manifest_path,
                            schema_files=[schema_path]).validate()
    except pykwalify.errors.SchemaError as e:
        log.die('{} malformed (schema: {}):\n{}'.format(
            manifest_path, schema_path, e))
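pykwalify can also validate in-memory data, which makes the schema/manifest relationship easy to see; a self-contained sketch (the schema here is illustrative, not west's actual schema.yml):

import pykwalify.core

schema = {
    'type': 'map',
    'mapping': {
        'manifest': {'type': 'map', 'allowempty': True},
    },
}
document = {'manifest': {'projects': []}}
# Raises pykwalify.errors.SchemaError if 'document' doesn't match 'schema'.
pykwalify.core.Core(source_data=document, schema_data=schema).validate()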
Example #4
def start(_, cfg):
    global users
    try:
        file_root = cfg['wan_filestorage']
        host = cfg['wan_host']
        port = cfg['wan_port']
        realm = 'WAN'
    except KeyError:
        # No complete WAN configuration; fall back to the LAN settings.
        file_root = cfg['lan_filestorage']
        host = cfg['lan_host']
        port = cfg['lan_port']
        realm = 'LAN'

    try:
        for i in (0, 1):
            if not os.path.exists(cfg['certificates'][i]):
                die(f'specified certificate not found, "{cfg["certificates"][i]}"')
        ssl_context = (cfg['certificates'][0], cfg['certificates'][1])
    except (KeyError, IndexError):
        # No certificates configured: serve plain HTTP. Note that a bare
        # 'except:' here would also swallow the SystemExit raised by die()
        # above when a configured certificate file is missing.
        ssl_context = False

    if not os.path.exists(file_root):
        try:
            os.makedirs(file_root)
        except OSError:
            die(f'unable to create file root {file_root}, please make it yourself and fix permissions',
                ErrorCode.SERVER_ERROR)

    app.config['fileroot'] = file_root
    app.secret_key = os.urandom(50)
    app.name = f'filuxe_server_{realm}'
    try:
        app.config['writekey'] = cfg['write_key']
    except KeyError:
        app.config['writekey'] = ''

    try:
        users = {cfg['username']: generate_password_hash(cfg['password'])}
        inf('HTTP-AUTH enabled')
    except KeyError:
        inf('HTTP-AUTH disabled')

    inf(f'filuxe {realm} server {filuxe_server_version} running at http{"s" if ssl_context else ""}://{host}:{port}')
    inf(f'filestorage root "{file_root}"')

    if ssl_context:
        app.run(host=host, port=port, ssl_context=ssl_context)
    else:
        app.run(host=host, port=port)
    return ErrorCode.OK
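The cfg dictionary consulted above uses per-realm keys; a rough illustration with hypothetical values (only the key names are taken from the lookups in start()):

cfg = {
    'wan_filestorage': '/srv/filuxe/wan',  # falls back to the lan_* keys if absent
    'wan_host': '0.0.0.0',
    'wan_port': 8081,
    'certificates': ['/etc/ssl/filuxe.crt', '/etc/ssl/filuxe.key'],  # optional, enables HTTPS
    'write_key': 'secret',    # optional
    'username': 'admin',      # optional, enables HTTP-AUTH together with 'password'
    'password': 'changeme',
}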
Example #5
def _projects(args, listed_must_be_cloned=True):
    # Returns a list of project instances for the projects requested in 'args'
    # (the command-line arguments), in the same order that they were listed by
    # the user. If args.projects is empty, no projects were listed, and all
    # projects will be returned. If a non-existent project was listed by the
    # user, an error is raised.
    #
    # Before the manifest is parsed, it is validated against a pykwalify
    # An error is raised on validation errors.
    #
    # listed_must_be_cloned (default: True):
    #   If True, an error is raised if an uncloned project was listed. This
    #   only applies to projects listed explicitly on the command line.

    projects = _all_projects(args)

    if not args.projects:
        # No projects specified. Return all projects.
        return projects

    # Got a list of projects on the command line. First, check that they exist
    # in the manifest.

    project_names = [project.name for project in projects]
    nonexistent = set(args.projects) - set(project_names)
    if nonexistent:
        log.die('Unknown project{} {} (available projects: {})'.format(
            's' if len(nonexistent) > 1 else '', ', '.join(nonexistent),
            ', '.join(project_names)))

    # Return the projects in the order they were listed
    res = []
    for name in args.projects:
        for project in projects:
            if project.name == name:
                res.append(project)
                break

    # Check that all listed repositories are cloned, if requested
    if listed_must_be_cloned:
        uncloned = [prj.name for prj in res if not _cloned(prj)]
        if uncloned:
            log.die('The following projects are not cloned: {}. Please clone '
                    "them first (with 'west fetch').".format(
                        ", ".join(uncloned)))

    return res
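The nested loop above preserves the user's ordering; since the names were already checked against the manifest, the same selection can be written with a lookup table (an equivalent sketch, not the original code):

by_name = {project.name: project for project in projects}
res = [by_name[name] for name in args.projects]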
Example #6
def _build_dir(args, die_if_none=True):
    # Get the build directory for the given argument list and environment.
    if args.build_dir:
        return args.build_dir

    cwd = getcwd()
    default = path.join(cwd, DEFAULT_BUILD_DIR)
    if is_zephyr_build(default):
        return default
    elif is_zephyr_build(cwd):
        return cwd
    elif die_if_none:
        log.die('--build-dir was not given, and neither {} '
                'nor {} are zephyr build directories.'.
                format(default, cwd))
    else:
        return None
Example #7
    def download(self, filename, path, force=False):
        url = f'{self.server}/download/{path}'
        response = requests.get(url, verify=self.certificate)
        if response.status_code != 200:
            err(f'server returned error {response.status_code} for downloading "{path}"')
            return ErrorCode.FILE_NOT_FOUND

        if force or not os.path.exists(filename):
            with open(filename, 'wb') as f:
                f.write(response.content)
            inf(f'downloaded {url} ({human_file_size(os.path.getsize(filename))}) as "{filename}"')
        else:
            die(f'local file "{filename}" already exists, bailing out (see --force)',
                error_code=ErrorCode.FILE_ALREADY_EXIST)
        return ErrorCode.OK
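A hypothetical call, reusing the Filuxe construction shown in Example #12 (the file and path names are made up):

filuxe = filuxe_api.Filuxe(cfg, lan=True)
# Fetch 'builds/app.bin' from the server; refuses to overwrite an existing
# local file unless force=True.
error_code = filuxe.download('app.bin', 'builds/app.bin', force=True)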
Example #8
    def do_run(self, args, user_args):
        branch_exists = False

        for project in _cloned_projects(args):
            if args.create_branch:
                _create_branch(project, args.branch)
                _checkout(project, args.branch)
                branch_exists = True
            elif _has_branch(project, args.branch):
                _checkout(project, args.branch)
                branch_exists = True

        if not branch_exists:
            msg = 'No branch {} exists in any '.format(args.branch)
            if args.projects:
                log.die(msg + 'of the listed projects')
            else:
                log.die(msg + 'cloned project')
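The _has_branch() helper is assumed to test whether a project repository already has the local branch; a minimal stand-alone sketch (the real helper presumably goes through _git_helper from Example #16):

import subprocess

def _has_branch(project, branch):
    # 'git show-ref --quiet' exits with status 0 iff the ref exists.
    res = subprocess.run(['git', 'show-ref', '--quiet', 'refs/heads/' + branch],
                         cwd=project.abspath)
    return res.returncode == 0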
Example #9
    def _setup_source_dir(self):
        # Initialize source_dir attribute, either from command line argument,
        # implicitly from the build directory's CMake cache, or using the
        # default (current working directory).
        log.dbg('setting up source directory', level=log.VERBOSE_EXTREME)
        if self.args.source_dir:
            source_dir = self.args.source_dir
        elif self.cmake_cache:
            source_dir = self.cmake_cache.get('APPLICATION_SOURCE_DIR')
            if not source_dir:
                # Maybe Zephyr changed the key? Give the user a way
                # to retry, at least.
                log.die("can't determine application from build directory "
                        "{}, please specify an application to build".format(
                            self.build_dir))
        else:
            source_dir = os.getcwd()
        self.source_dir = os.path.abspath(source_dir)
Example #10
    def _setup_build_dir(self):
        # Initialize build_dir and created_build_dir attributes.
        log.dbg('setting up build directory', level=log.VERBOSE_EXTREME)
        if self.args.build_dir:
            build_dir = self.args.build_dir
        else:
            cwd = os.getcwd()
            if is_zephyr_build(cwd):
                build_dir = cwd
            else:
                build_dir = DEFAULT_BUILD_DIR
        build_dir = os.path.abspath(build_dir)

        if os.path.exists(build_dir):
            if not os.path.isdir(build_dir):
                log.die(
                    'build directory {} exists and is not a directory'.format(
                        build_dir))
        else:
            os.makedirs(build_dir, exist_ok=False)
            self.created_build_dir = True
            self.run_cmake = True

        self.build_dir = build_dir
Example #11
def startup(cfname, checkonly, threadmax):
	# First, parse the config file.
	try:
		cfg = cfloader.parsefile(cfname)
	except cfloader.BadInput as e:
		log.die("Cannot load conf file: %s" % (str(e),))
Example #12
args = parser.parse_args()

if args.verbose:
    log.setLevel(logging.DEBUG)
elif args.info:
    log.setLevel(logging.INFO)
else:
    log.setLevel(logging.WARNING)

cfg = None

if args.config:
    try:
        cfg = config_util.load_config(args.config)
    except FileNotFoundError as e:
        die('config file not found', e, ErrorCode.FILE_NOT_FOUND)
    except json.decoder.JSONDecodeError as e:
        die(f'json error in {args.config}', e, ErrorCode.FILE_INVALID)

try:
    lan = cfg.get('lan_host')

    filuxe = filuxe_api.Filuxe(cfg, lan=lan)

    error_code = ErrorCode.UNSET

    try:
        if args.download:
            if not args.file or not args.path:
                die('need both a --path and a --file argument',
                    ErrorCode.BAD_ARGUMENTS)
Example #13
def do_run_common(command, args, runner_args, cached_runner_var):
    if args.context:
        _dump_context(command, args, runner_args, cached_runner_var)
        return

    command_name = command.name
    build_dir = _build_dir(args)

    if not args.skip_rebuild:
        try:
            cmake.run_build(build_dir)
        except CalledProcessError:
            if args.build_dir:
                log.die('cannot run {}, build in {} failed'.format(
                    command_name, args.build_dir))
            else:
                log.die('cannot run {}; no --build-dir given and build in '
                        'current directory {} failed'.format(command_name,
                                                             build_dir))

    # Runner creation, phase 1.
    #
    # Get the default runner name from the cache, allowing a command
    # line override. Get the ZephyrBinaryRunner class by name, and
    # make sure it supports the command.

    cache_file = path.join(build_dir, args.cmake_cache or cmake.DEFAULT_CACHE)
    cache = cmake.CMakeCache(cache_file)
    board = cache['CACHED_BOARD']
    available = cache.get_list('ZEPHYR_RUNNERS')
    if not available:
        log.wrn('No cached runners are available in', cache_file)
    runner = args.runner or cache.get(cached_runner_var)

    if runner is None:
        raise CommandContextError(textwrap.dedent("""
        No {} runner available for {}. Please either specify one
        manually, or check your board's documentation for
        alternative instructions.""".format(command_name, board)))

    log.inf('Using runner:', runner)
    if runner not in available:
        log.wrn('Runner {} is not configured for use with {}, '
                'this may not work'.format(runner, board))
    runner_cls = get_runner_cls(runner)
    if command_name not in runner_cls.capabilities().commands:
        log.die('Runner {} does not support command {}'.format(
            runner, command_name))

    # Runner creation, phase 2.
    #
    # At this point, the common options above are already parsed in
    # 'args', and unrecognized arguments are in 'runner_args'.
    #
    # - Pull the RunnerConfig out of the cache
    # - Override cached values with applicable command-line options

    cfg = cached_runner_config(build_dir, cache)
    _override_config_from_namespace(cfg, args)

    # Runner creation, phase 3.
    #
    # - Pull out cached runner arguments, and append command-line
    #   values (which should override the cache)
    # - Construct a runner-specific argument parser to handle cached
    #   values plus overrides given in runner_args
    # - Parse arguments and create runner instance from final
    #   RunnerConfig and parsed arguments.

    cached_runner_args = cache.get_list(
        'ZEPHYR_RUNNER_ARGS_{}'.format(cmake.make_c_identifier(runner)))
    assert isinstance(runner_args, list), runner_args
    # If the user passed -- to force the parent argument parser to stop
    # parsing, it will show up here, and needs to be filtered out.
    runner_args = [arg for arg in runner_args if arg != '--']
    final_runner_args = cached_runner_args + runner_args
    parser = argparse.ArgumentParser(prog=runner)
    runner_cls.add_parser(parser)
    parsed_args, unknown = parser.parse_known_args(args=final_runner_args)
    if unknown:
        raise CommandContextError('Runner', runner,
                                  'received unknown arguments', unknown)
    runner = runner_cls.create(cfg, parsed_args)
    runner.run(command_name)
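In phase 3, command-line values override cached ones simply because they come later in the argument list and argparse keeps the last occurrence of an option. A self-contained illustration (the option name is made up):

import argparse

parser = argparse.ArgumentParser(prog='demo-runner')
parser.add_argument('--gdb-port', default='3333')
cached_args = ['--gdb-port', '2331']   # e.g. from the CMake cache
cli_args = ['--gdb-port', '4444']      # from the west command line
parsed, unknown = parser.parse_known_args(cached_args + cli_args)
assert parsed.gdb_port == '4444' and not unknown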
Example #14
def _dump_context(command, args, runner_args, cached_runner_var):
    build_dir = _build_dir(args, die_if_none=False)

    # Try to figure out the CMake cache file based on the build
    # directory or an explicit argument.
    if build_dir is not None:
        cache_file = path.abspath(
            path.join(build_dir, args.cmake_cache or cmake.DEFAULT_CACHE))
    elif args.cmake_cache:
        cache_file = path.abspath(args.cmake_cache)
    else:
        cache_file = None

    # Load the cache itself, if possible.
    if cache_file is None:
        log.wrn('No build directory (--build-dir) or CMake cache '
                '(--cache-file) given or found; output will be limited')
        cache = None
    else:
        try:
            cache = cmake.CMakeCache(cache_file)
        except Exception:
            log.die('Cannot load cache {}.'.format(cache_file))

    # If we have a build directory, try to ensure build artifacts are
    # up to date. If that doesn't work, still try to print information
    # on a best-effort basis.
    if build_dir and not args.skip_rebuild:
        try:
            cmake.run_build(build_dir)
        except CalledProcessError:
            msg = 'Failed re-building application; cannot load context. '
            if args.build_dir:
                msg += 'Is {} the right --build-dir?'.format(args.build_dir)
            else:
                msg += textwrap.dedent('''\
                Use --build-dir (-d) to specify a build directory; the one
                used was {}.'''.format(build_dir))
            log.die('\n'.join(textwrap.wrap(msg, initial_indent='',
                                            subsequent_indent=INDENT,
                                            break_on_hyphens=False)))

    if cache is None:
        _dump_no_context_info(command, args)
        if not args.runner:
            return

    if args.runner:
        # Just information on one runner was requested.
        _dump_one_runner_info(cache, args, build_dir, INDENT)
        return

    board = cache['CACHED_BOARD']

    all_cls = {cls.name(): cls for cls in ZephyrBinaryRunner.get_runners() if
               command.name in cls.capabilities().commands}
    available = [r for r in cache.get_list('ZEPHYR_RUNNERS') if r in all_cls]
    available_cls = {r: all_cls[r] for r in available if r in all_cls}

    default_runner = cache.get(cached_runner_var)
    cfg = cached_runner_config(build_dir, cache)

    log.inf('All Zephyr runners which support {}:'.format(command.name),
            colorize=True)
    for line in util.wrap(', '.join(all_cls.keys()), INDENT):
        log.inf(line)
    log.inf('(Not all may work with this build, see available runners below.)',
            colorize=True)

    if cache is None:
        log.wrn('Missing or invalid CMake cache {}; there is no context. '
                'Use --build-dir to specify the build directory.'.format(
                    cache_file))
        return

    log.inf('Build directory:', colorize=True)
    log.inf(INDENT + build_dir)
    log.inf('Board:', colorize=True)
    log.inf(INDENT + board)
    log.inf('CMake cache:', colorize=True)
    log.inf(INDENT + cache_file)

    if not available:
        # Bail with a message if no runners are available.
        msg = ('No runners available for {}. '
               'Consult the documentation for instructions on how to run '
               'binaries on this target.').format(board)
        for line in util.wrap(msg, ''):
            log.inf(line, colorize=True)
        return

    log.inf('Available {} runners:'.format(command.name), colorize=True)
    log.inf(INDENT + ', '.join(available))
    log.inf('Additional options for available', command.name, 'runners:',
            colorize=True)
    for runner in available:
        _dump_runner_opt_help(runner, all_cls[runner])
    log.inf('Default {} runner:'.format(command.name), colorize=True)
    log.inf(INDENT + default_runner)
    _dump_runner_config(cfg, '', INDENT)
    log.inf('Runner-specific information:', colorize=True)
    for runner in available:
        log.inf('{}{}:'.format(INDENT, runner), colorize=True)
        _dump_runner_cached_opts(cache, runner, INDENT * 2, INDENT * 3)
        _dump_runner_caps(available_cls[runner], INDENT * 2)

    if len(available) > 1:
        log.inf('(Add -r RUNNER to just print information about one runner.)',
                colorize=True)
Example #15
    def enforce_max_files(self, path, rules, recursive=True, use_http=False, lan_files=None):
        """
        Get the list of files (locally or over http) and delete files if required by
        the rule "max_files". It can run a full recursive scan-and-delete as is
        done when starting the forwarder (with an empty path and recursive=True),
        and it can run in a specific directory when triggered by a new or modified
        file (with a path and recursive=False).
        Returns nothing.
        """

        try:
            deb(f'enforce max files in {self.domain.domain} with path="{path}", dryrun={self.dryrun}')
            if use_http:
                filelist = fwd_util.get_http_filelist(self.domain, path, recursive, rules)
            else:
                if not lan_files:
                    scan_directory = self.domain.root()
                    filelist = fwd_util.get_local_filelist(scan_directory, path, recursive, rules)
                else:
                    filelist = lan_files

            try:
                directories = filelist['filelist'].keys()
            except (KeyError, TypeError):
                deb(f'got empty filelist from {self.domain.domain} at "{path}"')
                return

            group_list = {}
            for directory in directories:
                directory_settings = get_rules_for_path(rules, directory)
                max_files, _delete_by, _file_groups = directory_settings
                if max_files == -1:
                    inf(f'"{self.domain.domain}/{path}" has no filelimit, skipping.'
                        f' ({len(filelist["filelist"][directory])} files)')
                    continue
                if not max_files:
                    continue
                group_list[directory] = self.parse_into_file_groups(directory, filelist, directory_settings)

            deb(f'found total {len(group_list)} groups')

            for directory, group in group_list.items():
                for group_key, file_group in group.items():
                    nof_files = len(file_group['files'])
                    max_files = file_group['maxfiles']
                    excess_files = nof_files - max_files
                    if excess_files > 0:
                        message = f'"{self.domain.domain}/{directory}" group:"{group_key}" exceeded max files '\
                                  f'with {excess_files}. ({nof_files} files, limit is {max_files})'
                        inf(message)
                        with Indent() as _:
                            deleted_files = self.delete_files(file_group, group_key, use_http, rules)
                            for file in deleted_files:
                                del filelist['filelist'][directory][file]

                    else:
                        message = f'"{self.domain.domain}/{directory}" group:"{group_key}" no action. '\
                                  f'({nof_files} files, limit is {max_files})'
                        deb(message)

        except Exception as e:
            die(f'exception in enforce_max_files {e!r}', e, error_code=ErrorCode.INTERNAL_ERROR)
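The bookkeeping at the heart of the loop above, in miniature: with nof_files files and a limit of max_files, the excess_files oldest entries get deleted (a simplified sketch; the actual ordering is decided by the delete_by rule):

files = ['log_1.zip', 'log_2.zip', 'log_3.zip', 'log_4.zip']  # oldest first (assumed)
max_files = 2
excess_files = len(files) - max_files
to_delete = files[:excess_files] if excess_files > 0 else []
assert to_delete == ['log_1.zip', 'log_2.zip']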
Example #16
def _git_helper(project, cmd, extra_args, cwd, capture_stdout, check):
    # Runs a git command.
    #
    # project:
    #   The Project instance for the project, derived from the manifest file.
    #
    # cmd:
    #   String with git arguments. Supports some "(foo)" shorthands. See below.
    #
    # extra_args:
    #   List of additional arguments to pass to the git command (e.g. from the
    #   user).
    #
    # cwd:
    #   Directory to switch to first (None = current directory)
    #
    # capture_stdout:
    #   True if stdout should be captured into the returned
    #   subprocess.CompletedProcess instance instead of being printed.
    #
    #   We never capture stderr, to prevent error messages from being eaten.
    #
    # check:
    #   True if an error should be raised if the git command finishes with a
    #   non-zero return code.
    #
    # Returns a subprocess.CompletedProcess instance.

    # TODO: Run once somewhere?
    if shutil.which('git') is None:
        log.die('Git is not installed or cannot be found')

    args = (('git', ) +
            tuple(_expand_shorthands(project, arg)
                  for arg in cmd.split()) + tuple(extra_args))
    cmd_str = util.quote_sh_list(args)

    log.dbg("running '{}'".format(cmd_str), 'in', cwd, level=log.VERBOSE_VERY)
    popen = subprocess.Popen(
        args, stdout=subprocess.PIPE if capture_stdout else None, cwd=cwd)

    stdout, _ = popen.communicate()

    dbg_msg = "'{}' in {} finished with exit status {}" \
              .format(cmd_str, cwd, popen.returncode)
    if capture_stdout:
        dbg_msg += " and wrote {} to stdout".format(stdout)
    log.dbg(dbg_msg, level=log.VERBOSE_VERY)

    if check and popen.returncode:
        _die(project,
             "Command '{}' failed for (name-and-path)".format(cmd_str))

    if capture_stdout:
        # Manual UTF-8 decoding and universal newlines. Before Python 3.6,
        # Popen doesn't seem to allow using universal newlines mode (which
        # enables decoding) with a specific encoding (because the encoding=
        # parameter is missing).
        #
        # Also strip all trailing newlines as convenience. The splitlines()
        # already means we lose a final '\n' anyway.
        stdout = "\n".join(stdout.decode('utf-8').splitlines()).rstrip("\n")

    return CompletedProcess(popen.args, popen.returncode, stdout)
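_expand_shorthands() is the helper behind the "(foo)" shorthands mentioned in the cmd description; a minimal sketch of the idea (the actual set of supported shorthands is defined elsewhere):

def _expand_shorthands(project, s):
    # Substitute "(...)" shorthands with fields from the Project instance.
    # '(name-and-path)' must be handled before '(name)' so the longer
    # shorthand isn't clobbered; it is the one _die() below relies on.
    s = s.replace('(name-and-path)', '{} ({})'.format(project.name, project.path))
    s = s.replace('(name)', project.name)
    return s.replace('(url)', project.url)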
Example #17
def _die(project, msg):
    # Like _wrn(), for dying

    log.die(_expand_shorthands(project, msg))
Example #18
            "product/release/image": product_release_image,
            "product/candidate": product_candidate,
            "product/release": product_release,
            "product": product
        }
    }
    jsn = json.dumps(CONFIG, indent=4, sort_keys=True)
    print(jsn)
    exit(0)

if args.config:
    try:
        cfg = config_util.load_config(args.config)
        inf(f'loaded configuration {args.config}')
    except FileNotFoundError as e:
        die('config file not found', e)
    except json.decoder.JSONDecodeError as e:
        die(f'json error in {args.config}', e, ErrorCode.FILE_INVALID)

rules = None

if args.rules:
    try:
        rules = config_util.load_config(args.rules)
        inf(f'loaded rules file {args.rules}')
    except json.decoder.JSONDecodeError as e:
        die(f'json error in {args.rules}', e, ErrorCode.FILE_INVALID)
    except Exception:
        die(f'loading {args.rules} failed')
else:
    inf('no rules specified, running with default rules forwarding everything')
Exemple #19
0
def start(args, cfg, rules):
    global LOADED_RULES, FILE_ROOT, FILUXE_WAN, FILUXE_LAN, CONFIG, LAN_FILE_DELETER, IDLE_DETECT
    lan_files = None

    FILE_ROOT = cfg['lan_filestorage']

    if not os.path.exists(FILE_ROOT):
        die(f'filestorage root {FILE_ROOT} not found. Giving up')

    inf(f'filestorage root {FILE_ROOT}')

    CONFIG = cfg
    if rules:
        LOADED_RULES = rules
        lan_files = fwd_util.get_local_filelist(FILE_ROOT)
        coldstart_rules(lan_files)
    else:
        war('running with default rules, forwarding everything')

    try:
        FILUXE_WAN = filuxe_api.Filuxe(CONFIG, lan=False, force=True)
    except KeyError:
        war('no wan configuration found, forwarding disabled')

    if FILUXE_WAN:
        try:
            _, stats = FILUXE_WAN.get_stats()
            inf(f'connected to wan server version {stats["version"]}')
        except Exception:
            err('wan server unreachable, forwarding disabled')
            FILUXE_WAN = None

    try:
        FILUXE_LAN = filuxe_api.Filuxe(CONFIG, lan=True)
    except KeyError:
        die('no lan configuration found, can\'t continue')

    try:
        _, stats = FILUXE_LAN.get_stats()
        inf(f'connected to lan server version {stats["version"]}')
    except requests.exceptions.ConnectionError:
        war('lan server unreachable, continuing anyway')
    except Exception as e:
        die('unexpected exception while contacting lan server', e)

    if ACTIVE_RULES:
        LAN_FILE_DELETER = fwd_file_deleter.FileDeleter(
            FILUXE_LAN, args.dryrun)
        LAN_FILE_DELETER.enforce_max_files('',
                                           rules=ACTIVE_RULES,
                                           recursive=True,
                                           lan_files=lan_files)

    try:
        if FILUXE_WAN and cfg['sync_at_startup']:
            if not lan_files:
                lan_files = fwd_util.get_local_filelist(FILE_ROOT)
            synchonize(lan_files)
    except Exception as e:
        err(f'synchronizing lan to wan failed: {e}')

    IDLE_DETECT = IdleDetect()
    try:
        run_filesystem_observer(FILE_ROOT)
    except Exception as e:
        die(f'unable to start file observer in {FILE_ROOT}', e)

    inf('filuxe forwarder is ready')

    try:
        LOOP.run_forever()
    except Exception as e:
        die('the fileobserver crashed. Perhaps the filestorage was deleted?', e)

    return ErrorCode.OK
Example #20
    def _sanity_check(self):
        # Sanity check the build configuration.
        # Side effect: may update cmake_cache attribute.
        log.dbg('sanity checking the build', level=log.VERBOSE_EXTREME)
        if self.source_dir == self.build_dir:
            # There's no forcing this.
            log.die('source and build directory {} cannot be the same; '
                    'use --build-dir {} to specify a build directory'.format(
                        self.source_dir, self.build_dir))

        srcrel = os.path.relpath(self.source_dir)
        if is_zephyr_build(self.source_dir):
            self._check_force(
                'it looks like {srcrel} is a build directory: '
                'did you mean --build-dir {srcrel} instead?'.format(
                    srcrel=srcrel))
        elif 'CMakeLists.txt' not in os.listdir(self.source_dir):
            self._check_force(
                'source directory "{srcrel}" does not contain '
                'a CMakeLists.txt; is that really what you '
                'want to build? (Use -s SOURCE_DIR to specify '
                'the application source directory)'.format(srcrel=srcrel))

        if not is_zephyr_build(self.build_dir) and not self.args.board:
            self._check_force('this looks like a new or clean build, '
                              'please provide --board')

        if not self.cmake_cache:
            return  # That's all we can check without a cache.

        cached_app = self.cmake_cache.get('APPLICATION_SOURCE_DIR')
        log.dbg('APPLICATION_SOURCE_DIR:',
                cached_app,
                level=log.VERBOSE_EXTREME)
        source_abs = (os.path.abspath(self.args.source_dir)
                      if self.args.source_dir else None)
        cached_abs = os.path.abspath(cached_app) if cached_app else None
        if cached_abs and source_abs and source_abs != cached_abs:
            self._check_force('build directory "{}" is for application "{}", '
                              'but source directory "{}" was specified; '
                              'please clean it or use --build-dir to set '
                              'another build directory'.format(
                                  self.build_dir, cached_abs, source_abs))
            self.run_cmake = True  # If they insist, we need to re-run cmake.

        cached_board = self.cmake_cache.get('CACHED_BOARD')
        log.dbg('CACHED_BOARD:', cached_board, level=log.VERBOSE_EXTREME)
        if not cached_board and not self.args.board:
            if self.created_build_dir:
                self._check_force(
                    'Building for the first time: you must provide --board')
            else:
                self._check_force(
                    'Board is missing or unknown, please provide --board')
        if self.args.board and cached_board and \
           self.args.board != cached_board:
            self._check_force('Build directory {} targets board {}, '
                              'but board {} was specified. (Clean that '
                              'directory or use --build-dir to specify '
                              'a different one.)'.format(
                                  self.build_dir, cached_board,
                                  self.args.board))
Example #21
	except cfloader.BadInput as e:
		log.die("Cannot load conf file: %s" % (str(e),))

	# If we are just checking, go straight there:
	if checkonly:
		checkcfg(cfg)
		log.debug(1, "No problems found.")
		return

	# First we do what needs privileges: binding sockets.
	sockl = []
	for h, p in cfg['listen']:
		try:
			sockl.append(proc.getsocket(h, p))
		except proc.Kaboom as e:
			log.die("Could not establish socket %s@%s: %s" % \
				(p, h, str(e)))

	# Renounce privileges if told to.
	if 'user' in cfg:
		try:
			proc.changetouser(cfg['user'])
		except proc.Kaboom as e:
			log.die("Could not drop privileges to %s: %s" % \
				(cfg['user'], str(e)))

	# Initialize global parameters.
	if 'dropipafter' in cfg:
		hinfo.setiptimesdur(cfg['dropipafter'])
	if 'substitutions' in cfg:
		if cfg['substitutions'] == 'off':
			actions.dosubstitutions(0)
Example #22
    def _check_force(self, msg):
        if not self.args.force:
            log.err(msg)
            log.die('refusing to proceed without --force due to above error')
Example #23
def usage():
	log.die("usage: portnanny2 [-v|-V NUM] [-M MAXTHREADS] [-S STACK] [-C] [-l] conffile")