Code Example #1
File: install.py Project: releng-tool/releng-tool
def install(opts):
    """
    support installation of cmake projects

    With provided installation options (``RelengInstallOptions``), the
    installation stage will be processed.

    Args:
        opts: installation options

    Returns:
        ``True`` if the installation stage is completed; ``False`` otherwise
    """

    if not CMAKE.exists():
        err('unable to install package; cmake is not installed')
        return False

    # check if the no-install flag is set
    if opts._cmake_noinstall:
        verbose('configured to skip install stage for cmake')
        return True

    # default definitions
    cmake_defs = {}
    if opts.install_defs:
        cmake_defs.update(expand(opts.install_defs))

    # default options
    cmake_opts = {
        # build RelWithDebInfo (when using multi-configuration projects)
        '--config': 'RelWithDebInfo',
        # default install using the install target
        '--target': 'install',
    }
    if opts.install_opts:
        cmake_opts.update(expand(opts.install_opts))

    # argument building
    cmake_args = [
        '--build',
        opts.build_output_dir,
    ]
    cmake_args.extend(prepare_definitions(cmake_defs, '-D'))
    cmake_args.extend(prepare_arguments(cmake_opts))

    # prepare environment for installation request; an environment dictionary is
    # always needed to apply a custom DESTDIR during each install request
    env = expand(opts.install_env)
    if not env:
        env = {}

    # install to each destination
    for dest_dir in opts.dest_dirs:
        env['DESTDIR'] = dest_dir
        if not CMAKE.execute(cmake_args, env=env):
            err('failed to install cmake project: {}', opts.name)
            return False

    return True
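Below is a standalone sketch (not releng-tool code) of the argument assembly and per-destination loop above, assuming the prepare helpers simply flatten option pairs into command-line tokens; all paths are made up.

import os

build_output_dir = 'output/build/sample'            # hypothetical path
dest_dirs = ['output/staging', 'output/target']     # hypothetical destinations

cmake_opts = {'--config': 'RelWithDebInfo', '--target': 'install'}
cmake_args = ['--build', build_output_dir]
for key, value in cmake_opts.items():
    cmake_args.extend([key, value])

for dest_dir in dest_dirs:
    # each destination receives its own install pass with DESTDIR applied
    env = dict(os.environ, DESTDIR=dest_dir)
    print('DESTDIR={} cmake {}'.format(env['DESTDIR'], ' '.join(cmake_args)))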
Code Example #2
def platform_exit(msg=None, code=None):
    """
    exit out of the releng-tool process

    Provides a convenience method to help invoke a system exit call without
    needing to explicitly use ``sys``. A caller can provide a message to
    indicate the reason for the exit. The provided message will be output to
    standard error. The exit code, if not explicitly set, will vary based on
    other arguments. If a message is provided to this call, the default exit
    code will be ``1``. If no message is provided, the default exit code will
    be ``0``.
    In any case, if the caller explicitly sets a code value, the provided code
    value will be used.

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        releng_exit('there was an error performing this task')

    Args:
        msg (optional): error message to print
        code (optional): exit code; defaults to 0 if no message or defaults to 1
            if a message is set

    Raises:
        SystemExit: always raised
    """

    if msg:
        err(msg)
        if code is None:
            code = 1
    elif code is None:
        code = 0
    sys.exit(code)
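A minimal usage sketch of the exit-code defaults described above; SystemExit is caught here only so the demonstration itself does not terminate.

try:
    platform_exit('there was an error performing this task')
except SystemExit as e:
    assert e.code == 1   # a message was given, so the default code is 1

try:
    platform_exit()
except SystemExit as e:
    assert e.code == 0   # no message and no explicit code, so the default is 0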
Code Example #3
def _validate_cache(cache_dir):
    """
    validate an existing cache directory before fetching into it

    A fetch operation may occur on an existing cache directory, typically when
    a force-fetch or a configured revision has changed. This call helps
    validate the existing cache directory (i.e. that it is not in a bad state
    such as a corrupted repository). If a cache directory does exist but fails
    validation, it will be removed so that a clean fetch can be performed.

    Args:
        cache_dir: the cache/bare repository to fetch into

    Returns:
        a 2-tuple (if a cache directory exists; and if validation failed)
    """

    git_dir = '--git-dir=' + cache_dir

    bad_validation = False
    has_cache = False
    if os.path.isdir(cache_dir):
        log('cache directory detected; validating')
        if GIT.execute([git_dir, 'rev-parse'], cwd=cache_dir, quiet=True):
            debug('cache directory validated')
            has_cache = True
        else:
            log('cache directory has errors; will be re-downloaded')
            if not path_remove(cache_dir):
                err(
                    'unable to cleanup cache folder for package\n'
                    ' (cache folder: {})', cache_dir)
                bad_validation = True

    return has_cache, bad_validation
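A sketch of how a fetch routine might consume the returned 2-tuple; _fetch_flow is a hypothetical wrapper, not releng-tool code, and it reuses the bare-repository helper shown later in this listing (Code Example #17).

def _fetch_flow(cache_dir):
    # hypothetical wrapper showing the intended decision flow
    has_cache, bad_validation = _validate_cache(cache_dir)
    if bad_validation:
        return None                  # corrupt cache could not be cleaned up
    if not has_cache:
        if not _create_bare_git_repo(cache_dir):
            return None              # no usable cache and none could be created
    return cache_dir                 # safe to fetch into the cache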
Code Example #4
File: git.py Project: releng-tool/releng-tool
def extract(opts):
    """
    support extraction (checkout) of a git cache into a build directory

    With provided extraction options (``RelengExtractOptions``), the extraction
    stage will be processed. A Git extraction process will populate a working
    tree based off the cached Git tree acquired from the fetch stage.

    Args:
        opts: the extraction options

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    revision = opts.revision
    work_dir = opts.work_dir

    if not GIT.exists():
        err('unable to extract package; git is not installed')
        return None

    # extract the package
    if not _workdir_extract(cache_dir, work_dir, revision):
        return False

    # extract submodules (if configured to do so)
    if opts._git_submodules:
        if not _process_submodules(opts, work_dir):
            return False

    return True
Code Example #5
    def extract_submodule_revision(self, bare_dir):
        """
        extract a submodule revision

        Attempts to extract the HEAD reference of a submodule based off a
        provided bare Git repository. This is to help support processing Git
        submodules which do not have a branch/version explicitly set for the
        module, which is required for (at least) recursive submodule processing.

        Args:
            bare_dir: the bare repository

        Returns:
            the revision; ``None`` when a revision cannot be extracted
        """

        rv, ref = self.execute_rv('--git-dir=' + bare_dir, 'show-ref', '--head')
        if rv != 0:
            err('failed to extract a submodule revision')
            return None

        # a `--head` fetch may fetch more than one reference; extract the first
        # entry and remove any known ref prefix from it
        revision = ref.split(None, 2)[1]
        if revision.startswith('refs/heads/'):
            revision = revision[len('refs/heads/'):]
        elif revision.startswith('refs/remotes/origin/'):
            revision = revision[len('refs/remotes/origin/'):]
        return revision
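A standalone illustration of the parsing above on a sample `git show-ref --head` line; the hash and ref name are made up.

sample = 'a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0 refs/remotes/origin/main'

revision = sample.split(None, 2)[1]
for prefix in ('refs/heads/', 'refs/remotes/origin/'):
    if revision.startswith(prefix):
        revision = revision[len(prefix):]
        break

print(revision)  # -> main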
Code Example #6
def fetch(opts):
    """
    support fetching from scp sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        ``True`` if the fetch stage is completed; ``False`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    site = opts.site
    work_dir = opts.work_dir

    if not SCP.exists():
        err('unable to fetch package; scp is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    if not SCP.execute(['-o', 'BatchMode yes', site, cache_file],
                       cwd=work_dir):
        err('unable to secure-copy file from target')
        return None
    log('successfully secure-copied file from target')

    return cache_file
Code Example #7
File: mercurial.py Project: releng-tool/releng-tool
def extract(opts):
    """
    support extraction (checkout) of a mercurial cache into a build directory

    With provided extraction options (``RelengExtractOptions``), the extraction
    stage will be processed. A Mercurial extraction process will populate a
    working tree based off the cached Mercurial repository acquired from the
    fetch stage.

    Args:
        opts: the extraction options

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    revision = opts.revision
    work_dir = opts.work_dir

    if not HG.exists():
        err('unable to extract package; mercurial (hg) is not installed')
        return None

    log('checking out target revision into work tree')
    if not HG.execute(
        ['--verbose', 'clone', '--rev', revision, cache_dir, work_dir],
            cwd=work_dir):
        err('unable to checkout revision')
        return False

    return True
Code Example #8
def _sync_git_origin(cache_dir, site):
    """
    synchronize an origin site to a git configuration

    Ensures the configured site is set as the origin of the repository. This is
    to help handle scenarios where a package's site has changed while content is
    already cached.

    Args:
        cache_dir: the cache/bare repository
        site: the site that should be set

    Returns:
        ``True`` if the site is synchronized; ``False`` otherwise
    """

    git_dir = '--git-dir=' + cache_dir

    # silently try to add origin first, to lazily handle a missing case
    GIT.execute([git_dir, 'remote', 'add', 'origin', site],
                cwd=cache_dir,
                quiet=True)

    if not GIT.execute([git_dir, 'remote', 'set-url', 'origin', site],
                       cwd=cache_dir):
        err('unable to ensure origin is set on repository cache')
        return False

    return True
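The same synchronization expressed with plain subprocess calls, as a rough standalone sketch; the cache path and site are hypothetical.

import subprocess

cache_dir = 'cache/git/sample.git'          # hypothetical bare repository
site = 'https://example.com/sample.git'     # hypothetical site

# adding origin may fail harmlessly if the remote already exists, matching
# the quiet `remote add` attempt above
subprocess.run(['git', '--git-dir=' + cache_dir, 'remote', 'add',
                'origin', site], stderr=subprocess.DEVNULL, check=False)
subprocess.run(['git', '--git-dir=' + cache_dir, 'remote', 'set-url',
                'origin', site], check=True)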
Code Example #9
def build(opts):
    """
    support building python projects

    With provided build options (``RelengBuildOptions``), the build stage will
    be processed.

    Args:
        opts: build options

    Returns:
        ``True`` if the building stage is completed; ``False`` otherwise
    """

    if opts._python_interpreter:
        python_tool = PythonTool(opts._python_interpreter,
                                 env_include=PYTHON_EXTEND_ENV)
    else:
        python_tool = PYTHON

    if not python_tool.exists():
        err('unable to build package; python is not installed')
        return False

    # definitions
    python_defs = {}
    if opts.build_defs:
        python_defs.update(expand(opts.build_defs))

    # default options
    python_opts = {}
    if opts.build_opts:
        python_opts.update(expand(opts.build_opts))

    # default environment
    path1 = python_tool.path(sysroot=opts.staging_dir, prefix=opts.prefix)
    path2 = python_tool.path(sysroot=opts.target_dir, prefix=opts.prefix)
    env = {'PYTHONPATH': path1 + os.pathsep + path2}

    # apply package-specific environment options
    if opts.build_env:
        env.update(expand(opts.build_env))

    # argument building
    python_args = [
        'setup.py',
        # ignore user's pydistutils.cfg
        '--no-user-cfg',
        # invoke the build operation
        'build',
    ]
    python_args.extend(prepare_definitions(python_defs))
    python_args.extend(prepare_arguments(python_opts))

    if not python_tool.execute(python_args, env=env):
        err('failed to build python project: {}', opts.name)
        return False

    return True
Code Example #10
File: build.py Project: releng-tool/releng-tool
def build(opts):
    """
    support building cmake projects

    With provided build options (``RelengBuildOptions``), the build stage will
    be processed.

    Args:
        opts: build options

    Returns:
        ``True`` if the building stage is completed; ``False`` otherwise
    """

    if not CMAKE.exists():
        err('unable to build package; cmake is not installed')
        return False

    # definitions
    cmake_defs = {}
    if opts.build_defs:
        cmake_defs.update(expand(opts.build_defs))

    # options
    cmake_opts = {
        # build RelWithDebInfo (when using multi-configuration projects)
        '--config': 'RelWithDebInfo',
    }
    if opts.build_opts:
        cmake_opts.update(expand(opts.build_opts))

    # argument building
    cmake_args = [
        # tell cmake to invoke build process in the output directory
        '--build',
        opts.build_output_dir,
    ]
    cmake_args.extend(prepare_definitions(cmake_defs, '-D'))
    cmake_args.extend(prepare_arguments(cmake_opts))

    # enable a specific number of parallel jobs, if set
    #
    # https://cmake.org/cmake/help/v3.12/manual/cmake.1.html#build-tool-mode
    if 'releng.cmake.disable_parallel_option' not in opts._quirks:
        if opts.jobsconf != 1 and opts.jobs > 1:
            cmake_args.append('--parallel')
            cmake_args.append(str(opts.jobs))
    else:
        verbose('cmake parallel jobs disabled by quirk')

    if not CMAKE.execute(cmake_args, env=expand(opts.build_env)):
        err('failed to build cmake project: {}', opts.name)
        return False

    return True
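For a hypothetical package built with four jobs and no extra definitions or options, and assuming the prepare helpers flatten option pairs into "key value" tokens, the call above would roughly produce:

cmake_args = [
    '--build', 'output/build/sample',    # hypothetical output directory
    '--config', 'RelWithDebInfo',
    '--parallel', '4',
]
# i.e. cmake --build output/build/sample --config RelWithDebInfo --parallel 4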
Code Example #11
File: __init__.py Project: releng-tool/releng-tool
    def _execute(self,
                 args=None,
                 cwd=None,
                 quiet=False,
                 env=None,
                 poll=False,
                 capture=None):
        """
        execute the host tool with the provided arguments (if any)

        Runs the host tool described by ``args`` until completion.

        Args:
            args (optional): the list of arguments for the tool
            cwd (optional): working directory to use
            quiet (optional): whether or not to suppress output
            env (optional): environment variables to include
            poll (optional): force polling stdin/stdout for output data
            capture (optional): list to capture output into

        Returns:
            the return code of the execution request
        """
        if not self.exists():
            return 1

        if args and not is_sequence_not_string(args):
            err('invalid argument type provided into execute (should be list): '
                + str(args))
            return 1

        final_env = None
        if self.include or self.sanitize or env:
            final_env = os.environ.copy()
            if self.sanitize:
                for key in self.sanitize:
                    final_env.pop(key, None)
            if self.include:
                final_env.update(self.include)
            if env:
                final_env.update(env)

        final_args = [self.tool]
        if args:
            final_args.extend(args)

        return _execute(final_args,
                        cwd=cwd,
                        env=final_env,
                        quiet=quiet,
                        critical=False,
                        poll=poll,
                        capture=capture)
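A standalone sketch of the environment merge order used above: start from the current environment, drop sanitized keys, then layer tool-level includes and call-specific entries. All names below are illustrative.

import os

sanitize = ['PYTHONHOME']            # keys the tool wants scrubbed
include = {'TOOL_DEFAULT': '1'}      # tool-level additions
env = {'DESTDIR': '/tmp/stage'}      # per-call additions

final_env = os.environ.copy()
for key in sanitize:
    final_env.pop(key, None)
final_env.update(include)
final_env.update(env)                # per-call values win over tool defaults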
Code Example #12
File: svn.py Project: releng-tool/releng-tool
def fetch(opts):
    """
    support fetching from svn sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        ``True`` if the fetch stage is completed; ``False`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    if not SVN.exists():
        err('unable to fetch package; svn is not installed')
        return None

    note('fetching {}...'.format(name))
    sys.stdout.flush()

    log('checking out sources')
    if not SVN.execute(['checkout', '-r', revision, site, work_dir],
                       cwd=work_dir):
        err('unable to checkout module')
        return None

    log('caching sources')

    # exclude .svn metadata from the cached archive (tarfile's legacy
    # ``exclude`` parameter was removed in Python 3.7; a ``filter`` callable
    # is used instead)
    def svn_filter(info):
        if info.name.endswith('.svn'):
            return None
        return info

    # ensure cache file's directory exists
    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(work_dir, arcname=name, filter=svn_filter)

    return cache_file
Code Example #13
def process_file_flag(flag, file, quiet=False):
    """
    process a file flag event

    Will either write a file flag configuration event or attempt to read a file
    flag state. If the ``flag`` option is set to ``True``, this process event
    will assume that this instance is attempting to configure a file flag (on)
    state and generate the target file flag on the system. If the flag option is
    set to ``False``, the file's existence will be checked to reflect whether or
    not the flag is considered enabled.

    Args:
        flag: the flag option to use; ``None`` to check the flag state
        file: the filename
        quiet: suppression of any error messages to standard out

    Returns:
        ``FileFlag.EXISTS`` if the flag is enabled; ``FileFlag.NO_EXIST`` if the
            flag is not enabled; ``FileFlag.CONFIGURED`` if the flag was
            configured as requested; ``FileFlag.NOT_CONFIGURED`` if the flag
            could not be configured as requested
    """

    if flag:
        # When configuring a file flag, attempt to create it (or update its
        # access/modified times if it already exists). If the file flag cannot
        # be touched (permission errors, etc.), fall back on the existence of
        # the file flag to still consider it configured.
        if touch(file):
            rv = FileFlag.CONFIGURED
        else:
            if os.path.isfile(file):
                rv = FileFlag.CONFIGURED
            else:
                rv = FileFlag.NOT_CONFIGURED
                if not quiet:
                    err('unable to configure file flag: {}', file)
    elif flag is None and os.path.isfile(file):
        rv = FileFlag.EXISTS
    else:
        rv = FileFlag.NO_EXIST

    return rv
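A hypothetical usage sketch: record a flag once a stage completes, then later query it (flag=None) to decide whether the stage can be skipped. The flag filename is illustrative.

FLAG_FILE = '.releng-flag-sample-built'

# record completion of a stage
if process_file_flag(True, FLAG_FILE) != FileFlag.CONFIGURED:
    err('unable to record completion flag')

# on a later run, check whether the stage can be skipped
if process_file_flag(None, FLAG_FILE, quiet=True) == FileFlag.EXISTS:
    verbose('stage already completed; skipping')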
Code Example #14
def build(opts):
    """
    support building autotools projects

    With provided build options (``RelengBuildOptions``), the build stage will
    be processed.

    Args:
        opts: build options

    Returns:
        ``True`` if the building stage is completed; ``False`` otherwise
    """

    if not MAKE.exists():
        err('unable to build package; make is not installed')
        return False

    # definitions
    autotools_defs = {}
    if opts.build_defs:
        autotools_defs.update(expand(opts.build_defs))

    # default options
    autotools_opts = {}
    if opts.build_opts:
        autotools_opts.update(expand(opts.build_opts))

    # argument building
    autotools_args = []
    autotools_args.extend(prepare_definitions(autotools_defs))
    autotools_args.extend(prepare_arguments(autotools_opts))

    if opts.jobs > 1:
        autotools_args.append('--jobs')
        autotools_args.append(str(opts.jobs))

    if not MAKE.execute(autotools_args, env=expand(opts.build_env)):
        err('failed to build autotools project: {}', opts.name)
        return False

    return True
Code Example #15
File: io.py Project: releng-tool/releng-tool
def path_remove(path, quiet=False):
    """
    remove the provided path

    Attempts to remove the provided path if it exists. The path value can either
    be a directory or a specific file. If the provided path does not exist, this
    method has no effect. In the event that a file or directory could not be
    removed due to an error other than the path not being found, an error
    message will be output to standard error (unless ``quiet`` is set to
    ``True``).

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        releng_remove('my-file')
        # (or)
        releng_remove('my-directory/')

    Args:
        path: the path to remove
        quiet (optional): whether or not to suppress output

    Returns:
        ``True`` if the path was removed or does not exist; ``False`` if the
        path could not be removed from the system
    """

    if not os.path.exists(path):
        return True

    try:
        if os.path.isdir(path) and not os.path.islink(path):
            _path_remove_dir(path)
        else:
            _path_remove_file(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            if not quiet:
                err('unable to remove path: {}\n' '    {}', path, e)
            return False

    return True
Code Example #16
File: io.py Project: releng-tool/releng-tool
def ensure_dir_exists(dir_, quiet=False, critical=False):
    """
    ensure the provided directory exists

    Attempts to create the provided directory. If the directory already exists,
    this method has no effect. If the directory does not exist and could not be
    created, this method will return ``False``. Also, if an error has been
    detected, an error message will be output to standard error (unless
    ``quiet`` is set to ``True``).

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        if releng_mkdir('my-directory'):
            print('directory was created')
        else:
            print('directory was not created')

    Args:
        dir_: the directory
        quiet (optional): whether or not to suppress output (defaults to
            ``False``)
        critical (optional): whether or not to stop execution on failure
            (defaults to ``False``)

    Returns:
        ``True`` if the directory exists; ``False`` if the directory could not
        be created
    """
    try:
        os.makedirs(dir_)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(dir_):
            if not quiet:
                err('unable to create directory: {}\n' '    {}', dir_, e)
            if critical:
                sys.exit(-1)
            return False
    return True
Code Example #17
def _create_bare_git_repo(cache_dir):
    """
    create a bare git repository

    This call will build a bare Git repository in the provided cache
    directory. If the repository could not be created, an error message
    will be generated and this method will return ``False``.

    Args:
        cache_dir: the cache/bare repository

    Returns:
        ``True`` if the repository could be created; ``False`` otherwise
    """

    git_dir = '--git-dir=' + cache_dir

    if GIT.execute([git_dir, 'init', '--bare', '--quiet'], cwd=cache_dir):
        return True

    err('unable to initialize bare git repository')
    return False
Code Example #18
File: io.py Project: releng-tool/releng-tool
def run_script(script, globals_, subject=None, catch=True):
    """
    execute the provided script and provide the resulting globals module

    With the provided ``script`` file, execute the code and return the resulting
    global module based on the execution results. This call is just a wrapper
    around ``run_path`` but with improved formatting for user feedback when
    invoked in various stages of a releng-tool run. The provided ``globals``
    will be passed into the ``run_path`` call.

    When an issue occurs invoking the provided script, an error message is
    output to standard error. This includes an error message (tailored, if
    provided, by a ``subject`` value), the captured exception message and a
    stack trace. This call will return ``None`` when an error is detected.

    Args:
        script: the script
        globals_: dictionary to pre-populate script's globals
        subject (optional): subject value to enhance a final error message
        catch (optional): whether or not to catch any exceptions

    Returns:
        resulting globals module; ``None`` if an execution error occurs
    """
    if not catch:
        result = run_path(script, init_globals=globals_)
    else:
        try:
            result = run_path(script, init_globals=globals_)
        except Exception as e:
            err('{}\n'
                'error running {}{}script: {}\n'
                '    {}', traceback.format_exc(), subject, subject and ' ',
                script, e)
            return None

    return result
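A hypothetical usage sketch: run a project script with a pre-populated global and read back a value the script is expected to define. The script name, the global, and the ARTIFACTS key are all illustrative.

script_globals = {'RELENG_SAMPLE_OPTION': True}   # illustrative global

result = run_script('post-build.py', script_globals, subject='post-build')
if result is not None:
    artifacts = result.get('ARTIFACTS', [])       # value set by the script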
Code Example #19
File: pipeline.py Project: releng-tool/releng-tool
    def _stage_license(self, pkg):
        """
        process license files for a specific package processing

        If a package contains one or more files containing license information,
        this information will be populated in the package's license folder.

        Args:
            pkg: the package being processed

        Returns:
            ``True`` if the license information was copied; ``False`` if the
            license information could not be copied
        """

        # skip if package has no license files
        if not pkg.license_files:
            if pkg.license and not pkg.is_internal and not pkg.no_extraction:
                warn('package defines no license files: ' + pkg.name)
            return True

        # ensure package-specific license directory exists
        pkg_license_dir = os.path.join(self.opts.license_dir, pkg.nv)
        if not ensure_dir_exists(pkg_license_dir):
            return False

        # copy over each license file
        for file in pkg.license_files:
            src = os.path.join(pkg.build_dir, file)
            dst = os.path.join(pkg_license_dir, file)

            if not path_copy(src, dst, critical=False):
                err('unable to copy license information: ' + pkg.name)
                return False

        return True
Code Example #20
def _sync_git_configuration(opts):
    """
    ensure the git configuration is properly synchronized with this repository

    This call ensures that various Git configuration options are properly
    synchronized with the cached Git repository. This includes:

    - Ensuring the configured site is set as the origin of the repository. This
       is to help handle scenarios where a package's site has changed while
       content is already cached.
    - Ensure various `git config` options are set, if specific repository
       options need to be set (e.g. overriding `core.autocrlf`).

    Args:
        opts: fetch options

    Returns:
        ``True`` if the configuration has been synchronized; ``False`` otherwise
    """

    cache_dir = opts.cache_dir
    git_dir = '--git-dir=' + cache_dir
    site = opts.site

    if not _sync_git_origin(cache_dir, site):
        return False

    # apply repository-specific configurations
    if opts._git_config:
        for key, val in opts._git_config.items():
            if not GIT.execute([git_dir, 'config', key, val], cwd=cache_dir):
                err('unable to apply configuration entry "{}" with value "{}"',
                    key, val)
                return False

    return True
Code Example #21
def configure(opts):
    """
    support configuration for autotools projects

    With provided configuration options (``RelengConfigureOptions``), the
    configuration stage will be processed.

    Args:
        opts: configuration options

    Returns:
        ``True`` if the configuration stage is completed; ``False`` otherwise
    """

    # check if autoreconf needs to be invoked
    if opts._autotools_autoreconf:
        verbose('configured to run autoreconf')
        if not AUTORECONF.exists():
            err('unable to configure package; autoreconf is not installed')
            return False

        if not AUTORECONF.execute(['--verbose']):
            err('failed to prepare autotools project (autoreconf): {}',
                opts.name)
            return False

    # definitions
    autotools_defs = {
        '--prefix': opts.prefix,
        '--exec-prefix': opts.prefix,
    }
    if opts.conf_defs:
        autotools_defs.update(expand(opts.conf_defs))

    # default options
    autotools_opts = {}
    if opts.conf_opts:
        autotools_opts.update(expand(opts.conf_opts))

    # argument building
    autotools_args = []
    autotools_args.extend(prepare_definitions(autotools_defs))
    autotools_args.extend(prepare_arguments(autotools_opts))

    if not execute(['./configure'] + autotools_args,
                   env_update=expand(opts.conf_env),
                   critical=False):
        err('failed to prepare autotools project (configure): {}', opts.name)
        return False

    return True
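A standalone sketch of the resulting configure invocation for a hypothetical prefix, assuming definition pairs are flattened into "key=value" tokens (the exact formatting performed by the prepare helpers is not shown in this listing):

autotools_defs = {'--prefix': '/usr', '--exec-prefix': '/usr'}
configure_cmd = ['./configure'] + [
    '{}={}'.format(key, value) for key, value in autotools_defs.items()]
# roughly: ./configure --prefix=/usr --exec-prefix=/usr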
Code Example #22
File: io_copy.py Project: releng-tool/releng-tool
def path_copy(src, dst, quiet=False, critical=True, dst_dir=None):
    """
    copy a file or directory into a target file or directory

    This call will attempt to copy a provided file or directory, defined by
    ``src`` into a destination file or directory defined by ``dst``. If ``src``
    is a file, then ``dst`` is considered to be a file or directory; if ``src``
    is a directory, ``dst`` is considered a target directory. If a target
    directory or target file's directory does not exist, it will be
    automatically created. In the event that a file or directory could not be
    copied, an error message will be output to standard error (unless ``quiet``
    is set to ``True``). If ``critical`` is set to ``True`` and the specified
    file/directory could not be copied for any reason, this call will issue a
    system exit (``SystemExit``).

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        # (stage)
        # my-file
        releng_copy('my-file', 'my-file2')
        # (stage)
        # my-file
        # my-file2
        releng_copy('my-file', 'my-directory/')
        # (stage)
        # my-directory/my-file
        # my-file
        # my-file2
        releng_copy('my-directory/', 'my-directory2/')
        # (stage)
        # my-directory/my-file
        # my-directory2/my-file
        # my-file
        # my-file2

    Args:
        src: the source directory or file
        dst: the destination directory or file\\* (\\*if ``src`` is a file)
        quiet (optional): whether or not to suppress output
        critical (optional): whether or not to stop execution on failure
        dst_dir (optional): force hint that the destination is a directory

    Returns:
        ``True`` if the copy has completed with no error; ``False`` if the copy
        has failed

    Raises:
        SystemExit: if the copy operation fails with ``critical=True``
    """
    success = False
    errmsg = None

    try:
        if os.path.isfile(src):
            attempt_copy = True

            if dst_dir:
                base_dir = dst
            else:
                base_dir = os.path.dirname(dst)

            if not os.path.isdir(base_dir):
                attempt_copy = ensure_dir_exists(base_dir, quiet=quiet)

            if attempt_copy:
                if os.path.isdir(dst):
                    dst = os.path.join(dst, os.path.basename(src))

                if os.path.islink(src):
                    target = os.readlink(src)
                    if os.path.islink(dst) or os.path.isfile(dst):
                        path_remove(dst)

                    os.symlink(target, dst)
                else:
                    _copyfile(src, dst)

                _copystat(src, dst)
                success = True
        elif os.path.exists(src):
            if src == dst:
                errmsg = "'{!s}' and '{!s}' " \
                         "are the same folder".format(src, dst)
            elif _copy_tree(src, dst, quiet=quiet, critical=critical):
                success = True
        else:
            errmsg = 'source does not exist: {}'.format(src)
    except (IOError, ShutilError) as e:
        errmsg = str(e)

    if not quiet and errmsg:
        err('unable to copy source contents to target location\n'
            '    {}', errmsg)

    if not success and critical:
        sys.exit(-1)
    return success
Code Example #23
File: support.py Project: releng-tool/releng-tool
def require_version(version, quiet=False, critical=True):
    """
    perform a required-version check

    Enables a caller to explicitly check for a required releng-tool version.
    When this function is invoked with a dot-separated ``version`` string, the
    string will be parsed and compared with the running releng-tool version.
    If the required version is met, this method will have no effect. In the
    event that the required version is not met, the exception ``SystemExit``
    will be raised if the critical flag is set; otherwise this call will
    return ``False``.

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        # ensure we are using releng-tool v1
        releng_require_version('1.0.0')

    Args:
        version: dot-separated version string
        quiet (optional): whether or not to suppress output
        critical (optional): whether or not to stop execution on failure

    Returns:
        ``True`` if the version check is met; ``False`` if the version check
        has failed

    Raises:
        SystemExit: if the version check fails with ``critical=True``
    """

    rv = True

    if version:
        requested = version.split('.')
        current = releng_version.split('.')
        rv = requested <= current
        if not rv:
            if not quiet:
                args = {
                    'detected': releng_version,
                    'required': version,
                }
                err('''
required releng-tool version check has failed

This project has indicated a required minimum version of releng-tool to
be installed on this system; however, an older version has been
detected:

    (required) {required}
    (detected) {detected}

Please update to a more recent version:

 https://docs.releng.io/install/
'''.strip().format(**args))

            if critical:
                sys.exit(-1)

    return rv
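A hypothetical usage sketch with critical=False, probing for a newer releng-tool without aborting the run; the version value is arbitrary.

if require_version('0.13', quiet=True, critical=False):
    newer_features = True    # the running releng-tool meets the requirement
else:
    newer_features = False   # fall back to behaviour of older releases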
Code Example #24
def fetch(opts):
    """
    support fetching from cvs sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        ``True`` if the fetch stage is completed; ``False`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    cache_stem, __ = interpret_stem_extension(cache_basename)

    if not CVS.exists():
        err('unable to fetch package; cvs is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    try:
        cvsroot, module = site.rsplit(' ', 1)
    except ValueError:
        err('''\
improper cvs site defined

The provided CVS site does not define both the CVSROOT as well as the target
module to checkout. For example:

    :pserver:[email protected]:/var/lib/cvsroot mymodule

 Site: {}''', site)
        return None

    log('checking out sources')
    if not CVS.execute(['-d', cvsroot, 'checkout', '-d', cache_stem,
            '-r', revision, module], cwd=work_dir):
        err('unable to checkout module')
        return None

    cvs_module_dir = os.path.join(work_dir, cache_stem)
    if not os.path.exists(cvs_module_dir):
        err('no sources available for the provided revision')
        return None

    log('caching sources')
    def cvs_filter(info):
        if info.name.endswith('CVS'):
            return None
        return info

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(cvs_module_dir, arcname=cache_stem, filter=cvs_filter)

    return cache_file
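A standalone illustration of how the CVS site value is split above; the CVSROOT shown is a made-up example.

site = ':pserver:anonymous@cvs.example.org:/var/lib/cvsroot mymodule'
cvsroot, module = site.rsplit(' ', 1)
# cvsroot -> ':pserver:anonymous@cvs.example.org:/var/lib/cvsroot'
# module  -> 'mymodule'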
Code Example #25
File: archive.py Project: releng-tool/releng-tool
def extract(opts):
    """
    support extraction of an archive into a build directory

    With provided extraction options (``RelengExtractOptions``), the extraction
    stage will be processed. The archive's extension will be used in an attempt
    to find a matching tool/implementation which can be used to extract the
    contents of the file. In the event that the method of extraction cannot be
    determined, it will be assumed that the file is in fact not extractable.
    Files which are not extracted are just copied directly into the build
    directory (e.g. single resource files).

    Args:
        opts: the extraction options

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    strip_count = opts.strip_count
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    __, cache_ext = interpret_stem_extension(cache_basename)

    is_extractable = False
    if cache_ext:
        cache_ext = cache_ext.lower()

        # if the user defines a tool override for this extension type, use
        # whatever the user wants to use (passing the file and directory to
        # extract to)
        extract_override = getattr(opts, '_extract_override', None)
        if extract_override and cache_ext in extract_override:
            is_extractable = True

            tool_cmd = extract_override[cache_ext].format(file=cache_file,
                                                          dir=work_dir)

            if not execute(tool_cmd.split(), cwd=work_dir, critical=False):
                err('unable to extract with tool override\n'
                    ' (command: {})', tool_cmd)
                return None

        # attempt to extract the (compressed) tar archive with the host's
        # tar tool; if it does not exist, we'll fallback to using python's
        # internal implementation (tarfile)
        elif cache_ext.startswith(TAR_SUPPORTED):
            is_extractable = True

            # before attempting to use an external tar command, only allow
            # using it if the `force-local` option is available whenever a
            # colon character is provided, to prevent tar from assuming the
            # path is a remote target
            needs_force_local = False
            if ':' in cache_file:
                needs_force_local = True

            has_extracted = False
            if TAR.exists() and (TAR.force_local or not needs_force_local):
                tar_args = [
                    '--extract',
                    '--file=' + cache_file,
                    '--strip-components={}'.format(strip_count),
                    '--verbose',
                ]

                if needs_force_local:
                    tar_args.append('--force-local')

                if TAR.execute(tar_args, cwd=work_dir):
                    has_extracted = True
                else:
                    warn('unable to extract archive with host tar; '
                         'will use fallback')

            if not has_extracted:
                try:

                    def tar_extract(members, strip_count):
                        for member in members:
                            # strip members from package defined count
                            if strip_count > 0:
                                np = os.path.normpath(member.name)
                                parts = np.split(os.path.sep, strip_count)
                                if len(parts) <= strip_count:
                                    continue
                                member.name = parts[-1]

                            # notify the user of the target member to extract
                            print(member.name)
                            yield member

                    with tarfile.open(cache_file, 'r') as tar:
                        tar.extractall(path=work_dir,
                                       members=tar_extract(tar, strip_count))
                except Exception as e:
                    err(
                        'unable to extract tar file\n'
                        '    {}\n'
                        ' (file: {})\n'
                        ' (target: {})', e, cache_file, work_dir)
                    return False

        # extract a zip-extension cache file using python's internal
        # implementation (zipfile)
        elif cache_ext == 'zip':
            is_extractable = True

            try:
                with ZipFile(cache_file, 'r') as zip_:
                    for member in zip_.namelist():
                        # strip members from package defined count
                        member_s = member
                        if strip_count > 0:
                            np = os.path.normpath(member_s)
                            parts = np.split(os.path.sep, strip_count)
                            if len(parts) <= strip_count:
                                continue
                            member_s = parts[-1]
                        dest = os.path.join(work_dir, member_s)

                        # notify the user of the target member to extract
                        print(member)

                        # if this is a directory entry, ensure the directory
                        # exists for the destination
                        if not os.path.basename(member):
                            ensure_dir_exists(dest)
                        else:
                            # always ensure the container directory for a file
                            # exists before attempting to extract a member into
                            # it, as not all processed zip files may process
                            # a directory entry (to be created) ahead of time
                            ensure_dir_exists(os.path.dirname(dest))

                            with zip_.open(member) as s, open(dest, 'wb') as f:
                                shutil.copyfileobj(s, f)

            except Exception as e:
                err(
                    'unable to extract zip file\n'
                    '    {}\n'
                    ' (file: {})\n'
                    ' (target: {})', e, cache_file, work_dir)
                return False

    if not is_extractable:
        debug('file not considered extractable: ' + cache_file)
        try:
            shutil.copy2(cache_file, work_dir)
        except IOError as e:
            err(
                'unable to copy over cache file\n'
                '    {}\n'
                ' (file: {})\n'
                ' (target: {})', e, cache_file, work_dir)
            return False

    return True
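A standalone illustration of the strip-count handling shared by the tar and zip paths above: drop the first strip_count path components and skip members with nothing left. The member names are made up.

import os

strip_count = 1
for name in ('pkg-1.0/', 'pkg-1.0/src/main.c', 'pkg-1.0/README'):
    np = os.path.normpath(name)
    parts = np.split(os.path.sep, strip_count)
    if len(parts) <= strip_count:
        continue              # e.g. the top-level 'pkg-1.0/' entry is dropped
    print(parts[-1])          # -> src/main.c, README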
Code Example #26
    def check(self, quiet=False):
        """
        check for the existence of required tools for the loaded package set

        For each loaded package, a series of required host tools will be checked
        and a caller will be notified whether or not anything is missing.

        Args:
            quiet (optional): whether or not to suppress output (defaults to
                ``False``)

        Returns:
            ``True`` if all known required tools exist; ``False`` otherwise
        """

        missing = set()
        pkg_types = set()
        python_interpreters = set()
        vcs_types = set()

        # package-defined requirements check
        for pkg in self.pkgs:
            pkg_types.add(pkg.type)
            vcs_types.add(pkg.vcs_type)

            if pkg.type == PackageType.AUTOTOOLS:
                if pkg.autotools_autoreconf:
                    if AUTORECONF.exists():
                        self._verbose_exists(AUTORECONF)
                    else:
                        missing.add(AUTORECONF.tool)

            elif pkg.type == PackageType.PYTHON:
                if pkg.python_interpreter:
                    python_tool = PythonTool(pkg.python_interpreter)
                else:
                    python_tool = PYTHON
                python_interpreters.add(python_tool)

        if PackageType.AUTOTOOLS in pkg_types:
            if MAKE.exists():
                self._verbose_exists(MAKE)
            else:
                missing.add(MAKE.tool)

        if PackageType.CMAKE in pkg_types:
            if CMAKE.exists():
                self._verbose_exists(CMAKE)
            else:
                missing.add(CMAKE.tool)

        if PackageType.PYTHON in pkg_types:
            for interpreter in python_interpreters:
                if interpreter.exists():
                    self._verbose_exists(interpreter)
                else:
                    missing.add(interpreter.tool)

        if VcsType.BZR in vcs_types:
            if BZR.exists():
                self._verbose_exists(BZR)
            else:
                missing.add(BZR.tool)

        if VcsType.CVS in vcs_types:
            if CVS.exists():
                self._verbose_exists(CVS)
            else:
                missing.add(CVS.tool)

        if VcsType.GIT in vcs_types:
            if GIT.exists():
                self._verbose_exists(GIT)
            else:
                missing.add(GIT.tool)

        if VcsType.HG in vcs_types:
            if HG.exists():
                self._verbose_exists(HG)
            else:
                missing.add(HG.tool)

        if VcsType.RSYNC in vcs_types:
            if RSYNC.exists():
                self._verbose_exists(RSYNC)
            else:
                missing.add(RSYNC.tool)

        if VcsType.SCP in vcs_types:
            if SCP.exists():
                self._verbose_exists(SCP)
            else:
                missing.add(SCP.tool)

        if VcsType.SVN in vcs_types:
            if SVN.exists():
                self._verbose_exists(SVN)
            else:
                missing.add(SVN.tool)

        # project-provided tools check
        for tool in self.tools:
            if which(tool):
                verbose('prerequisite exists: ' + tool)
            else:
                missing.add(tool)

        if missing and not quiet:
            sorted_missing = list(missing)
            sorted_missing.sort()

            msg = 'missing the following host tools for this project:'
            msg += '\n'
            msg += '\n'
            for entry in sorted_missing:
                msg += ' ' + entry + '\n'
            err(msg)

        return len(missing) == 0
Code Example #27
File: init.py Project: releng-tool/releng-tool
def initialize_sample(opts):
    """
    initialize a sample project

    Generates a sample project in the root directory to help new users or new
    projects get started.

    Args:
        opts: options for this run

    Returns:
        ``True`` if the sample project could be initialized; ``False`` if an
        issue has occurred generating the sample project
    """

    root_dir = opts.root_dir

    if not ensure_dir_exists(root_dir):
        return False

    if os.listdir(root_dir):
        err('unable to initialize sample project in a non-empty directory')
        return False

    sample_dir = os.path.join(root_dir, 'package', 'sample')

    success = True
    if ensure_dir_exists(sample_dir):
        # sample project
        sample_defs = os.path.join(root_dir, 'package', 'sample', 'sample')
        try:
            with open(sample_defs, 'w') as f:
                f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

SAMPLE_DEPENDENCIES = []
SAMPLE_LICENSE = ['<license name>']
SAMPLE_LICENSE_FILES = ['<license file>']
SAMPLE_SITE = '<location for sources>'
SAMPLE_TYPE = '<package-type>'
SAMPLE_VERSION = '<package-version>'
''')

            verbose('written sample file')
        except IOError as e:
            err('unable to generate a sample file')
            verbose(str(e))
            success = False
    else:
        success = False

    # .gitignore
    try:
        project_gitignore = os.path.join(root_dir,
                                         '.gitignore')  # (assumption)
        with open(project_gitignore, 'w') as f:
            f.write('''\
# releng-tool
/cache/
/dl/
/output/
.releng-flag-*
''')

        verbose('written .gitignore file')
    except IOError as e:
        err('unable to generate a .gitignore file')
        verbose(str(e))
        success = False

    # releng project
    try:
        project_defs = os.path.join(root_dir, 'releng')
        with open(project_defs, 'w') as f:
            f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

packages = [
    'sample',
]
''')

        verbose('written releng file')
    except IOError as e:
        err('unable to generate a releng file')
        verbose(str(e))
        success = False

    if success:
        log('initialized empty releng-tool project')
    else:
        warn('partially initialized a releng-tool project')
    return success
Code Example #28
def stage(engine, pkg, script_env):
    """
    handles the configuration stage for a package

    With a provided engine and package instance, the configuration stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being configured
        script_env: script environment information

    Returns:
        ``True`` if the configuration stage is completed; ``False`` otherwise
    """

    note('configuring {}...', pkg.name)
    sys.stdout.flush()

    # ignore configuration step for types which do not have one
    if pkg.type == PackageType.PYTHON:
        return True

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    pkg_install_type = NC(pkg.install_type, PackageInstallType.TARGET)

    configure_opts = RelengConfigureOptions()
    replicate_package_attribs(configure_opts, pkg)
    configure_opts.build_dir = build_dir
    configure_opts.build_output_dir = pkg.build_output_dir
    configure_opts.conf_defs = pkg.conf_defs
    configure_opts.conf_env = pkg.conf_env
    configure_opts.conf_opts = pkg.conf_opts
    configure_opts.def_dir = pkg.def_dir
    configure_opts.env = script_env
    configure_opts.ext = pkg.ext_modifiers
    configure_opts.host_dir = engine.opts.host_dir
    configure_opts.install_type = pkg_install_type
    configure_opts.name = pkg.name
    configure_opts.prefix = NC(pkg.prefix, engine.opts.sysroot_prefix)
    configure_opts.staging_dir = engine.opts.staging_dir
    configure_opts.symbols_dir = engine.opts.symbols_dir
    configure_opts.target_dir = engine.opts.target_dir
    configure_opts.version = pkg.version
    configure_opts._quirks = engine.opts.quirks

    # if package has a job-override value, use it over any global option
    if pkg.fixed_jobs:
        configure_opts.jobs = pkg.fixed_jobs
        configure_opts.jobsconf = pkg.fixed_jobs
    else:
        configure_opts.jobs = engine.opts.jobs
        configure_opts.jobsconf = engine.opts.jobsconf

    configurer = None
    if pkg.type in engine.registry.package_types:

        def _(opts):
            return engine.registry.package_types[pkg.type].configure(
                pkg.type, opts)

        configurer = _
    elif pkg.type == PackageType.AUTOTOOLS:
        configurer = conf_autotools
    elif pkg.type == PackageType.CMAKE:
        configurer = conf_cmake
    elif pkg.type == PackageType.SCRIPT:
        configurer = conf_script

    if not configurer:
        err('configurer type is not implemented: {}', pkg.type)
        return False

    with interim_working_dir(build_dir):
        configured = configurer(configure_opts)
        if not configured:
            return False

    return True
Code Example #29
def stage(engine, pkg, script_env):
    """
    handles the installation stage for a package

    With a provided engine and package instance, the installation stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being built
        script_env: script environment information

    Returns:
        ``True`` if the installation stage is completed; ``False`` otherwise
    """

    note('installing {}...', pkg.name)
    sys.stdout.flush()

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    pkg_install_type = NC(pkg.install_type, PackageInstallType.TARGET)

    if pkg_install_type == PackageInstallType.HOST:
        dest_dirs = [engine.opts.host_dir]
    elif pkg_install_type == PackageInstallType.IMAGES:
        dest_dirs = [engine.opts.images_dir]
    elif pkg_install_type == PackageInstallType.STAGING:
        dest_dirs = [engine.opts.staging_dir]
    elif pkg_install_type == PackageInstallType.STAGING_AND_TARGET:
        dest_dirs = [engine.opts.staging_dir, engine.opts.target_dir]
    else:
        # default to target directory
        dest_dirs = [engine.opts.target_dir]

    install_opts = RelengInstallOptions()
    replicate_package_attribs(install_opts, pkg)
    install_opts.build_dir = build_dir
    install_opts.build_output_dir = pkg.build_output_dir
    install_opts.cache_file = pkg.cache_file
    install_opts.def_dir = pkg.def_dir
    install_opts.dest_dirs = dest_dirs
    install_opts.env = script_env
    install_opts.ext = pkg.ext_modifiers
    install_opts.host_dir = engine.opts.host_dir
    install_opts.images_dir = engine.opts.images_dir
    install_opts.install_defs = pkg.install_defs
    install_opts.install_env = pkg.install_env
    install_opts.install_opts = pkg.install_opts
    install_opts.install_type = pkg_install_type
    install_opts.name = pkg.name
    install_opts.prefix = NC(pkg.prefix, engine.opts.sysroot_prefix)
    install_opts.staging_dir = engine.opts.staging_dir
    install_opts.symbols_dir = engine.opts.symbols_dir
    install_opts.target_dir = engine.opts.target_dir
    install_opts.version = pkg.version
    install_opts._quirks = engine.opts.quirks

    installer = None
    if pkg.type in engine.registry.package_types:
        def _(opts):
            return engine.registry.package_types[pkg.type].install(
                pkg.type, opts)
        installer = _
    elif pkg.type == PackageType.AUTOTOOLS:
        installer = install_autotools
    elif pkg.type == PackageType.CMAKE:
        installer = install_cmake
    elif pkg.type == PackageType.PYTHON:
        installer = install_python
    elif pkg.type == PackageType.SCRIPT:
        installer = install_script

    if not installer:
        err('installer type is not implemented: {}', pkg.type)
        return False

    with interim_working_dir(build_dir):
        installed = installer(install_opts)
        if not installed:
            return False

    return True
Code Example #30
def stage(engine, pkg, script_env):
    """
    handles the patching stage for a package

    With a provided engine and package instance, the patching stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being patched
        script_env: script environment information

    Returns:
        ``True`` if the patching stage is completed; ``False`` otherwise
    """

    if pkg.is_internal:
        # packages flagged for local sources do not have a patch stage
        if pkg.local_srcs:
            return True

        # internal packages in development mode that specify a development
        # revision will not perform the patch stage
        if engine.opts.devmode and pkg.has_devmode_option:
            return True

    note('patching {}...', pkg.name)
    sys.stdout.flush()

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    patch_script_filename = '{}-{}'.format(pkg.name, PATCH_SCRIPT)
    patch_script = os.path.join(pkg.def_dir, patch_script_filename)
    if os.path.isfile(patch_script):
        try:
            run_path(patch_script, init_globals=script_env)

            verbose('patch script executed: ' + patch_script)
        except Exception as e:
            err('error running patch script: {}\n' '    {}', patch_script, e)
            return False

    # find all patches in the package's folder, sort and apply each
    patch_glob = os.path.join(pkg.def_dir, '*.patch')
    patches = glob(patch_glob)
    if patches:
        patches = sorted(patches)
        if not PATCH.exists():
            err('unable to apply patches; patch is not installed')
            return False

        for patch in patches:
            print('({})'.format(os.path.basename(patch)))

            if not PATCH.execute([
                    '--batch',
                    '--forward',
                    '--ignore-whitespace',
                    '--input={}'.format(patch),
                    '--strip=1',
            ],
                                 cwd=build_dir):
                err('failed to apply patch')
                return False

    return True