Example #1
    def save(self, desc=None):
        """
        save statistics for future reference

        Will save any statistics which should be persisted for future
        considerations. This is to help render a "complete" report of statistics
        when re-running releng-tool with packages which may have already been
        completed.

        Args:
            desc (optional): description of this save event (for logging)
        """

        if not ensure_dir_exists(self.out_dir):
            verbose('unable to generate output directory for statistics')
            return None

        if desc:
            desc = ' ({})'.format(desc)
        else:
            desc = ''

        try:
            with open(self.dat_file, 'wb') as f:
                pickle.dump(self.data, f, protocol=2)  # 2 for py2/py3 support
            debug('saved statistics' + desc)
        except IOError:
            verbose('failed to save statistics' + desc)
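
The save/load pair above (the matching load appears in Example #8) boils down
to a pickle round-trip. A minimal standalone sketch, assuming a hypothetical
`stats.dat` path:

import pickle

DAT_FILE = 'stats.dat'  # hypothetical statistics file path
data = {'duration': {'sample': {'build': 12}}}

# protocol 2 keeps the file readable by both python 2 and python 3,
# mirroring the `protocol=2` argument used above
with open(DAT_FILE, 'wb') as f:
    pickle.dump(data, f, protocol=2)

with open(DAT_FILE, 'rb') as f:
    restored = pickle.load(f)
assert restored == data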
Example #2
def install(opts):
    """
    support installation of cmake projects

    With provided installation options (``RelengInstallOptions``), the
    installation stage will be processed.

    Args:
        opts: installation options

    Returns:
        ``True`` if the installation stage is completed; ``False`` otherwise
    """

    if not CMAKE.exists():
        err('unable to install package; cmake is not installed')
        return False

    # check if the no-install flag is set
    if opts._cmake_noinstall:
        verbose('configured to skip install stage for cmake')
        return True

    # default definitions
    cmake_defs = {}
    if opts.install_defs:
        cmake_defs.update(expand(opts.install_defs))

    # default options
    cmake_opts = {
        # build RelWithDebInfo (when using multi-configuration projects)
        '--config': 'RelWithDebInfo',
        # default install using the install target
        '--target': 'install',
    }
    if opts.install_opts:
        cmake_opts.update(expand(opts.install_opts))

    # argument building
    cmake_args = [
        '--build',
        opts.build_output_dir,
    ]
    cmake_args.extend(prepare_definitions(cmake_defs, '-D'))
    cmake_args.extend(prepare_arguments(cmake_opts))

    # prepare environment for installation request; an environment dictionary is
    # always needed to apply a custom DESTDIR during each install request
    env = expand(opts.install_env)
    if not env:
        env = {}

    # install to each destination
    for dest_dir in opts.dest_dirs:
        env['DESTDIR'] = dest_dir
        if not CMAKE.execute(cmake_args, env=env):
            err('failed to install cmake project: {}', opts.name)
            return False

    return True
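
The argument-building helpers used above are not shown in these examples. A
rough sketch of what `prepare_definitions` and `prepare_arguments` plausibly
do, inferred only from how they are invoked here (the real implementations
may differ):

def prepare_definitions(defs, prefix=''):
    # turn {'--prefix': '/usr'} into ['--prefix=/usr'] (autotools-style), or
    # {'KEY': 'value'} with a '-D' prefix into ['-DKEY=value'] (cmake-style)
    return ['{}{}={}'.format(prefix, key, value)
            for key, value in defs.items()]

def prepare_arguments(args):
    # turn {'--config': 'RelWithDebInfo'} into ['--config', 'RelWithDebInfo'];
    # a None value could indicate a flag without an accompanying value
    prepared = []
    for key, value in args.items():
        prepared.append(key)
        if value is not None:
            prepared.append(value)
    return prepared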
Example #3
def build(opts):
    """
    support building cmake projects

    With provided build options (``RelengBuildOptions``), the build stage will
    be processed.

    Args:
        opts: build options

    Returns:
        ``True`` if the building stage is completed; ``False`` otherwise
    """

    if not CMAKE.exists():
        err('unable to build package; cmake is not installed')
        return False

    # definitions
    cmake_defs = {}
    if opts.build_defs:
        cmake_defs.update(expand(opts.build_defs))

    # options
    cmake_opts = {
        # build RelWithDebInfo (when using multi-configuration projects)
        '--config': 'RelWithDebInfo',
    }
    if opts.build_opts:
        cmake_opts.update(expand(opts.build_opts))

    # argument building
    cmake_args = [
        # tell cmake to invoke build process in the output directory
        '--build',
        opts.build_output_dir,
    ]
    cmake_args.extend(prepare_definitions(cmake_defs, '-D'))
    cmake_args.extend(prepare_arguments(cmake_opts))

    # enable a specific number of parallel jobs if set
    #
    # https://cmake.org/cmake/help/v3.12/manual/cmake.1.html#build-tool-mode
    if 'releng.cmake.disable_parallel_option' not in opts._quirks:
        if opts.jobsconf != 1 and opts.jobs > 1:
            cmake_args.append('--parallel')
            cmake_args.append(str(opts.jobs))
    else:
        verbose('cmake parallel jobs disabled by quirk')

    if not CMAKE.execute(cmake_args, env=expand(opts.build_env)):
        err('failed to build cmake project: {}', opts.name)
        return False

    return True
Example #4
    def _verbose_exists(self, tool):
        """
        verbose log that a provided tool exists

        Will generate a verbose log which will indicate to a user that a
        provided tool has been detected on the host system.

        Args:
            tool: the tool
        """
        verbose('prerequisite exists: ' + tool.tool)
Example #5
    def finalize_package(self, pkg, script):
        """
        finalize configuration for a package

        Attempts to finalize any configuration entries of an already populated
        package instance with options provided at a later stage in the
        releng-tool process. This is to support projects where select
        configuration options are defined in the package's source content,
        instead of the main releng-tool project.

        This call will accept a package instance to update and the script file
        which may include a series of configuration options to apply to a
        package. Note that any configuration option already set on the package
        will be used over any newly detected package option.

        Args:
            pkg: the package
            script: the package script to load

        Raises:
            RelengToolInvalidPackageConfiguration: when an error has been
                                                    detected loading any of the
                                                    package's extended options
        """
        verbose('finalize package configuration: {}', pkg.name)
        debug('script {}', script)

        if not os.path.isfile(script):
            raise RelengToolMissingPackageScript({
                'pkg_name': pkg.name,
                'script': script,
            })

        try:
            env = run_script(script, self.script_env, catch=False)
        except Exception as e:
            raise RelengToolInvalidPackageScript({
                'description': str(e),
                'script': script,
                'traceback': traceback.format_exc(),
            })

        # apply any options to unset configuration entries
        self._active_package = pkg.name
        self._active_env = env
        self._apply_postinit_options(pkg)

        # extend the active script environment if the post-init call succeeds
        extend_script_env(self.script_env, env)
Example #6
    def generate(self):
        """
        generate a final report of statistics

        To be invoked at the end of a releng-tool process, this call will
        generate reports/etc. for any tracked statistics information based on
        the current and previous invoked executions (if any).
        """
        if not ensure_dir_exists(self.out_dir):
            verbose('unable to generate output directory for statistics')
            return None

        self._generate_duration()
Example #7
def configure(opts):
    """
    support configuration for autotools projects

    With provided configuration options (``RelengConfigureOptions``), the
    configuration stage will be processed.

    Args:
        opts: configuration options

    Returns:
        ``True`` if the configuration stage is completed; ``False`` otherwise
    """

    # check if autoreconf should be executed
    if opts._autotools_autoreconf:
        verbose('configured to run autoreconf')
        if not AUTORECONF.exists():
            err('unable to configure package; autoreconf is not installed')
            return False

        if not AUTORECONF.execute(['--verbose']):
            err('failed to prepare autotools project (autoreconf): {}',
                opts.name)
            return False

    # definitions
    autotools_defs = {
        '--prefix': opts.prefix,
        '--exec-prefix': opts.prefix,
    }
    if opts.conf_defs:
        autotools_defs.update(expand(opts.conf_defs))

    # default options
    autotools_opts = {}
    if opts.conf_opts:
        autotools_opts.update(expand(opts.conf_opts))

    # argument building
    autotools_args = []
    autotools_args.extend(prepare_definitions(autotools_defs))
    autotools_args.extend(prepare_arguments(autotools_opts))

    if not execute(['./configure'] + autotools_args,
                   env_update=expand(opts.conf_env),
                   critical=False):
        err('failed to prepare autotools project (configure): {}', opts.name)
        return False

    return True
Example #8
    def load(self):
        """
        load any persisted statistics

        Will load any statistics which may have been persisted from a previous
        run. This is to help render a "complete" report of statistics when
        re-running releng-tool with packages which may have already been
        completed.
        """
        if not os.path.exists(self.dat_file):
            return

        try:
            with open(self.dat_file, 'rb') as f:
                self.data = pickle.load(f)
            debug('loaded statistics')
        except IOError:
            verbose('failed to load original statistics (io error)')
        except ValueError:
            verbose('failed to load original statistics (pickle error)')
Example #9
    def _load_dvcs_cache(self):
        """
        load any persisted dvcs cache information

        DVCS can be cached and shared over multiple projects. The following
        loads any cached DVCS database stored in the project's output folder,
        which may hint at the folder name for a project's cache.
        """

        if not self._dvcs_cache_enabled:
            return

        if os.path.exists(self._dvcs_cache_fname):
            try:
                with open(self._dvcs_cache_fname, 'rb') as f:
                    self._dvcs_cache = pickle.load(f)
                debug('loaded dvcs cache database')
            except IOError:
                verbose('failed to load dvcs cache database (io error)')
            except ValueError:
                verbose('failed to load dvcs cache database (pickle error)')
Example #10
    def validate(self, asc, target):
        """
        validate ascii-armored file against a target

        Accepting an ASCII-armored file, use gpg to validate the public key
        against the provided target.

        Args:
            asc: the asc file
            target: the target file

        Returns:
            ``True`` if the target has been validated; ``False`` otherwise
        """

        rv, out = self.execute_rv('--verify', asc, target)
        if rv == 0:
            verbose('validated: {}', asc)
        elif out:
            log(out)

        return rv == 0
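
For reference, a minimal standalone equivalent of the
`execute_rv('--verify', asc, target)` call above, assuming `gpg` is available
on the PATH:

import subprocess

def gpg_verify(asc, target):
    # run `gpg --verify <asc> <target>` and return the exit code along with
    # the combined stdout/stderr output
    proc = subprocess.Popen(['gpg', '--verify', asc, target],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    out, _ = proc.communicate()
    return proc.returncode, out.strip()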
Example #11
    def _save_dvcs_cache(self):
        """
        save dvcs cache information

        Will save any DVCS cache information which future runs of releng-tool
        can use to hint where package cache data is stored.
        """

        if not self._dvcs_cache_enabled:
            return

        if not ensure_dir_exists(self.opts.cache_dir):
            verbose('unable to generate output directory for dvcs cache')
            return

        try:
            with open(self._dvcs_cache_fname, 'wb') as f:
                pickle.dump(self._dvcs_cache, f,
                            protocol=2)  # 2 for py2/py3 support
            debug('saved dvcs cache')
        except IOError:
            verbose('failed to save dvcs cache')
Example #12
def configure(opts):
    """
    support configuration via project-defined scripts

    With provided configuration options (``RelengConfigureOptions``), the
    configuration stage will be processed.

    Args:
        opts: configuration options

    Returns:
        ``True`` if the configuration stage is completed; ``False`` otherwise
    """

    assert opts
    build_dir = opts.build_dir
    def_dir = opts.def_dir
    env = opts.env

    configure_script_filename = '{}-{}'.format(opts.name, CONFIGURE_SCRIPT)
    configure_script = os.path.join(def_dir, configure_script_filename)
    configure_script, configure_script_exists = opt_file(configure_script)
    if not configure_script_exists:
        if (opts._skip_remote_scripts
                or 'releng.disable_remote_scripts' in opts._quirks):
            return True

        configure_script_filename = '{}-{}'.format('releng', CONFIGURE_SCRIPT)
        configure_script = os.path.join(build_dir, configure_script_filename)
        configure_script, configure_script_exists = opt_file(configure_script)
        if not configure_script_exists:
            return True

    if not run_script(configure_script, env, subject='configure'):
        return False

    verbose('configure script executed: ' + configure_script)
    return True
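
The `opt_file` helper used in this and the following script-based stages is
not shown. A simplified stand-in that matches how it is called (the real
helper may probe alternate file extensions):

import os

def opt_file(path):
    # hypothetical stand-in: return the script path along with a flag
    # indicating whether such a file exists
    return path, os.path.isfile(path)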
Example #13
def stage(engine, pkg, script_env):
    """
    handles the post-processing stage for a package

    With a provided engine and package instance, the post-processing stage will
    be processed. This stage is typically not advertised and is for advanced
    cases where a developer wishes to manipulate their build environment after
    a package has completed each of its phases.

    Args:
        engine: the engine
        pkg: the package being built
        script_env: script environment information

    Returns:
        ``True`` if the post-processing stage is completed; ``False`` otherwise
    """

    verbose('post-processing {}...', pkg.name)
    sys.stdout.flush()

    post_script_filename = '{}-{}'.format(pkg.name, POST_SCRIPT)
    post_script = os.path.join(pkg.def_dir, post_script_filename)
    post_script, post_script_exists = opt_file(post_script)
    if not post_script_exists:
        return True

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    with interim_working_dir(build_dir):
        if not run_script(post_script, script_env, subject='post-processing'):
            return False

    verbose('post-processing script executed: ' + post_script)
    return True
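
The `interim_working_dir` helper used above is a context manager. A plausible
minimal sketch (the real helper may add error handling):

import os
from contextlib import contextmanager

@contextmanager
def interim_working_dir(path):
    # temporarily switch the working directory, restoring the original
    # directory when the context exits (even on error)
    original = os.getcwd()
    try:
        os.chdir(path)
        yield path
    finally:
        os.chdir(original)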
Example #14
def build(opts):
    """
    support building via project-defined scripts

    With provided build options (``RelengBuildOptions``), the build stage will
    be processed.

    Args:
        opts: build options

    Returns:
        ``True`` if the building stage is completed; ``False`` otherwise
    """

    assert opts
    build_dir = opts.build_dir
    def_dir = opts.def_dir
    env = opts.env

    build_script_filename = '{}-{}'.format(opts.name, BUILD_SCRIPT)
    build_script = os.path.join(def_dir, build_script_filename)
    build_script, build_script_exists = opt_file(build_script)
    if not build_script_exists:
        if (opts._skip_remote_scripts or
                'releng.disable_remote_scripts' in opts._quirks):
            return True

        build_script_filename = '{}-{}'.format('releng', BUILD_SCRIPT)
        build_script = os.path.join(build_dir, build_script_filename)
        build_script, build_script_exists = opt_file(build_script)
        if not build_script_exists:
            return True

    if not run_script(build_script, env, subject='build'):
        return False

    verbose('build script executed: ' + build_script)
    return True
Example #15
def install(opts):
    """
    support installation via project-defined scripts

    With provided installation options (``RelengInstallOptions``), the
    installation stage will be processed.

    Args:
        opts: installation options

    Returns:
        ``True`` if the installation stage is completed; ``False`` otherwise
    """

    assert opts
    build_dir = opts.build_dir
    def_dir = opts.def_dir
    env = opts.env

    install_script_filename = '{}-{}'.format(opts.name, INSTALL_SCRIPT)
    install_script = os.path.join(def_dir, install_script_filename)
    install_script, install_script_exists = opt_file(install_script)
    if not install_script_exists:
        if (opts._skip_remote_scripts or
                'releng.disable_remote_scripts' in opts._quirks):
            return True

        install_script_filename = '{}-{}'.format('releng', INSTALL_SCRIPT)
        install_script = os.path.join(build_dir, install_script_filename)
        install_script, install_script_exists = opt_file(install_script)
        if not install_script_exists:
            return True

    if not run_script(install_script, env, subject='install'):
        return False

    verbose('install script executed: ' + install_script)
    return True
Example #16
def _verify_revision(git_dir, revision, quiet=False):
    """
    verify the gpg signature for a target revision

    The GPG signature for a provided revision (tag or commit) will be checked
    to validate the revision.

    Args:
        git_dir: the Git directory
        revision: the revision to verify
        quiet (optional): whether or not to log that verification is happening

    Returns:
        ``True`` if the revision is signed; ``False`` otherwise
    """

    if not quiet:
        log('verifying the gpg signature on the target revision')
    else:
        verbose('verifying the gpg signature on the target revision')

    if GIT.execute(
            [git_dir, 'rev-parse', '--quiet', '--verify', revision + '^{tag}'],
            quiet=True):
        verified_cmd = 'verify-tag'
    else:
        verified_cmd = 'verify-commit'

        # acquire the commit id (if not already set), to ensure we can verify
        # against commits or branches
        rv, revision = GIT.execute_rv(git_dir, 'rev-parse', revision)
        if rv != 0:
            verbose('failed to determine the commit id for a revision')
            return False

    return GIT.execute([git_dir, verified_cmd, revision], quiet=quiet)
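
The tag-versus-commit detection above relies on Git's revision-peeling syntax:
`rev-parse --verify <revision>^{tag}` only succeeds when the revision resolves
to an annotated tag. A standalone sketch of the same check, assuming `git` is
available on the PATH:

import os
import subprocess

def is_annotated_tag(git_dir, revision):
    # `rev-parse --quiet --verify <rev>^{tag}` exits with zero only when the
    # revision can be peeled to an annotated tag object
    with open(os.devnull, 'w') as devnull:
        rv = subprocess.call(
            ['git', '--git-dir=' + git_dir, 'rev-parse',
             '--quiet', '--verify', revision + '^{tag}'],
            stdout=devnull, stderr=devnull)
    return rv == 0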
Example #17
def initialize_sample(opts):
    """
    initialize a sample project

    Generates a sample project in the root directory to help new users or new
    projects get started.

    Args:
        opts: options for this run

    Returns:
        ``True`` if the sample project could be initialized; ``False`` if an
        issue has occurred generating the sample project
    """

    root_dir = opts.root_dir

    if not ensure_dir_exists(root_dir):
        return False

    if os.listdir(root_dir):
        err('unable to initialize sample project in a non-empty directory')
        return False

    sample_dir = os.path.join(root_dir, 'package', 'sample')

    success = True
    if ensure_dir_exists(sample_dir):
        # sample project
        sample_defs = os.path.join(root_dir, 'package', 'sample', 'sample')
        try:
            with open(sample_defs, 'w') as f:
                f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

SAMPLE_DEPENDENCIES = []
SAMPLE_LICENSE = ['<license name>']
SAMPLE_LICENSE_FILES = ['<license file>']
SAMPLE_SITE = '<location for sources>'
SAMPLE_TYPE = '<package-type>'
SAMPLE_VERSION = '<package-version>'
''')

            verbose('written sample file')
        except IOError as e:
            err('unable to generate a sample file')
            verbose(str(e))
            success = False
    else:
        success = False

    # .gitignore
    try:
        project_gitignore = os.path.join(root_dir,
                                         '.gitignore')  # (assumption)
        with open(project_gitignore, 'w') as f:
            f.write('''\
# releng-tool
/cache/
/dl/
/output/
.releng-flag-*
''')

        verbose('written .gitignore file')
    except IOError as e:
        err('unable to generate a .gitignore file')
        verbose(str(e))
        success = False

    # releng project
    try:
        project_defs = os.path.join(root_dir, 'releng')
        with open(project_defs, 'w') as f:
            f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

packages = [
    'sample',
]
''')

        verbose('written releng file')
    except IOError as e:
        err('unable to generate a releng file')
        verbose(str(e))
        success = False

    if success:
        log('initialized empty releng-tool project')
    else:
        warn('partially initialized a releng-tool project')
    return success
Example #18
    def _generate_duration(self):
        """
        generate duration-related statistics

        When generating a statistics report, this call creates/adds information
        about durations which may have been captured.
        """

        if 'duration' not in self.data:
            return

        durations = self.data['duration']

        pkgs = list(durations.keys())
        pkgs = sorted(pkgs)

        categories = set()
        for pkg_data in durations.values():
            categories.update(pkg_data.keys())
        categories = sorted(categories)

        ordered_categories = [
            'boot',
            'fetch',
            'extract',
            'patch',
            'configure',
            'build',
            'install',
            'post',
        ]

        # keep only the ordered categories that were actually captured (avoid
        # removing entries from a list while iterating over it)
        ordered_categories = [
            c for c in ordered_categories if c in categories
        ]
        for category in categories:
            if category not in ordered_categories:
                ordered_categories.append(category)
        categories = ordered_categories

        # duration statistics to csv
        verbose('generating duration statistics (csv)...')
        dur_csv = os.path.join(self.out_dir, 'durations.csv')
        try:
            with open(dur_csv, 'w') as f:
                # header
                f.write('# pkg')
                for category in categories:
                    f.write(',' + category)
                f.write('\n')

                # data
                for pkg in pkgs:
                    f.write(pkg)

                    for category in categories:
                        if category in durations[pkg]:
                            value = durations[pkg][category]
                        else:
                            value = 0
                        f.write(',' + str(int(value)))
                    f.write('\n')
        except IOError as e:
            verbose('failed to write duration statistics: {}', e)

        # duration statistics to plot (if available)
        if has_matplotlib:
            verbose('generating duration statistics (pdf)...')

            BAR_HEIGHT = 0.4
            EXTRA_HEIGHT = 1
            FIG_WIDTH = 10
            fig_height_pkgs = (BAR_HEIGHT + EXTRA_HEIGHT) * len(pkgs)
            fig_height_total = (BAR_HEIGHT + EXTRA_HEIGHT) * (len(pkgs) + 1)

            figsize_pkgs = (FIG_WIDTH, fig_height_pkgs)
            figsize_total = (FIG_WIDTH, fig_height_total)

            fig_pkgs, ax_pkgs = plt.subplots(figsize=figsize_pkgs)
            fig_total, ax_total = plt.subplots(figsize=figsize_total)
            axs = [ax_pkgs, ax_total]
            figs = [fig_pkgs, fig_total]

            pkgs.reverse()
            pkgs_total = list(pkgs)
            pkgs_total.insert(0, 'total')

            offset = [0] * len(pkgs)
            offset_total = [0] * len(pkgs_total)
            for category in categories:
                width = []
                width_total = []
                total = 0

                for pkg in pkgs:
                    if category in durations[pkg]:
                        duration = durations[pkg][category]
                        width.append(duration)
                        width_total.append(duration)
                        total += duration
                    else:
                        width.append(0)
                        width_total.append(0)
                width_total.insert(0, total)

                ax_pkgs.barh(pkgs,
                             width,
                             height=BAR_HEIGHT,
                             left=offset,
                             label=category)
                ax_total.barh(pkgs_total,
                              width_total,
                              height=BAR_HEIGHT,
                              left=offset_total,
                              label=category)
                offset = numpy.add(offset, width)
                offset_total = numpy.add(offset_total, width_total)

            # provide some spacing near the right
            xlim = int(math.ceil(max(offset) / 10.)) * 10
            if xlim - max(offset) < 10:
                xlim += 10
            ax_pkgs.set_xlim([0, xlim])

            xlim_total = int(math.ceil(max(offset_total) / 10.)) * 10
            if xlim_total - max(offset_total) < 10:
                xlim_total += 10
            ax_total.set_xlim([0, xlim_total])

            # labels
            for ax in axs:
                ax.set_title('Package Stage Durations')
                ax.set_xlabel('Duration (seconds)')
                ax.legend()
                ax.grid(axis='x', linestyle=':', linewidth=0.4)

            # ensure rotated labels stay in the render area
            for fig in figs:
                fig.tight_layout()

            # generate figures
            dur_pdf = os.path.join(self.out_dir, 'durations.pdf')
            fig_pkgs.savefig(dur_pdf)

            dur_pdf_total = os.path.join(self.out_dir, 'durations-total.pdf')
            fig_total.savefig(dur_pdf_total)

            # close/cleanup figures
            plt.close()
        else:
            debug('duration statistics plot not supported')
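
The plotting above builds stacked horizontal bars by accumulating each
category's widths into the `left` offsets. A self-contained miniature of the
same technique, using illustrative data:

import matplotlib.pyplot as plt
import numpy

pkgs = ['pkg-b', 'pkg-a']   # bottom-to-top bar order
stages = {                  # illustrative durations (seconds)
    'fetch': [4, 9],
    'build': [30, 18],
}

fig, ax = plt.subplots()
offset = [0] * len(pkgs)
for stage, widths in stages.items():
    # each stage's segment starts where the previous segments ended
    ax.barh(pkgs, widths, height=0.4, left=offset, label=stage)
    offset = numpy.add(offset, widths)

ax.set_xlabel('Duration (seconds)')
ax.legend()
fig.savefig('durations-mini.pdf')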
Example #19
def _execute(args,
             cwd=None,
             env=None,
             env_update=None,
             quiet=None,
             critical=True,
             poll=False,
             capture=None):
    """
    execute the provided command/arguments

    Runs the command described by ``args`` until completion. A caller can adjust
    the working directory of the executed command by explicitly setting the
    directory in ``cwd``. The execution request will return the command's return
    code as well as any captured output.

    The environment variables used on execution can be manipulated in two ways.
    First, the environment can be explicitly controlled by applying a new
    environment content using the ``env`` dictionary. Keys of the dictionary
    will be used as environment variable names, and the respective values
    will be used as the environment variable values. If ``env`` is not
    provided, the
    existing environment of the executing context will be used. Second, a caller
    can instead update the existing environment by using the ``env_update``
    option. Like ``env``, the key-value pairs map to respective environment
    key-value pairs. The difference with this option is that the call will
    start from the original environment values and only update the select
    values provided in the request. When ``env`` and ``env_update`` are both
    provided, the ``env_update`` entries will be applied on top of ``env``
    instead of the original environment of the caller.

    If ``critical`` is set to ``True`` and the execution fails for any reason,
    this call will issue a system exit (``SystemExit``). By default, the
    critical flag is enabled (i.e. ``critical=True``).

    In special cases, an executing process may not provide carriage returns/new
    lines to simple output processing. This can lead the output of a process to
    be undesirably buffered. To work around this issue, the execution call can
    instead poll for output results by using the ``poll`` option with a value
    of ``True``. By default, polling is disabled with a value of ``False``.

    A caller may wish to capture the provided output from a process for
    examination. If a list is provided in the call argument ``capture``, the
    list will be populated with the output provided from an invoked process.

    Args:
        args: the list of arguments to execute
        cwd (optional): working directory to use
        env (optional): environment variables to use for the process
        env_update (optional): environment variables to append for the process
        quiet (optional): whether or not to suppress output (defaults to
            ``False``)
        critical (optional): whether or not to stop execution on failure
            (defaults to ``True``)
        poll (optional): force polling stdin/stdout for output data (defaults to
            ``False``)
        capture (optional): list to capture output into

    Returns:
        the return code of the execution request

    Raises:
        SystemExit: if the execution operation fails with ``critical=True``
    """

    # append provided environment updates (if any) to the provided or existing
    # environment dictionary
    final_env = None
    if env:
        final_env = dict(env)
    if env_update:
        if not final_env:
            final_env = os.environ.copy()
        final_env.update(env_update)

    # if quiet is undefined, default its state based on whether or not the
    # caller wishes to capture output to a list
    if quiet is None:
        quiet = capture is not None

    cmd_str = None
    rv = 1
    if args:
        # force any `None` arguments to empty strings, as a subprocess request
        # will not accept it; ideally, a call should not be passing a `None`
        # entry, but providing flexibility when it has been done
        args = [arg if arg is not None else '' for arg in args]

        # attempt to always invoke using a script's interpreter (if any) to
        # help deal with long-path calls
        if sys.platform != 'win32':
            args = prepend_shebang_interpreter(args)

        # python 2.7 can have trouble with unicode environment variables;
        # forcing all values to an ascii type
        if final_env and sys.version_info[0] < 3:
            debug('detected python 2.7; sanity checking environment variables')
            for k, v in final_env.items():
                if isinstance(v, unicode):  # pylint: disable=E0602 # noqa: F821
                    final_env[k] = v.encode('ascii', 'replace')

        if is_verbose():
            debug('(wd) {}', cwd if cwd else os.getcwd())
            cmd_str = _cmd_args_to_str(args)
            verbose('invoking: ' + cmd_str)
            sys.stdout.flush()

        try:
            # check if this execution should poll (for carriage returns and new
            # lines); note if quiet mode is enabled, do not attempt to poll
            # since none of the output will be printed anyways.
            if poll and not quiet:
                debug('will poll process for output')
                bufsize = 0
                universal_newlines = False
            else:
                bufsize = 1
                universal_newlines = True

            proc = subprocess.Popen(
                args,
                bufsize=bufsize,
                cwd=cwd,
                env=final_env,
                stderr=subprocess.STDOUT,
                stdout=subprocess.PIPE,
                universal_newlines=universal_newlines,
            )

            if bufsize == 0:
                line = bytearray()
                while True:
                    c = proc.stdout.read(1)
                    if not c and proc.poll() is not None:
                        break
                    line += c
                    if c == b'\r' or c == b'\n':
                        decoded_line = line.decode('utf_8')
                        if c == b'\n' and capture is not None:
                            capture.append(decoded_line)
                        if not quiet:
                            sys.stdout.write(decoded_line)
                            sys.stdout.flush()
                        del line[:]
            else:
                for line in iter(proc.stdout.readline, ''):
                    if capture is not None or not quiet:
                        line = line.rstrip()
                        if capture is not None:
                            capture.append(line)
                        if not quiet:
                            print(line)
                            sys.stdout.flush()
            proc.communicate()

            rv = proc.returncode
        except OSError as e:
            if not quiet:
                if not cmd_str:
                    cmd_str = _cmd_args_to_str(args)

                err('unable to execute command: {}\n    {}', cmd_str, e)

    if rv != 0:
        if critical:
            if args:
                cmd_str = _cmd_args_to_str(args)
            else:
                cmd_str = '<empty>'
            err('failed to issue command: ' + cmd_str)

            # trigger a hard stop
            sys.exit(-1)
        elif args:
            debug('failed to issue last command')
        else:
            debug('failed to issue an empty command')

    return rv
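
A sketch of how a caller might drive `_execute`, capturing output while
extending the environment (the command here is illustrative):

captured = []
rv = _execute(['git', '--version'],
              env_update={'LANG': 'C'},  # extend, not replace, the environment
              capture=captured)          # capturing also implies quiet output
if rv == 0:
    print('\n'.join(captured))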
Example #20
def stage(engine, pkg, ignore_cache, extra_opts):
    """
    handles the fetching stage for a package

    With a provided engine and package instance, the fetching stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being fetched
        ignore_cache: always attempt to ignore the cache
        extra_opts: extra options for the fetch operation (if applicable)

    Returns:
        ``True`` if the fetching stage is completed; ``False`` otherwise
    """
    assert pkg.vcs_type
    name = pkg.name
    debug('process fetch stage: ' + name)

    # packages flagged for local sources are required to be already checked out
    if pkg.local_srcs:
        if os.path.isdir(pkg.build_dir):
            return True

        err(
            '''\
missing local sources for internal package: {0}

The active configuration is flagged for 'local sources' mode; however, an
internal package cannot be found in the local system. Before continuing, ensure
you have checked out all internal packages on your local system (or, disable the
local sources option to use the default process).

       Package: {0}
 Expected Path: {1}''', name, pkg.build_dir)
        return False

    # if the vcs-type is archive-based, flag that hash checks are needed
    perform_file_asc_check = False
    perform_file_hash_check = False
    if pkg.vcs_type == VcsType.URL:
        perform_file_asc_check = os.path.exists(pkg.asc_file)
        perform_file_hash_check = True

    fetch_opts = RelengFetchOptions()
    replicate_package_attribs(fetch_opts, pkg)
    fetch_opts.cache_dir = pkg.cache_dir
    fetch_opts.ext = pkg.ext_modifiers
    fetch_opts.extra_opts = extra_opts
    fetch_opts.ignore_cache = ignore_cache
    fetch_opts.name = name
    fetch_opts.revision = pkg.revision
    fetch_opts.site = pkg.site
    fetch_opts.version = pkg.version
    fetch_opts._mirror = False
    fetch_opts._quirks = engine.opts.quirks
    fetch_opts._urlopen_context = engine.opts.urlopen_context

    cache_filename = os.path.basename(pkg.cache_file)
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir, \
            generate_temp_dir(out_dir) as interim_cache_dir:
        with interim_working_dir(work_dir):
            interim_cache_file = os.path.join(interim_cache_dir,
                                              cache_filename)
            fetch_opts.cache_file = interim_cache_file
            fetch_opts.work_dir = work_dir

            # check if file caching should be ignored
            #
            # In special cases, a developer may configure a project to not
            # cache a fetched source. For example, pulling from a branch of a
            # VCS source will create a cache file for the branch which will
            # remain until manually removed. A user may wish to re-build the
            # local cache file after cleaning their project.
            # While the releng-tool framework separates fetching/extraction into
            # two parts, ignoring cached assets can be partially achieved by
            # just removing any detected cache file if a project is configured
            # to ignore a cache file.
            if engine.opts.devmode and pkg.devmode_ignore_cache is not None:
                fetch_opts.ignore_cache = pkg.devmode_ignore_cache

                if pkg.devmode_ignore_cache and os.path.exists(pkg.cache_file):
                    verbose('removing cache file (per configuration): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # remove cache file if there is a force request to ignore the cache
            elif engine.opts.force and ignore_cache:
                if os.path.exists(pkg.cache_file):
                    verbose('removing cache file (forced): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # force explicit ignore cache (to off) when not in development mode
            elif not engine.opts.devmode and ignore_cache is None:
                fetch_opts.ignore_cache = False

            if os.path.exists(pkg.cache_file):
                rv = None
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file,
                                       pkg.cache_file,
                                       relaxed=True)

                    if hr == HashResult.VERIFIED:
                        rv = True
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                        rv = True  # no hash file to compare with; assuming ok
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                        rv = True  # empty hash file; assuming ok
                    elif hr == HashResult.MISMATCH:
                        if not path_remove(pkg.cache_file):
                            rv = False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        rv = False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            rv = False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        rv = False
                else:
                    rv = True

                if rv is not False and perform_file_asc_check and \
                        os.path.exists(pkg.cache_file):
                    if GPG.validate(pkg.asc_file, pkg.cache_file):
                        rv = True
                    else:
                        if not path_remove(pkg.cache_file):
                            err(
                                '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

 ASC File: {}
     File: {}''', pkg.asc_file, cache_filename)
                            rv = False
                        else:
                            rv = None

                if rv is not None:
                    if ignore_cache:
                        verbose('ignoring cache not supported for package: {}',
                                name)
                    return rv

            # find fetching method for the target vcs-type
            fetcher = None
            if pkg.vcs_type in engine.registry.fetch_types:

                def _(opts):
                    return engine.registry.fetch_types[pkg.vcs_type].fetch(
                        pkg.vcs_type, opts)

                fetcher = _
            elif pkg.vcs_type == VcsType.BZR:
                fetcher = fetch_bzr
            elif pkg.vcs_type == VcsType.CVS:
                fetcher = fetch_cvs
            elif pkg.vcs_type == VcsType.GIT:
                fetcher = fetch_git
            elif pkg.vcs_type == VcsType.HG:
                fetcher = fetch_mercurial
            elif pkg.vcs_type == VcsType.RSYNC:
                fetcher = fetch_rsync
            elif pkg.vcs_type == VcsType.SCP:
                fetcher = fetch_scp
            elif pkg.vcs_type == VcsType.SVN:
                fetcher = fetch_svn
            elif pkg.vcs_type == VcsType.URL:
                fetcher = fetch_url

            if not fetcher:
                err('fetch type is not implemented: {}', pkg.vcs_type)
                return False

            # if this is a url-type location, attempt to search on the mirror
            # first (if configured)
            fetched = None
            if engine.opts.url_mirror and pkg.vcs_type == VcsType.URL:
                original_site = fetch_opts.site
                new_site = engine.opts.url_mirror + cache_filename
                if original_site != new_site:
                    fetch_opts._mirror = True

                    fetch_opts.site = new_site
                    fetched = fetcher(fetch_opts)
                    fetch_opts.site = original_site

                    fetch_opts._mirror = False

            # perform the fetch request (if not already fetched)
            if not fetched:
                fetched = fetcher(fetch_opts)
                if not fetched:
                    return False

            # if the fetch type has populated the package's cache directory
            # directly, we are done
            if fetched == pkg.cache_dir:
                pass
            # if the fetch type has returned a file, the file needs to be hash
            # checked and then be moved into the download cache
            elif fetched == interim_cache_file:
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file, fetched)
                    if hr == HashResult.VERIFIED:
                        pass
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                    elif hr == HashResult.MISMATCH:
                        return False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        return False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            return False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        return False

                if perform_file_asc_check:
                    if not GPG.validate(pkg.asc_file, interim_cache_file):
                        err(
                            '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

     ASC File: {}
         File: {}''', pkg.asc_file, cache_filename)
                        return False

                debug('fetch successful; moving cache file')

                # ensure the cache container/directory exists
                cache_dir = os.path.dirname(pkg.cache_file)
                if not ensure_dir_exists(cache_dir):
                    return False

                try:
                    shutil.move(interim_cache_file, pkg.cache_file)
                except shutil.Error:
                    err(
                        'invalid fetch operation (internal error; fetch mode '
                        '"{}" has provided a missing cache file)',
                        pkg.vcs_type)
                    return False
            else:
                err(
                    'invalid fetch operation (internal error; fetch mode "{}" '
                    'has returned an unsupported value)', pkg.vcs_type)
                return False

    return True
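
`verify_hashes` itself is not shown in these examples. At its core, a check
like the ones above reduces to streaming a file through `hashlib` and
comparing digests; a minimal sketch, assuming hash entries of the form
`<algorithm> <digest> <filename>`:

import hashlib

def check_hash_entry(algorithm, expected_digest, path):
    # stream the file in chunks to avoid loading large archives into memory
    h = hashlib.new(algorithm)
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    return h.hexdigest() == expected_digest.lower()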
Example #21
def _fetch_submodules(opts, cache_dir, revision):
    """
    fetch the submodules on a provided cache/bare repository

    Using a provided bare repository, submodules configured at the provided
    revision will be fetched into the bare repository's modules directory. If it
    has been detected that a submodule contains additional submodules, they will
    also be fetched into a cache directory.

    Args:
        opts: fetch options
        cache_dir: the cache/bare repository
        revision: the revision (branch, tag, hash) to fetch

    Returns:
        ``True`` if submodules have been processed; ``False`` otherwise
    """
    assert revision

    git_dir = '--git-dir=' + cache_dir

    # find a .gitmodules configuration on the target revision
    submodule_ref = '{}:.gitmodules'.format(revision)
    rv, raw_submodules = GIT.execute_rv(git_dir, 'show', submodule_ref)
    if rv != 0:
        submodule_ref = 'origin/' + submodule_ref
        rv, raw_submodules = GIT.execute_rv(git_dir, 'show', submodule_ref)
        if rv != 0:
            verbose('no git submodules file detected for this revision')
            return True

    debug('parsing git submodules file...')
    cfg = GIT.parse_cfg_str(raw_submodules)
    if not cfg:
        verbose('no git submodules file detected for this revision')
        return False

    for sec_name in cfg.sections():
        if not sec_name.startswith('submodule'):
            continue

        if not cfg.has_option(sec_name, 'path') or \
                not cfg.has_option(sec_name, 'url'):
            debug('submodule section missing path/url')
            continue

        submodule_path = cfg.get(sec_name, 'path')
        submodule_revision = None
        if cfg.has_option(sec_name, 'branch'):
            submodule_revision = cfg.get(sec_name, 'branch')
        submodule_url = cfg.get(sec_name, 'url')
        verbose('detected submodule: {}', submodule_path)
        debug('submodule revision: {}',
              submodule_revision if submodule_revision else '(none)')
        debug('submodule url: {}', submodule_url)

        ckey = pkg_cache_key(submodule_url)
        root_cache_dir = os.path.abspath(
            os.path.join(opts.cache_dir, os.pardir))
        submodule_cache_dir = os.path.join(root_cache_dir, ckey)
        verbose('submodule_cache_dir: {}', submodule_cache_dir)

        # check to make sure the submodule's path isn't pointing to a relative
        # path outside the expected cache base
        check_abs = os.path.abspath(submodule_cache_dir)
        check_common = os.path.commonprefix((submodule_cache_dir, check_abs))
        if check_abs != check_common:
            err('unable to process submodule pathed outside of '
                'bare repository')
            verbose('submodule expected base: {}', check_common)
            verbose('submodule absolute path: {}', check_abs)
            return False

        # fetch/cache the submodule repository
        if not _fetch_submodule(opts, submodule_path, submodule_cache_dir,
                                submodule_revision, submodule_url):
            return False

        # if a revision is not provided, extract the HEAD from the cache
        if not submodule_revision:
            submodule_revision = GIT.extract_submodule_revision(
                submodule_cache_dir)
            if not submodule_revision:
                return False

        # process nested submodules
        if not _fetch_submodules(opts, submodule_cache_dir,
                                 submodule_revision):
            return False

    return True
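
`GIT.parse_cfg_str` presumably wraps an INI-style parser, since `.gitmodules`
follows Git's configuration format. A rough standalone sketch using
`configparser` (note that Git indents keys, so the indentation is stripped
before parsing):

import configparser

def parse_gitmodules(raw):
    # strip the leading indentation git writes so configparser accepts each
    # key directly instead of treating it as a continuation line
    cleaned = '\n'.join(line.strip() for line in raw.splitlines())
    cfg = configparser.ConfigParser()
    cfg.read_string(cleaned)
    return cfg

raw = '''\
[submodule "lib"]
    path = lib
    url = https://example.com/lib.git
'''
cfg = parse_gitmodules(raw)
for sec_name in cfg.sections():
    if sec_name.startswith('submodule'):
        print(cfg.get(sec_name, 'path'), cfg.get(sec_name, 'url'))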
Example #22
def _fetch_srcs(opts, cache_dir, revision, desc=None, refspecs=None):
    """
    invokes a git fetch call of the configured origin into a bare repository

    With a provided cache directory (``cache_dir``; bare repository), fetch the
    contents of a configured origin into the directory. The fetch call will
    use a restricted depth, unless configured otherwise. In the event a target
    revision cannot be found (if provided), an unshallow fetch will be made.

    This call may be invoked without a revision provided -- specifically, this
    can occur for submodule configurations which do not have a specific revision
    explicitly set.

    Args:
        opts: fetch options
        cache_dir: the bare repository to fetch into
        revision: expected revision desired from the repository
        desc (optional): description to use for error message
        refspecs (optional): additional refspecs to add to the fetch call

    Returns:
        ``True`` if the fetch was successful; ``False`` otherwise
    """

    git_dir = '--git-dir=' + cache_dir

    if not desc:
        desc = 'repository: {}'.format(opts.name)

    log('fetching most recent sources')
    prepared_fetch_cmd = [
        git_dir,
        'fetch',
        '--progress',
        '--prune',
        'origin',
    ]

    # limit fetch depth
    target_depth = 1
    if opts._git_depth is not None:
        target_depth = opts._git_depth
    limited_fetch = (target_depth
                     and 'releng.git.no_depth' not in opts._quirks)

    depth_cmds = [
        '--depth',
        str(target_depth),
    ]

    # if a revision is provided, first attempt to do a revision-specific fetch
    quick_fetch = 'releng.git.no_quick_fetch' not in opts._quirks
    if revision and quick_fetch:
        ls_cmd = [
            'ls-remote',
            '--exit-code',
            'origin',
        ]
        debug('checking if tag exists on remote')
        if GIT.execute(ls_cmd + ['--tags', 'refs/tags/{}'.format(revision)],
                       cwd=cache_dir,
                       quiet=True):
            debug('attempting a tag reference fetch operation')
            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append('+refs/tags/{0}:refs/tags/{0}'.format(revision))
            if limited_fetch:
                fetch_cmd.extend(depth_cmds)

            if GIT.execute(fetch_cmd, cwd=cache_dir):
                debug('found the reference')
                return True

        debug('checking if reference exists on remote')
        if GIT.execute(ls_cmd + ['--heads', 'refs/heads/{}'.format(revision)],
                       cwd=cache_dir,
                       quiet=True):
            debug('attempting a head reference fetch operation')
            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append(
                '+refs/heads/{0}:refs/remotes/origin/{0}'.format(revision))
            if limited_fetch:
                fetch_cmd.extend(depth_cmds)

            if GIT.execute(fetch_cmd, cwd=cache_dir):
                debug('found the reference')
                return True

    # fetch standard (and configured) refspecs
    std_refspecs = [
        '+refs/heads/*:refs/remotes/origin/*',
        '+refs/tags/*:refs/tags/*',
    ]
    prepared_fetch_cmd.extend(std_refspecs)

    # allow fetching additional references if configured (e.g. pull requests)
    if refspecs:
        for ref in refspecs:
            prepared_fetch_cmd.append(
                '+refs/{0}:refs/remotes/origin/{0}'.format(ref))

    fetch_cmd = list(prepared_fetch_cmd)
    if limited_fetch:
        fetch_cmd.extend(depth_cmds)

    if not GIT.execute(fetch_cmd, cwd=cache_dir):
        err('unable to fetch branches/tags from remote repository')
        return False

    if revision:
        verbose('verifying target revision exists')
        exists_state = revision_exists(git_dir, revision)
        if exists_state in REVISION_EXISTS:
            pass
        elif (exists_state == GitExistsType.MISSING_HASH and limited_fetch
              and opts._git_depth is None):
            warn('failed to find hash on depth-limited fetch; fetching all...')

            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append('--unshallow')

            if not GIT.execute(fetch_cmd, cwd=cache_dir):
                err('unable to unshallow fetch state')
                return False

            if revision_exists(git_dir, revision) not in REVISION_EXISTS:
                err(
                    'unable to find matching revision in {}\n'
                    ' (revision: {})', desc, revision)
                return False
        else:
            err('unable to find matching revision in {}\n'
                ' (revision: {})', desc, revision)
            return False

    return True
Example #23
def stage(engine, pkg):
    """
    handles the extraction stage for a package

    With a provided engine and package instance, the extraction stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being extracted

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    # packages flagged for local sources do not have an extraction stage
    if pkg.local_srcs:
        return True

    # skip packages flagged not to extract
    if pkg.no_extraction:
        return True

    note('extracting {}...', pkg.name)
    sys.stdout.flush()

    extract_opts = RelengExtractOptions()
    replicate_package_attribs(extract_opts, pkg)
    extract_opts.cache_dir = pkg.cache_dir
    extract_opts.cache_file = pkg.cache_file
    extract_opts.ext = pkg.ext_modifiers
    extract_opts.name = pkg.name
    extract_opts.revision = pkg.revision
    extract_opts.strip_count = pkg.strip_count
    extract_opts.version = pkg.version
    extract_opts._extract_override = engine.opts.extract_override
    extract_opts._quirks = engine.opts.quirks

    if os.path.exists(pkg.build_dir):
        warn('build directory exists before extraction; removing')

        if not path_remove(pkg.build_dir):
            err('unable to cleanup build directory: ' + pkg.build_dir)
            return False

    # prepare and step into a newly created working directory
    #
    # An extractor will take the contents of an archive, cache directory or
    # other fetched content and populate the "work" directory. On successful
    # extraction (or moving resources), the work directory will be moved to the
    # package's respective build directory.
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir:
        with interim_working_dir(work_dir):
            extract_opts.work_dir = work_dir

            extracter = None
            hash_exclude = []
            extract_types = engine.registry.extract_types
            if pkg.extract_type and pkg.extract_type in extract_types:
                def _(opts):
                    return extract_types[pkg.extract_type].extract(
                        pkg.extract_type, opts)
                extracter = _
            elif pkg.vcs_type in extract_types:
                extracter = extract_types[pkg.vcs_type].extract
            elif pkg.vcs_type == VcsType.GIT:
                extracter = extract_git
            elif pkg.vcs_type == VcsType.HG:
                extracter = extract_mercurial
            elif os.path.isfile(pkg.cache_file):
                cache_basename = os.path.basename(pkg.cache_file)
                hash_exclude.append(cache_basename)
                extracter = extract_archive

            if not extracter:
                err('extract type is not implemented: {}', pkg.vcs_type)
                return False

            # perform the extract request
            extracted = extracter(extract_opts)
            if not extracted:
                return False

            result = verify_hashes(pkg.hash_file, work_dir, hash_exclude)
            if result == HashResult.VERIFIED:
                pass
            elif result == HashResult.BAD_PATH:
                if not pkg.is_internal:
                    warn('missing hash file for package: ' + pkg.name)
            elif result == HashResult.EMPTY:
                if not pkg.is_internal:
                    verbose('hash file for package is empty: ' + pkg.name)
            elif result in (HashResult.BAD_FORMAT, HashResult.MISMATCH,
                    HashResult.MISSING_LISTED, HashResult.UNSUPPORTED):
                return False
            else:
                err('invalid extract operation (internal error; '
                    'hash-check failure: {})', result)
                return False

        debug('extraction successful; moving sources into package output '
            'directory: ' + pkg.build_dir)
        shutil.move(work_dir, pkg.build_dir)

    return True
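# For reference, the dispatch above only requires an "extracter" to be a
# callable that accepts the populated extract options and returns a boolean.
# Below is a minimal sketch of a compatible callable; the attribute names
# (cache_file, work_dir) mirror the options populated earlier in this
# example, and the plain-tar behavior is illustrative, not releng-tool API.
import tarfile

def extract_plain_tar(opts):
    """unpack a fetched tar archive into the prepared work directory"""
    try:
        with tarfile.open(opts.cache_file) as tar:
            tar.extractall(path=opts.work_dir)
        return True
    except (OSError, tarfile.TarError):
        return False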
Example #24
def fetch(opts):
    """
    support fetching from bzr sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file; ``None`` if the fetch stage has failed
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not BZR.exists():
        err('unable to fetch package; bzr is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    export_opts = [
        'export',
        cache_file,
        site,
        '--format=tgz',
        '--root=' + name,
        '--revision=' + revision,
    ]

    # some environments may have issues exporting bzr sources due to
    # certificate problems; this quirk allows injecting certifi-provided
    # certificates for all bzr exports
    if 'releng.bzr.certifi' in opts._quirks:
        global CERTIFI_MISSING_WARNED

        if certifi:
            verbose('performing bzr fetch with certifi certificates')
            pkg_site = certifi.where()
            export_opts.append('-Ossl.ca_certs=' + pkg_site)
        elif not CERTIFI_MISSING_WARNED:
            CERTIFI_MISSING_WARNED = True
            warn('''\
unable to perform bzr fetch with certifi certificates

A quirk has been enabled to export bzr images using certifi
certificates; however, certifi is not installed on this system.
''')

    log('exporting sources')
    if not BZR.execute(export_opts, poll=True):
        err('unable to export module')
        return None

    return cache_file
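# Outside of releng-tool's tool wrappers, the export assembled above maps to
# a plain `bzr export` invocation. A minimal standalone sketch, assuming the
# `bzr` client is available on the host's PATH:
import subprocess

def bzr_export(cache_file, site, name, revision):
    # standalone equivalent of the BZR.execute(export_opts) call above
    cmd = [
        'bzr', 'export', cache_file, site,
        '--format=tgz',
        '--root=' + name,
        '--revision=' + revision,
    ]
    return subprocess.call(cmd) == 0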
Example #25
def stage(engine, pkg, script_env):
    """
    handles the patching stage for a package

    With a provided engine and package instance, the patching stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being patched
        script_env: script environment information

    Returns:
        ``True`` if the patching stage is completed; ``False`` otherwise
    """

    if pkg.is_internal:
        # packages flagged for local sources do not have a patch stage
        if pkg.local_srcs:
            return True

        # internal packages in development mode that specify a development
        # revision will not perform the patch stage
        if engine.opts.devmode and pkg.has_devmode_option:
            return True

    note('patching {}...', pkg.name)
    sys.stdout.flush()

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    patch_script_filename = '{}-{}'.format(pkg.name, PATCH_SCRIPT)
    patch_script = os.path.join(pkg.def_dir, patch_script_filename)
    if os.path.isfile(patch_script):
        try:
            run_path(patch_script, init_globals=script_env)

            verbose('patch script executed: ' + patch_script)
        except Exception as e:
            err('error running patch script: {}\n'
                '    {}', patch_script, e)
            return False

    # find all patches in the package's folder, sort and apply each
    patch_glob = os.path.join(pkg.def_dir, '*.patch')
    patches = glob(patch_glob)
    if patches:
        patches = sorted(patches)
        if not PATCH.exists():
            err('unable to apply patches; patch is not installed')
            return False

        for patch in patches:
            print('({})'.format(os.path.basename(patch)))

            if not PATCH.execute([
                    '--batch',
                    '--forward',
                    '--ignore-whitespace',
                    '--input={}'.format(patch),
                    '--strip=1',
                    ], cwd=build_dir):
                err('failed to apply patch')
                return False

    return True
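# The PATCH.execute call above corresponds to a conventional GNU patch
# invocation. A minimal standalone sketch, assuming `patch` is available on
# the host's PATH:
import subprocess

def apply_patch(patch_file, build_dir):
    # standalone equivalent of the PATCH.execute call above
    cmd = [
        'patch',
        '--batch',
        '--forward',
        '--ignore-whitespace',
        '--input={}'.format(patch_file),
        '--strip=1',
    ]
    return subprocess.call(cmd, cwd=build_dir) == 0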
Example #26
    def check(self, quiet=False):
        """
        check for the existence of required tools for the loaded package set

        For each loaded package, a series of required host tools will be checked
        and a caller will be notified whether or not anything is missing.

        Args:
            quiet (optional): whether or not to suppress output (defaults to
                ``False``)

        Returns:
            ``True`` if all known required tools exist; ``False`` otherwise
        """

        missing = set()
        pkg_types = set()
        python_interpreters = set()
        vcs_types = set()

        # package-defined requirements check
        for pkg in self.pkgs:
            pkg_types.add(pkg.type)
            vcs_types.add(pkg.vcs_type)

            if pkg.type == PackageType.AUTOTOOLS:
                if pkg.autotools_autoreconf:
                    if AUTORECONF.exists():
                        self._verbose_exists(AUTORECONF)
                    else:
                        missing.add(AUTORECONF.tool)

            elif pkg.type == PackageType.PYTHON:
                if pkg.python_interpreter:
                    python_tool = PythonTool(pkg.python_interpreter)
                else:
                    python_tool = PYTHON
                python_interpreters.add(python_tool)

        if PackageType.AUTOTOOLS in pkg_types:
            if MAKE.exists():
                self._verbose_exists(MAKE)
            else:
                missing.add(MAKE.tool)

        if PackageType.CMAKE in pkg_types:
            if CMAKE.exists():
                self._verbose_exists(CMAKE)
            else:
                missing.add(CMAKE.tool)

        if PackageType.PYTHON in pkg_types:
            for interpreter in python_interpreters:
                if interpreter.exists():
                    self._verbose_exists(interpreter)
                else:
                    missing.add(interpreter.tool)

        if VcsType.BZR in vcs_types:
            if BZR.exists():
                self._verbose_exists(BZR)
            else:
                missing.add(BZR.tool)

        if VcsType.CVS in vcs_types:
            if CVS.exists():
                self._verbose_exists(CVS)
            else:
                missing.add(CVS.tool)

        if VcsType.GIT in vcs_types:
            if GIT.exists():
                self._verbose_exists(GIT)
            else:
                missing.add(GIT.tool)

        if VcsType.HG in vcs_types:
            if HG.exists():
                self._verbose_exists(HG)
            else:
                missing.add(HG.tool)

        if VcsType.RSYNC in vcs_types:
            if RSYNC.exists():
                self._verbose_exists(RSYNC)
            else:
                missing.add(RSYNC.tool)

        if VcsType.SCP in vcs_types:
            if SCP.exists():
                self._verbose_exists(SCP)
            else:
                missing.add(SCP.tool)

        if VcsType.SVN in vcs_types:
            if SVN.exists():
                self._verbose_exists(SVN)
            else:
                missing.add(SVN.tool)

        # project-provided tools check
        for tool in self.tools:
            if which(tool):
                verbose('prerequisite exists: ' + tool)
            else:
                missing.add(tool)

        if missing and not quiet:
            msg = 'missing the following host tools for this project:\n\n'
            for entry in sorted(missing):
                msg += ' ' + entry + '\n'
            err(msg)

        return len(missing) == 0
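    # The `_verbose_exists` helper used throughout this example is not shown
    # above; a minimal sketch, assuming each tool wrapper exposes its command
    # name through a `tool` attribute (as the `missing.add(XXX.tool)` calls
    # suggest):
    def _verbose_exists(self, tool):
        # report a prerequisite that has been found on the host system
        verbose('prerequisite exists: ' + tool.tool)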
Example #27
    def load(self, names):
        """
        load one or more packages from the provided collection of names

        Attempts to load and return a series of ordered package instances using
        the collection of names provided. Each name will be used to find a
        package definition on the system. Package scripts are found, loaded and
        parsed. Packages with dependencies will have their dependent packages
        loaded as well (either explicitly from the ``names`` or
        implicitly from the package's configuration file). The returned package
        list will be an ordered package list based on configured dependencies
        outlined in the user's package definitions. When package dependencies do
        not play a role in the required order of the releng process, a
        first-configured first-returned approach is used.

        Args:
            names: the names of packages to load

        Returns:
            an ordered list of packages to use

        Raises:
            RelengToolInvalidPackageConfiguration: when an error has been
                                                    detected loading the package
        """
        pkgs = OrderedDict()
        final_deps = {}

        # cycle through all pending packages until the complete list is known
        names_left = list(names)
        while names_left:
            name = names_left.pop(0)

            # attempt to load the package from a user defined external directory
            pkg = None
            for pkg_dir in self.opts.extern_pkg_dirs:
                pkg_script = os.path.join(pkg_dir, name, name)
                pkg_script, pkg_script_exists = opt_file(pkg_script)
                if pkg_script_exists:
                    pkg, env, deps = self.load_package(name, pkg_script)
                    break

            # if a package location has not been found, finally check the
            # default package directory
            if not pkg:
                pkg_script = os.path.join(self.opts.default_pkg_dir, name,
                                          name)
                pkg_script, _ = opt_file(pkg_script)

                pkg, env, deps = self.load_package(name, pkg_script)

            pkgs[pkg.name] = pkg
            for dep in deps:
                # if this is an unknown package and is not in our current list,
                # append it to the list of names to process
                if dep == name:
                    raise RelengToolCyclicPackageDependency({
                        'pkg_name': name,
                    })
                elif dep not in pkgs:
                    if dep not in names_left:
                        verbose('adding implicitly defined package: {}', dep)
                        names_left.append(dep)

                    if pkg not in final_deps:
                        final_deps[pkg] = []
                    final_deps[pkg].append(dep)
                else:
                    pkg.deps.append(pkgs[dep])
            extend_script_env(self.script_env, env)

        # for packages which have a dependency but have not been bound yet,
        # bind the dependencies now
        for pkg, deps in final_deps.items():
            for dep in deps:
                assert pkgs[dep]
                pkg.deps.append(pkgs[dep])

        debug('sorting packages...')

        def fetch_deps(pkg):
            return pkg.deps

        sorter = TopologicalSorter(fetch_deps)
        sorted_pkgs = []
        for pkg in pkgs.values():
            sorted_pkgs = sorter.sort(pkg)
            if sorted_pkgs is None:
                raise RelengToolCyclicPackageDependency({
                    'pkg_name': name,
                })
        debug('sorted packages:')
        for pkg in sorted_pkgs:
            debug(' {}', pkg.name)

        return sorted_pkgs
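# The TopologicalSorter used above is consumed through a small contract:
# sort() folds a node and its dependencies (depth-first) into one accumulated
# order, returning the order so far, or None once a cycle is detected. Below
# is a minimal sketch written against only that observed contract;
# releng-tool's own implementation may differ.
class SimpleTopologicalSorter:
    def __init__(self, fetch_deps):
        self._fetch_deps = fetch_deps  # callable returning a node's deps
        self._order = []
        self._active = set()
        self._visited = set()

    def sort(self, node):
        # returns the accumulated order, or None when a cycle is detected
        return self._order if self._visit(node) else None

    def _visit(self, node):
        if node in self._visited:
            return True
        if node in self._active:
            return False  # dependency cycle
        self._active.add(node)
        for dep in self._fetch_deps(node):
            if not self._visit(dep):
                return False
        self._active.discard(node)
        self._visited.add(node)
        self._order.append(node)
        return True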
Example #28
def main():
    """
    mainline

    The mainline for the releng tool.

    Returns:
        the exit code
    """
    retval = 1

    try:
        parser = argparse.ArgumentParser(prog='releng-tool',
                                         add_help=False,
                                         usage=usage())

        parser.add_argument('--assets-dir')
        parser.add_argument('--cache-dir')
        parser.add_argument('--config')
        parser.add_argument('--debug', action='store_true')
        parser.add_argument('--development', '-D', action='store_true')
        parser.add_argument('--dl-dir')
        parser.add_argument('--force', '-F', action='store_true')
        parser.add_argument('--help', '-h', action='store_true')
        parser.add_argument('--help-quirks', action='store_true')
        parser.add_argument('--images-dir')
        parser.add_argument('--jobs',
                            '-j',
                            default=0,
                            type=type_nonnegativeint)
        parser.add_argument('--local-sources',
                            '-L',
                            nargs='?',
                            action='append')
        parser.add_argument('--nocolorout', action='store_true')
        parser.add_argument('--out-dir')
        parser.add_argument('--root-dir')
        parser.add_argument('--quirk', action='append')
        parser.add_argument('--verbose', '-V', action='store_true')
        parser.add_argument('--version',
                            action='version',
                            version='%(prog)s ' + releng_version)

        known_args = sys.argv[1:]
        forward_args = []
        idx = known_args.index('--') if '--' in known_args else -1
        if idx != -1:
            forward_args = known_args[idx + 1:]
            known_args = known_args[:idx]

        args, unknown_args = parser.parse_known_args(known_args)
        if args.help:
            print(usage())
            sys.exit(0)
        if args.help_quirks:
            print(usage_quirks())
            sys.exit(0)

        # force verbose messages if debugging is enabled
        if args.debug:
            args.verbose = True

        releng_log_configuration(args.debug, args.nocolorout, args.verbose)

        # toggle on ansi colors by default for commands
        if not args.nocolorout:
            os.environ['CLICOLOR_FORCE'] = '1'

            # support ANSI character sequences (for color output on win32 cmd)
            if sys.platform == 'win32':
                enable_ansi_win32()

        verbose('releng-tool {}', releng_version)
        debug('({})', __file__)

        # extract additional argument information:
        #  - pull the action value
        #  - pull "exec" command (if applicable)
        #  - key-value entries to be injected into the running
        #     script/working environment
        new_args, unknown_args = process_args(unknown_args)
        args.action = new_args['action']
        args.action_exec = new_args['exec']
        args.injected_kv = new_args['entries']

        # register any injected entry into the working environment right away
        for k, v in args.injected_kv.items():
            os.environ[k] = v

        if unknown_args:
            warn('unknown arguments: {}', ' '.join(unknown_args))

        if forward_args:
            debug('forwarded arguments: {}', ' '.join(forward_args))

        # warn if the *nix-based system is running as root; ill-formed projects
        # may attempt to modify the local system's root
        if sys.platform != 'win32':
            if os.geteuid() == 0:  # pylint: disable=E1101
                if 'RELENG_IGNORE_RUNNING_AS_ROOT' not in os.environ:
                    # attempt to check if we are in a container; if so, skip
                    # generating a warning -- we check whether kernel threads
                    # are running as PID 2; if not, we are most likely inside
                    # a container environment; this check does not have to be
                    # perfect, it only tries to improve a user's experience
                    # (suppressing the warning when not running on a typical
                    # host setup)
                    try:
                        with open('/proc/2/status') as f:
                            inside_container = 'kthreadd' not in f.read()
                    except IOError:
                        inside_container = True

                    if not inside_container:
                        warn('running as root; this may be unsafe')

        # prepare engine options
        opts = RelengEngineOptions(args=args, forward_args=forward_args)

        # create and start the engine
        engine = RelengEngine(opts)
        try:
            if engine.run():
                retval = 0
        except RelengToolException as e:
            err(e)
    except KeyboardInterrupt:
        print('')

    return retval
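# A typical module entry point invoking the mainline above (sys is already
# imported by this module):
if __name__ == '__main__':
    sys.exit(main())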
Example #29
    def load_package(self, name, script):
        """
        load a package definition

        Attempts to load a package definition of a given ``name`` from the
        provided ``script`` location. The script will be examined for required
        and optional configuration keys. On a successful execution/parsing, a
        package object will be returned along with other meta information; on
        error, a ``RelengToolInvalidPackageConfiguration`` exception is raised.

        Args:
            name: the package name
            script: the package script to load

        Returns:
            a tuple of three (3) containing the package instance, the
            extracted environment/globals from the package script and a list of
            known package dependencies

        Raises:
            RelengToolInvalidPackageConfiguration: when an error has been
                                                    detected loading the package
        """
        verbose('loading package: {}', name)
        debug('script {}', script)
        opts = self.opts

        if not os.path.isfile(script):
            raise RelengToolMissingPackageScript({
                'pkg_name': name,
                'script': script,
            })

        pkg_def_dir = os.path.abspath(os.path.join(script, os.pardir))
        self.script_env['PKG_DEFDIR'] = pkg_def_dir

        try:
            env = run_script(script, self.script_env, catch=False)
        except Exception as e:
            raise RelengToolInvalidPackageScript({
                'description': str(e),
                'script': script,
                'traceback': traceback.format_exc(),
            })

        self._active_package = name
        self._active_env = env

        # prepare helper expand values
        expand_extra = {}

        # version/revision extraction first
        #
        # Attempt to check the version first since it will be the most commonly
        # used package field -- it is better to initially fail on a simple
        # field (for new packages and/or developers) than to break on a
        # possibly more complex field below. Note that the version field is
        # optional, in cases where a package type does not need a version
        # entry (e.g. sites which do not require a version value for
        # fetching, or where a revision value is used instead).
        #
        # Note that when in development mode, the development-mode revision
        # (if any is set) needs to be checked as well. This value may override
        # the package's version value.

        # version
        pkg_version = self._fetch(Rpk.VERSION)

        if not pkg_version:
            pkg_version = ''

        pkg_version_key = pkg_key(name, Rpk.VERSION)
        expand_extra[pkg_version_key] = pkg_version

        # development mode revision
        pkg_has_devmode_option = False
        pkg_devmode_revision = self._fetch(Rpk.DEVMODE_REVISION,
                                           allow_expand=True,
                                           expand_extra=expand_extra)

        if pkg_devmode_revision:
            pkg_has_devmode_option = True

            if opts.revision_override and name in opts.revision_override:
                pkg_devmode_revision = opts.revision_override[name]

            if opts.devmode:
                pkg_version = pkg_devmode_revision
                expand_extra[pkg_version_key] = pkg_version

        # revision
        if opts.revision_override and name in opts.revision_override:
            pkg_revision = opts.revision_override[name]
        else:
            pkg_revision = self._fetch(Rpk.REVISION,
                                       allow_expand=True,
                                       expand_extra=expand_extra)
        if opts.devmode and pkg_devmode_revision:
            pkg_revision = pkg_devmode_revision
        elif not pkg_revision:
            pkg_revision = pkg_version

        # site / vcs-site detection
        #
        # After extracting the required version information, the site / VCS
        # type needs to be checked next. This allows the manager to detect
        # early whether a version/revision field is required, and to fail
        # early if one has not been detected above.

        # site
        if opts.sites_override and name in opts.sites_override:
            # Site overriding is permitted to help in scenarios where a builder
            # is unable to acquire a package's source from the defined site.
            # This includes firewall settings or a desire to use a mirrored
            # source when experiencing network connectivity issues.
            pkg_site = opts.sites_override[name]
        else:
            pkg_site = self._fetch(Rpk.SITE,
                                   allow_expand=True,
                                   expand_extra=expand_extra)

        # On Windows, if a file site is provided, ensure the path value is
        # converted to a posix-styled path, to prevent issues with `urlopen`
        # being provided an unescaped path string
        if sys.platform == 'win32' and \
                pkg_site and pkg_site.startswith('file://'):
            pkg_site = pkg_site[len('file://'):]
            abs_site = os.path.isabs(pkg_site)
            pkg_site = pkg_site.replace(os.sep, posixpath.sep)
            if abs_site:
                pkg_site = '/' + pkg_site
            pkg_site = 'file://' + pkg_site

        # vcs-type
        pkg_vcs_type = None
        pkg_vcs_type_raw = self._fetch(Rpk.VCS_TYPE)
        if pkg_vcs_type_raw:
            pkg_vcs_type_raw = pkg_vcs_type_raw.lower()
            if pkg_vcs_type_raw in VcsType:
                pkg_vcs_type = pkg_vcs_type_raw
            elif pkg_vcs_type_raw in self.registry.fetch_types:
                pkg_vcs_type = pkg_vcs_type_raw
            else:
                raise RelengToolUnknownVcsType({
                    'pkg_name': name,
                    'pkg_key': pkg_key(name, Rpk.VCS_TYPE),
                })

        if not pkg_vcs_type:
            if pkg_site:
                site_lc = pkg_site.lower()
                if site_lc.startswith('bzr+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.BZR
                elif site_lc.startswith('cvs+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.CVS
                elif site_lc.startswith((
                        ':ext:',
                        ':extssh:',
                        ':gserver:',
                        ':kserver:',
                        ':pserver:',
                )):
                    pkg_vcs_type = VcsType.CVS
                elif site_lc.startswith('git+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.GIT
                elif site_lc.endswith('.git'):
                    pkg_vcs_type = VcsType.GIT
                elif site_lc.startswith('hg+'):
                    pkg_site = pkg_site[3:]
                    pkg_vcs_type = VcsType.HG
                elif site_lc.startswith('rsync+'):
                    pkg_site = pkg_site[6:]
                    pkg_vcs_type = VcsType.RSYNC
                elif site_lc.startswith('scp+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.SCP
                elif site_lc.startswith('svn+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.SVN
                elif site_lc == 'local':
                    pkg_vcs_type = VcsType.LOCAL
                else:
                    pkg_vcs_type = VcsType.URL
            else:
                pkg_vcs_type = VcsType.NONE

        if pkg_vcs_type == VcsType.LOCAL:
            warn('package using local content: {}', name)

        # check if the detected vcs type needs a revision, and fail if we do
        # not have one
        if not pkg_revision and pkg_vcs_type in (
                VcsType.BZR,
                VcsType.CVS,
                VcsType.GIT,
                VcsType.HG,
                VcsType.SVN,
        ):
            raise RelengToolMissingPackageRevision({
                'pkg_name': name,
                'pkg_key1': pkg_key(name, Rpk.VERSION),
                'pkg_key2': pkg_key(name, Rpk.REVISION),
                'vcs_type': pkg_vcs_type,
            })

        # archive extraction strip count
        pkg_strip_count = self._fetch(Rpk.STRIP_COUNT,
                                      default=DEFAULT_STRIP_COUNT)

        # build subdirectory
        pkg_build_subdir = self._fetch(Rpk.BUILD_SUBDIR)

        # dependencies
        deps = self._fetch(Rpk.DEPS, default=[])

        # ignore cache
        pkg_devmode_ignore_cache = self._fetch(Rpk.DEVMODE_IGNORE_CACHE)

        # extension (override)
        pkg_filename_ext = self._fetch(Rpk.EXTENSION)

        # extract type
        pkg_extract_type = self._fetch(Rpk.EXTRACT_TYPE)
        if pkg_extract_type:
            pkg_extract_type = pkg_extract_type.lower()

            if pkg_extract_type not in self.registry.extract_types:
                raise RelengToolUnknownExtractType({
                    'pkg_name': name,
                    'pkg_key': pkg_key(name, Rpk.EXTRACT_TYPE),
                })

        # is-external
        pkg_is_external = self._fetch(Rpk.EXTERNAL)

        # is-internal
        pkg_is_internal = self._fetch(Rpk.INTERNAL)

        # no extraction
        pkg_no_extraction = self._fetch(Rpk.NO_EXTRACTION)

        # skip any remote configuration
        pkg_skip_remote_config = self._fetch(Rpk.SKIP_REMOTE_CONFIG)

        # skip any remote scripts
        pkg_skip_remote_scripts = self._fetch(Rpk.SKIP_REMOTE_SCRIPTS)

        # type
        pkg_type = None
        pkg_type_raw = self._fetch(Rpk.TYPE)
        if pkg_type_raw:
            pkg_type_raw = pkg_type_raw.lower()
            if pkg_type_raw in PackageType:
                pkg_type = pkg_type_raw
            elif pkg_type_raw in self.registry.package_types:
                pkg_type = pkg_type_raw
            else:
                raise RelengToolUnknownPackageType({
                    'pkg_name': name,
                    'pkg_key': pkg_key(name, Rpk.TYPE),
                })

        if not pkg_type:
            pkg_type = PackageType.SCRIPT

        # ######################################################################

        # git configuration options for a repository
        pkg_git_config = self._fetch(Rpk.GIT_CONFIG)

        # git-depth
        pkg_git_depth = self._fetch(Rpk.GIT_DEPTH)

        # git-refspecs
        pkg_git_refspecs = self._fetch(Rpk.GIT_REFSPECS)

        # git-submodules
        pkg_git_submodules = self._fetch(Rpk.GIT_SUBMODULES)

        # git-verify
        pkg_git_verify_revision = self._fetch(Rpk.GIT_VERIFY_REVISION)

        # ######################################################################

        # checks
        if pkg_is_external is not None and pkg_is_internal is not None:
            if pkg_is_external == pkg_is_internal:
                raise RelengToolConflictingConfiguration({
                    'pkg_name': name,
                    'pkg_key1': pkg_key(name, Rpk.EXTERNAL),
                    'pkg_key2': pkg_key(name, Rpk.INTERNAL),
                    'desc': 'package flagged as external and internal',
                })
        elif pkg_is_external is not None:
            pkg_is_internal = not pkg_is_external
        elif pkg_is_internal is not None:
            pass
        elif opts.default_internal_pkgs:
            pkg_is_internal = True
        else:
            pkg_is_internal = False

        # check a site is defined for vcs types which require it
        if not pkg_site and pkg_vcs_type in (
                VcsType.BZR,
                VcsType.CVS,
                VcsType.GIT,
                VcsType.HG,
                VcsType.RSYNC,
                VcsType.SCP,
                VcsType.SVN,
                VcsType.URL,
        ):
            raise RelengToolMissingPackageSite({
                'pkg_name': name,
                'pkg_key': pkg_key(name, Rpk.SITE),
                'vcs_type': pkg_vcs_type,
            })

        # list of supported DVCS types
        SUPPORTED_DVCS = [
            VcsType.GIT,
            VcsType.HG,
        ]
        is_pkg_dvcs = (pkg_vcs_type in SUPPORTED_DVCS)

        # find possible extension for a cache file
        #
        # non-DVCS types will always be gzip-tar'ed
        if pkg_vcs_type in (
                VcsType.BZR,
                VcsType.CVS,
                VcsType.RSYNC,
                VcsType.SVN,
        ):
            cache_ext = 'tgz'
        # DVCS types will not have an extension
        elif is_pkg_dvcs:
            cache_ext = None
        # non-vcs type does not have an extension
        elif pkg_vcs_type in (VcsType.LOCAL, VcsType.NONE):
            cache_ext = None
        else:
            cache_ext = None
            url_parts = urlparse(pkg_site)

            if opts.cache_ext_transform:
                # Allow a configuration to override the target cache file's
                # extension based on the package's site path (for unique path
                # scenarios).
                cache_ext = opts.cache_ext_transform(url_parts.path)

            if not cache_ext:
                if pkg_filename_ext:
                    cache_ext = pkg_filename_ext
                else:
                    basename = os.path.basename(url_parts.path)
                    __, cache_ext = interpret_stem_extension(basename)

        # prepare package container and directory locations
        #
        # The container folder for a package will typically be a combination of
        # a package's name plus version. If no version is set, the container
        # will only use the package's name. We try to use the version entry
        # when possible to help manage multiple versions of output (e.g. to
        # avoid conflicts when bumping versions).
        #
        # When the version value is used, we will attempt to cleanup/minimize
        # the version to help provide the container a more "sane" path. For
        # instance, if a version references a path-styled branch name (e.g.
        # `bugfix/my-bug`), we want to avoid promoting a container name which
        # can result in a sub-directory being made (e.g. `pkg-bugfix/my-bug/`).
        if pkg_version:
            pkg_nv = '{}-{}'.format(
                name, ''.join(x if (x.isalnum() or x in '-._') else '_'
                              for x in pkg_version))
        else:
            pkg_nv = name

        pkg_build_output_dir = os.path.join(opts.build_dir, pkg_nv)

        if pkg_vcs_type == VcsType.LOCAL:
            pkg_build_dir = pkg_def_dir
        else:
            pkg_build_dir = pkg_build_output_dir

        # check if an internal package is configured to point to a local
        # directory for sources
        pkg_local_srcs = False
        if pkg_is_internal and opts.local_srcs:
            # specific package name reference in the local sources; either is
            # set to the path to use, or is set to `None` to indicate that
            # this package should not be retrieved locally
            if name in opts.local_srcs:
                if opts.local_srcs[name]:
                    pkg_build_dir = opts.local_srcs[name]
                    pkg_local_srcs = True

            # check if the "global" local sources path exists; either set to
            # a specific path, or set to `None` to indicate that it will use
            # the parent path based off the root directory
            elif GBL_LSRCS in opts.local_srcs:
                if opts.local_srcs[GBL_LSRCS]:
                    container_dir = opts.local_srcs[GBL_LSRCS]
                else:
                    container_dir = os.path.dirname(opts.root_dir)

                pkg_build_dir = os.path.join(container_dir, name)
                pkg_local_srcs = True

            if pkg_build_dir == opts.root_dir:
                raise RelengToolConflictingLocalSrcsPath({
                    'pkg_name': name,
                    'root': opts.root_dir,
                    'path': pkg_build_dir,
                })

        if pkg_build_subdir:
            pkg_build_subdir = os.path.join(pkg_build_dir, pkg_build_subdir)

        cache_dir = os.path.join(opts.dl_dir, name)
        if cache_ext:
            pkg_cache_file = os.path.join(cache_dir, pkg_nv + '.' + cache_ext)
        else:
            pkg_cache_file = os.path.join(cache_dir, pkg_nv)

        # Select sources (like CMake-based projects) may wish to be using
        # out-of-source tree builds. For supported project types, adjust the
        # build output directory to a sub-folder of the originally assumed
        # output folder.
        if pkg_type == PackageType.CMAKE:
            pkg_build_output_dir = os.path.join(pkg_build_output_dir,
                                                'releng-output')

        # determine the build tree for a package
        #
        # A build tree (introduced for the libfoo-exec action), tracks the
        # directory where build commands would typically be executed for a
        # package on a host system. In most cases, this will be set to the
        # same path as `pkg_build_dir` (or the sub-directory, if provided);
        # however, some package types may have a better working directory
        # for build commands. For example, CMake projects will generate a
        # build package in an out-of-source directory (e.g.
        # `pkg_build_output_dir`), which is a better place to issue commands
        # such as "cmake --build .".
        if pkg_type == PackageType.CMAKE:
            pkg_build_tree = pkg_build_output_dir
        elif pkg_build_subdir:
            pkg_build_tree = pkg_build_subdir
        else:
            pkg_build_tree = pkg_build_dir

        # determine the package directory for this package
        #
        # Typically, a package's "cache directory" will be stored in the output
        # folder's "cache/<pkg-name>" path. However, having package-name driven
        # cache folder targets does not provide an easy way to manage sharing
        # caches between projects if they share the same content (either the
        # same site or sharing submodules). Cache targets for packages will be
        # stored in a database and can be used here to decide if a package's
        # cache will actually be stored in a different container.
        pkg_cache_dir = os.path.join(opts.cache_dir, name)
        if is_pkg_dvcs:
            ckey = pkg_cache_key(pkg_site)

            pkg_cache_dirname = name

            # if the default cache directory exists, always prioritize it (and
            # force update the cache location)
            if os.path.exists(pkg_cache_dir):
                self._dvcs_cache[name] = name
            # if the cache content is stored in another container, use it
            elif ckey in self._dvcs_cache:
                pkg_cache_dirname = self._dvcs_cache[ckey]
                verbose('alternative cache path for package: {} -> {}', name,
                        pkg_cache_dirname)

            # track ckey entry to point to our cache container
            #
            # This package's "ckey" will be used to cache the target folder
            # being used for this package, so other packages with matching site
            # values could use it. In the rare case that the "ckey" entry
            # already exists but points to a folder other than our target one,
            # leave it as is (assume ownership of the key is managed by
            # another package).
            if ckey not in self._dvcs_cache:
                self._dvcs_cache[ckey] = pkg_cache_dirname

            # adjust the cache directory and save any new cache changes
            pkg_cache_dir = os.path.join(opts.cache_dir, pkg_cache_dirname)
            self._save_dvcs_cache()

        # (commons)
        pkg = RelengPackage(name, pkg_version)
        pkg.asc_file = os.path.join(pkg_def_dir, name + '.asc')
        pkg.build_dir = pkg_build_dir
        pkg.build_output_dir = pkg_build_output_dir
        pkg.build_subdir = pkg_build_subdir
        pkg.build_tree = pkg_build_tree
        pkg.cache_dir = pkg_cache_dir
        pkg.cache_file = pkg_cache_file
        pkg.def_dir = pkg_def_dir
        pkg.devmode_ignore_cache = pkg_devmode_ignore_cache
        pkg.extract_type = pkg_extract_type
        pkg.git_config = pkg_git_config
        pkg.git_depth = pkg_git_depth
        pkg.git_refspecs = pkg_git_refspecs
        pkg.git_submodules = pkg_git_submodules
        pkg.git_verify_revision = pkg_git_verify_revision
        pkg.has_devmode_option = pkg_has_devmode_option
        pkg.hash_file = os.path.join(pkg_def_dir, name + '.hash')
        pkg.is_internal = pkg_is_internal
        pkg.local_srcs = pkg_local_srcs
        pkg.no_extraction = pkg_no_extraction
        pkg.revision = pkg_revision
        pkg.site = pkg_site
        pkg.skip_remote_config = pkg_skip_remote_config
        pkg.skip_remote_scripts = pkg_skip_remote_scripts
        pkg.strip_count = pkg_strip_count
        pkg.type = pkg_type
        pkg.vcs_type = pkg_vcs_type

        self._apply_postinit_options(pkg)

        # (additional environment helpers)
        for env in (os.environ, env):
            env[pkg_key(name, 'BUILD_DIR')] = pkg_build_dir
            env[pkg_key(name, 'BUILD_OUTPUT_DIR')] = pkg_build_output_dir
            env[pkg_key(name, 'DEFDIR')] = pkg_def_dir
            env[pkg_key(name, 'NAME')] = name
            env[pkg_key(name, 'REVISION')] = pkg_revision
        os.environ[pkg_key(name, Rpk.VERSION)] = pkg_version

        # (internals)
        prefix = '.releng_tool-stage-'
        outdir = pkg.build_output_dir
        pkg._ff_bootstrap = os.path.join(outdir, prefix + 'bootstrap')
        pkg._ff_build = os.path.join(outdir, prefix + 'build')
        pkg._ff_configure = os.path.join(outdir, prefix + 'configure')
        pkg._ff_extract = os.path.join(outdir, prefix + 'extract')
        pkg._ff_install = os.path.join(outdir, prefix + 'install')
        pkg._ff_license = os.path.join(outdir, prefix + 'license')
        pkg._ff_patch = os.path.join(outdir, prefix + 'patch')
        pkg._ff_post = os.path.join(outdir, prefix + 'post')

        # dump package attributes if running in a debug mode
        if opts.debug:
            info = {}
            for key, value in pkg.__dict__.items():
                if not key.startswith('_'):
                    info[key] = value

            debug(
                '''package-data: {}
==============================
{}
==============================''', name, pprint.pformat(info))

        return pkg, env, deps
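# For context, the script consumed by this loader is a plain Python file
# whose globals follow the `<NAME>_<OPTION>` naming produced by `pkg_key`.
# A hypothetical `libfoo` package definition, with an illustrative (not
# exhaustive) set of keys:
LIBFOO_VERSION = '1.0'
LIBFOO_SITE = 'https://example.com/releases/libfoo-1.0.tar.gz'
LIBFOO_DEPENDENCIES = []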
Example #30
    def load(self, name, ignore=True):
        """
        load the provided extension into the registry

        Attempts to load an extension with the provided name value. If an
        extension which is already loaded in the registry is provided, the
        request to load the specific extension is ignored. If an extension could
        not be loaded, a warning is generated and this method will return
        ``False``.

        Args:
            name: name of the extension to load
            ignore (optional): ignore if the detected extension could not be
                                loaded (default: True)

        Returns:
            whether or not the extension was loaded in the registry
        """

        # ignore if extension is already loaded
        if name in self.extension:
            return True

        loaded = False
        debug('attempting to load extension: {}', name)
        try:
            try:
                plugin = import_module(name)
            except RelengModuleNotFoundError:
                # python 2.7 may not be able to load from a nested path; try
                # searching through each package (if a nested module)
                if sys.version_info >= (3, 0) or '.' not in name:
                    raise

                # split the module into parts and for each part, check to see
                # if it's a package directory; if so, keep going until the last
                # namespace package
                ext_parts = name.split('.')
                path = None
                for part in ext_parts[:-1]:
                    file, pathname, desc = imp.find_module(part, path)

                    if desc[-1] != imp.PKG_DIRECTORY:
                        raise ImportError(name)

                    pkg = imp.load_module(part, file, pathname, desc)
                    path = pkg.__path__

                # with the path of the last namespace package found, find the
                # desired module in this path
                last_part = ext_parts[-1]
                file, pathname, desc = imp.find_module(last_part, path)
                plugin = imp.load_module(last_part, file, pathname, desc)

            if hasattr(plugin, 'releng_setup'):
                if not ignore:
                    plugin.releng_setup(self)
                    loaded = True
                else:
                    try:
                        plugin.releng_setup(self)
                        loaded = True
                    except RelengInvalidSetupException as e:
                        warn(
                            'extension is not supported '
                            'due to an invalid setup: {}\n'
                            ' ({})', name, e)
                    except RelengVersionNotSupportedException as e:
                        warn(
                            'extension is not supported '
                            'with this version: {}\n'
                            ' ({})', name, e)

                if loaded:
                    self.extension.append(name)
                    verbose('loaded extension: {}', name)
            else:
                warn('extension does not have a setup method: {}', name)
        except RelengModuleNotFoundError:
            warn('unable to find extension: {}', name)

        return loaded
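# The only contract this loader enforces is that an extension module exposes
# a `releng_setup` callable accepting the registry instance. A minimal sketch
# of such a module is below; the `add_extract_type` registration hook is an
# assumed registry method, shown for illustration only.
def releng_setup(app):
    def extract_custom(name, opts):
        # populate opts.work_dir from the fetched content here
        return True

    # hypothetical registration call; consult the registry API for actual
    # hook names
    app.add_extract_type('ext-custom', extract_custom)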