Example #1
def create_dependencies_overlay(staging_path: str, overlay_path: str):
    """
    Create the dependencies overlay from staging_path.

    :param str staging_path: Path where all the dependencies
    have been installed/extracted to
    :param str overlay_path: Path of overlay output file
    (.tar.gz)
    """
    dep_staging_path = Path(staging_path)
    dep_tar_gz_path = Path(overlay_path)
    logger.info('Dependencies changed, updating {}'.format(
        str(dep_tar_gz_path)))

    shellscript_dest = dep_staging_path / 'setup.sh'
    _render_template(Path('v2_setup.jinja2.sh'), shellscript_dest,
                     _CONTEXT_VAR_SH)
    shellscript_dest.chmod(0o755)

    shellscript_dest_bash = dep_staging_path / 'setup.bash'
    _render_template(Path('v2_setup.jinja2.sh'), shellscript_dest_bash,
                     _CONTEXT_VAR_BASH)
    shellscript_dest_bash.chmod(0o755)

    if dep_tar_gz_path.exists():
        dep_tar_gz_path.unlink()
    recursive_tar_gz_in_path(dep_tar_gz_path, dep_staging_path)
Example #2
def update_symlinks(base_path):
    """
    Update all symlinks inside of base_path to be relative.

    Recurse through the path and update all symlinks to be relative, except
    symlinks to libc. This is because we want our applications to call into
    the libraries we are bundling. We do not bundle libc and want to use the
    system's version, so we should not update those. Copy any other libraries,
    not found in the bundle, into the bundle so that relative symlinks work.

    :param base_path: Directory that will be recursed
    """
    logger.info('Updating symlinks in {base_path}'.format_map(locals()))
    encoding = sys.getfilesystemencoding()
    dpkg_libc_paths = subprocess.check_output(
        ['dpkg', '-L', 'libc6']).decode(encoding).strip()
    libc_paths = set(dpkg_libc_paths.split('\n'))

    for root, subdirs, files in os.walk(base_path):
        for name in itertools.chain(subdirs, files):
            symlink_path = os.path.join(root, name)
            if os.path.islink(symlink_path) and os.path.isabs(
                    os.readlink(symlink_path)):
                symlink_dest_path = os.readlink(symlink_path)
                if symlink_dest_path in libc_paths:
                    # We don't want to update symlinks which are pointing to
                    # libc
                    continue
                else:
                    logger.info(
                        'Symlink: {symlink_path} points to: '
                        '{symlink_dest_path}'.format_map(locals()))
                    bundle_library_path = os.path.join(base_path,
                                                       symlink_dest_path[1:])
                    if os.path.exists(bundle_library_path):
                        # Dep is already installed, update symlink
                        logger.info(
                            'Linked file is already in bundle at {}, '
                            'updating symlink...'.format(bundle_library_path))
                    else:
                        # Dep is not installed, we need to copy it...
                        logger.info(
                            'Linked file is not in bundle, copying and '
                            'updating symlink...')
                        # Create parent directories as needed
                        # (exist_ok avoids an explicit pre-check)
                        os.makedirs(os.path.dirname(bundle_library_path),
                                    exist_ok=True)
                        shutil.copy(symlink_dest_path, bundle_library_path)

                    # os.path.relpath treats its start argument as a
                    # directory, so compute the target relative to the
                    # directory containing the symlink
                    relative_path = os.path.relpath(
                        bundle_library_path, os.path.dirname(symlink_path))
                    logger.info(
                        'bundle_library_path {} relative path {}'.format(
                            bundle_library_path, relative_path))
                    os.remove(symlink_path)
                    os.symlink(relative_path, symlink_path)
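
The relative target above is computed from the symlink's own directory, because os.path.relpath treats its start argument as a directory. A minimal sketch of that computation, using hypothetical paths:

import os

# Hypothetical bundle layout: a symlink in the bundle points at an absolute
# library path, and the library has been copied into the bundle.
base_path = '/bundle'
symlink_path = '/bundle/usr/lib/libfoo.so'
symlink_dest_path = '/lib/x86_64/libbar.so'
bundle_library_path = os.path.join(base_path, symlink_dest_path[1:])

# The start must be the directory containing the symlink; starting from the
# symlink file itself would add one spurious '..' and escape the bundle.
relative_path = os.path.relpath(bundle_library_path,
                                os.path.dirname(symlink_path))
print(relative_path)  # ../../lib/x86_64/libbar.so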
Example #3
def rewrite_catkin_package_path(base_path):
    """
    Update catkin/profile.d to use correct shebangs.

    :param base_path: Path to the bundle staging directory
    """
    # TODO: This should be in the ros package
    import re
    python_regex = re.compile('/usr/bin/python')
    logger.info('Starting shebang update...')

    ros_distribution_version = get_ros_distribution_version()
    profiled_path = os.path.join(base_path, 'opt', 'ros',
                                 ros_distribution_version, 'etc', 'catkin',
                                 'profile.d', '1.ros_package_path.sh')
    if os.path.isfile(profiled_path):
        with open(profiled_path, 'rb+') as file_handle:
            contents = file_handle.read()
            try:
                str_contents = contents.decode()
            except UnicodeError:
                logger.error(
                    '{profiled_path} should be a text file'.format_map(
                        locals()))
                return
            replacement_tuple = python_regex.subn('python',
                                                  str_contents,
                                                  count=1)
            if replacement_tuple[1] > 0:
                logger.info(
                    'Found direct python invocation in {profiled_path}'.
                    format_map(locals()))
                file_handle.seek(0)
                file_handle.truncate()
                file_handle.write(replacement_tuple[0].encode())
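
The rewrite relies on re.subn, which returns the new string together with the number of substitutions made; count=1 restricts the rewrite to the first occurrence. A small self-contained sketch, with hypothetical file contents:

import re

python_regex = re.compile('/usr/bin/python')
contents = 'export _CATKIN_SETUP_DIR\nexec /usr/bin/python "$@"\n'
new_contents, num_subs = python_regex.subn('python', contents, count=1)
print(num_subs)      # 1
print(new_contents)  # the direct /usr/bin/python invocation is now 'python'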
Example #4
    def _fetch_packages(self):  # noqa: D102
        from apt.cache import FetchFailedException
        from apt.package import FetchError
        packages = []
        source_fetch_failures = []
        for package in self._cache:
            if package.marked_install:
                if self.include_sources:
                    package_version = package.versions[0]
                    try:
                        package_version.fetch_source(destdir=self.sources_path,
                                                     unpack=False)
                    except ValueError:
                        source_fetch_failures.append(package.name)
                        logger.error('No sources available for {}'.format(
                            package.name))
                    except (FetchFailedException, FetchError) as e:
                        source_fetch_failures.append(package.name)
                        logger.error('Failed to fetch sources for {}'.format(
                            package.name))
                        logger.error(e)
                packages.append(package.name)
        logger.info('Fetching packages: {packages}'.format_map(locals()))
        self._cache.fetch_archives()

        if len(source_fetch_failures) > 0:
            self.metadata['missing_sources'] = source_fetch_failures
Example #5
    def run_installers(self, *, include_sources=False):
        """
        Invoke all installers to install packages into the bundle.

        :param include_sources: creates a sources tarball
        for all packages being installed that have sources
        available
    :return: True if all packages installed in the
    bundle are the same as the previous run
        """
        print('Collecting dependency information...')
        logger.info('Collecting dependency information...')

        print('Fetching and installing dependencies...')
        logger.info('Fetching and installing dependencies...')
        installer_metadata = {}
        for name, installer in self.installers.items():
            installer_metadata[name] = installer.install()

        installer_metadata_string = json.dumps(installer_metadata,
                                               sort_keys=True)

        installer_metadata_path = self._path_context.installer_metadata_path()
        dependency_match = False
        if os.path.exists(installer_metadata_path):
            with open(installer_metadata_path, 'r') as f:
                previous_metadata = f.read()
                if previous_metadata == installer_metadata_string:
                    dependency_match = True

        with open(installer_metadata_path, 'w') as f:
            f.write(installer_metadata_string)

        if include_sources:
            sources_tar_gz_path = self._path_context.sources_tar_gz_path()
            with tarfile.open(sources_tar_gz_path, 'w:gz',
                              compresslevel=5) as archive:
                for name, directory in self.installer_cache_dirs.items():
                    sources_path = os.path.join(directory, 'sources')
                    if not os.path.exists(sources_path):
                        continue
                    for filename in os.listdir(sources_path):
                        file_path = os.path.join(sources_path, filename)
                        archive.add(file_path,
                                    arcname=os.path.join(
                                        name, os.path.basename(file_path)))

        update_symlinks(self.prefix_path)
        # TODO: Update pkgconfig files?
        update_shebang(self.prefix_path)
        # TODO: Move this to colcon-ros-bundle
        rewrite_catkin_package_path(self.prefix_path)

        return dependency_match
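
The dependency check above works because json.dumps with sort_keys=True produces a canonical string for the installer metadata, so a plain string comparison against the cached file only detects real changes. A sketch with hypothetical metadata:

import json

previous = json.dumps({'apt': ['libfoo'], 'pip': ['requests']},
                      sort_keys=True)
current = json.dumps({'pip': ['requests'], 'apt': ['libfoo']},
                     sort_keys=True)
assert previous == current  # key order alone does not trigger a rebuild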
Example #6
def recursive_tar_gz_in_path(output_path: Path, tar_path: Path):
    """
    Create a tar.gz archive of all files inside a directory.

    This function includes all sub-folders of path in the root of the tarfile

    :param output_path: Name of archive file to create
    :param tar_path: path to recursively collect all files and include in
    tar.gz. These will be included with path as the root of the archive.
    """
    with tarfile.open(str(output_path), mode='w:gz', compresslevel=5) as tar:
        logger.info('Creating tar of {path}'.format(path=tar_path))
        for child in tar_path.iterdir():
            tar.add(str(child), arcname=str(child.name))
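
A minimal usage sketch for the function above, assuming a hypothetical staging directory: the children of the directory end up at the root of the archive, not the directory itself.

from pathlib import Path

# Archive everything under ./staging into deps.tar.gz; 'staging' itself is
# not part of the member names, only its children are.
recursive_tar_gz_in_path(Path('deps.tar.gz'), Path('staging'))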
Example #7
    def install(self):  # noqa: D102
        if len(self._packages) == 0 and self.additional_requirements is None:
            logger.info('No dependencies to install for {}'.format(
                os.path.basename(self._python_path)))
            return {'installed_packages': []}

        logger.info('Installing pip dependencies...')

        requirements_file = os.path.join(self._cache_path, 'requirements')
        metadata_file = os.path.join(self._cache_path, 'metadata')

        if self.additional_requirements is not None:
            logger.info('Installing additional Python requirements from '
                        '{req} into {path}'.format(
                            req=self.additional_requirements,
                            path=self._python_path))
            with open(self.additional_requirements) as req:
                for requirement in req.readlines():
                    self._packages.append(requirement)

        if os.path.exists(requirements_file) and os.path.exists(metadata_file):
            with open(requirements_file, 'r') as req:
                existing_requirements = list(map(str.strip, req.readlines()))
                if sorted(existing_requirements) == sorted(self._packages):
                    logger.info('No changes detected for {}'.format(
                        self._python_path))
                    with open(metadata_file, 'r') as f:
                        metadata = json.load(f)
                        return metadata

        python_pip_args = [self._python_path, '-m', 'pip']
        pip_install_args = python_pip_args + ['install']
        subprocess.check_call(pip_install_args +
                              ['-U', 'pip', 'setuptools==44.0.0'])

        with open(requirements_file, 'w') as req:
            for name in self._packages:
                req.write(name.strip() + '\n')

        pip_args = []
        pip_args += pip_install_args
        pip_args += (self._pip_args or [])
        pip_args += ['--default-timeout=100']
        pip_args += ['--ignore-installed', '-r', requirements_file]
        subprocess.check_call(pip_args)

        # https://pip.pypa.io/en/stable/reference/pip_download/
        if self.context.args.include_sources:
            sources_path = os.path.join(self._cache_path, 'sources')
            download_args = python_pip_args + [
                'download', '--no-binary', ':all:', '-d', sources_path, '-r',
                requirements_file
            ]
            subprocess.check_call(download_args)

        metadata = self._generate_metadata(python_pip_args)
        with open(metadata_file, 'w') as f:
            json.dump(metadata, f)
        return metadata
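
The cache check above compares the requirement lists order-insensitively, so merely reordering requirements does not force a reinstall. A sketch with hypothetical requirements:

cached = ['boto3', 'requests==2.25.1']
requested = ['requests==2.25.1', 'boto3']
if sorted(cached) == sorted(requested):
    print('No changes detected')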
Example #8
def recursive_tar_gz_in_path(output_path, path):
    """
    Create a tar.gz archive of all files inside a directory.

    This function includes all sub-folders of path in the root of the tarfile

    :param output_path: Name of archive file to create
    :param path: path to recursively collect all files and include in
    tar.gz. These will be included with path as the root of the archive.
    """
    with tarfile.open(output_path, mode='w:gz', compresslevel=5) as tar:
        logger.info('Creating tar of {path}'.format(path=path))
        for name in os.listdir(path):
            some_path = os.path.join(path, name)
            tar.add(some_path, arcname=os.path.basename(some_path))
Example #9
def generate_archive_v2(path_context: PathContext, metadata_paths: List[str],
                        dependencies_changed: bool):
    """
    Generate bundle archive v2.

    This archive is a tarfile that contains multiple compressed archives:

    output.tar
    |- version
    |- metadata.tar.gz
    |- pad (optional)
    |- dependencies.tar.gz
    |- workspace.tar.gz

    :param path_context: PathContext object including all path configurations
    :param metadata_paths: [str] paths to files which should be included
    in the metadata archive
    :param dependencies_changed: Boolean representing whether the staging path
    needs to be re-archived
    """
    logger.info('Archiving the bundle output')
    print('Creating bundle archive V2...')
    logger.debug('Start: workspace.tar.gz')
    workspace_tar_gz_path = path_context.workspace_overlay_path()
    create_workspace_overlay(path_context.install_base(),
                             path_context.workspace_staging_path(),
                             workspace_tar_gz_path)
    logger.debug('End: workspace.tar.gz')

    logger.debug('Start: dependencies.tar.gz')
    dependencies_overlay_path = path_context.dependencies_overlay_path()
    if dependencies_changed:
        create_dependencies_overlay(path_context.dependencies_staging_path(),
                                    dependencies_overlay_path)
        update_dependencies_cache(path_context)
    logger.debug('End: dependencies.tar.gz')

    logger.debug('Start: bundle.tar')
    with Bundle(name=path_context.bundle_v2_output_path()) as bundle:
        for path in metadata_paths:
            bundle.add_metadata(path)
        bundle.add_overlay_archive(dependencies_overlay_path)
        bundle.add_overlay_archive(workspace_tar_gz_path)
    logger.debug('End: bundle.tar')

    logger.info('Archiving complete')
    print('Archiving complete!')
    _mark_cache_valid(path_context)
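
The resulting outer archive can be inspected with the standard tarfile module; its members should match the layout in the docstring. A sketch assuming a hypothetical output path:

import tarfile

with tarfile.open('output.tar', 'r') as bundle:
    print(bundle.getnames())
    # e.g. ['version', 'metadata.tar.gz', 'dependencies.tar.gz',
    #       'workspace.tar.gz']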
Example #10
def _recursive_tar_in_path(tar_path, path, *, mode='w'):
    """
    Tar all files inside a directory.

    This function includes all sub-folders of path in the root of the tarfile

    :param tar_path: The output path
    :param path: path to recursively collect all files and include in
    tar
    :param mode: mode flags passed to tarfile
    """
    with tarfile.open(tar_path, mode) as tar:
        logger.info('Creating tar of {path}'.format(path=path))
        for name in os.listdir(path):
            some_path = os.path.join(path, name)
            tar.add(some_path, arcname=os.path.basename(some_path))
Example #11
def update_shebang(path):
    """
    Search for python shebangs in path and all sub-paths.

    It then replaces them with /usr/bin/env.
    env does not support parameters, so we need to do something
    else if python is invoked with parameters.

    :param path: Path to the directory in which to replace shebangs
    """
    # TODO: We should handle scripts that have parameters in the shebang
    # TODO: We should handle scripts that invoke other /usr/bin executables
    py3_shebang_regex = re.compile(r'#!\s*.+python3')
    py_shebang_regex = re.compile(r'#!\s*.+python')
    sh_shebang_regex = re.compile(r'#!\s*.+sh')
    logger.info('Starting shebang update...')
    for (root, dirs, files) in os.walk(path):
        for file in files:
            file_path = os.path.join(root, file)
            if not os.path.islink(file_path) and \
                    '.so' not in os.path.basename(file_path) and \
                    'README' not in os.path.basename(file_path):
                with open(file_path, 'rb+') as file_handle:
                    contents = file_handle.read()
                    try:
                        str_contents = contents.decode()
                    except UnicodeError:
                        continue
                    py3_replacement_tuple = py3_shebang_regex.subn(
                        '#!/usr/bin/env python3', str_contents, count=1)
                    if py3_replacement_tuple[1] > 0:
                        logger.info('Found shebang in {file_path}'.format_map(
                            locals()))
                        file_handle.seek(0)
                        file_handle.truncate()
                        file_handle.write(py3_replacement_tuple[0].encode())
                        continue

                    py_replacement_tuple = py_shebang_regex.subn(
                        '#!/usr/bin/env python', str_contents, count=1)
                    if py_replacement_tuple[1] > 0:
                        logger.info('Found shebang in {file_path}'.format_map(
                            locals()))
                        file_handle.seek(0)
                        file_handle.truncate()
                        file_handle.write(py_replacement_tuple[0].encode())
                        continue

                    sh_replacement_tuple = sh_shebang_regex.subn(
                        '#!/usr/bin/env sh', str_contents, count=1)
                    if sh_replacement_tuple[1] > 0:
                        logger.info('Found shebang in {file_path}'.format_map(
                            locals()))
                        file_handle.seek(0)
                        file_handle.truncate()
                        file_handle.write(sh_replacement_tuple[0].encode())
Example #12
def update_shebang(path):
    """
    Search and replace shebangs in path and all sub-paths with /usr/bin/env.

    `env` does not support parameters, so parameters are removed.
    Environment variables should be used instead of parameters
    (specifically for Python).

    :param path: Path to directory with files to replace shebang in.
    """
    # Parse the shebang
    shebang_regex = re.compile(r'^#!\s*\S*.')
    # Shebangs in templates are a special case we need to handle.
    # Example: #!@PYTHON_EXECUTABLE@
    template_shebang_regex = re.compile(r'^#!\s*@\S*.@')
    # Parse the command to execute in the shebang
    cmd_regex = re.compile(r'([^\/]*)\/*$')
    logger.info('Starting shebang update...')
    for (root, dirs, files) in os.walk(path):
        for file in files:
            file_path = os.path.join(root, file)
            if not os.path.islink(file_path) and \
                    '.so' not in os.path.basename(file_path) and \
                    'README' not in os.path.basename(file_path):
                with open(file_path, 'rb+') as file_handle:
                    contents = file_handle.read()
                    try:
                        str_contents = contents.decode()
                    except UnicodeError:
                        continue
                    template_shebang_match = template_shebang_regex.match(
                        str_contents)
                    if template_shebang_match:
                        logger.debug('Skipping templated shebang')
                        continue
                    shebang_match = shebang_regex.match(str_contents)
                    if shebang_match:
                        shebang_str = shebang_match.group(0)
                        logger.info('Found shebang in {}'.format(file_path))
                        shebang_command = cmd_regex.search(shebang_str)
                        if not shebang_command:
                            logger.warning(
                              'Unable to find shebang command in {}. '
                              'It may be malformed.'.format(file_path))
                            continue
                        shebang_command = shebang_command.group(0)
                        if shebang_command.strip() == ENV_COMMAND:
                            logger.debug('Valid shebang for {}. '
                                         'Skipping.'.format(file_path))
                            continue
                        logger.info('Modifying shebang for {}'.format(
                            file_path))
                        result, _ = shebang_regex.subn(
                          '#!/usr/bin/env {}'.format(shebang_command),
                          str_contents,
                          count=1
                        )
                        file_handle.seek(0)
                        file_handle.truncate()
                        file_handle.write(result.encode())
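
To see how the two regexes cooperate, here is an in-memory sketch of the rewrite; the sample script contents are hypothetical:

import re

shebang_regex = re.compile(r'^#!\s*\S*.')
cmd_regex = re.compile(r'([^\/]*)\/*$')

str_contents = '#!/usr/bin/python3\nprint("hello")\n'
shebang_str = shebang_regex.match(str_contents).group(0)
# cmd_regex keeps only the last path component, i.e. the command name
shebang_command = cmd_regex.search(shebang_str).group(0)
result, _ = shebang_regex.subn(
    '#!/usr/bin/env {}'.format(shebang_command), str_contents, count=1)
print(result.splitlines()[0])  # #!/usr/bin/env python3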
Example #13
def rewrite_catkin_package_path(base_path):
    """
    Update catkin/profile.d to use correct shebangs.

    :param base_path: Path to the bundle staging directory
    """
    # TODO: This should be in the ros package
    import re
    python_regex = re.compile('/usr/bin/python')
    logger.info('Starting shebang update...')

    ros_distribution_version = get_ros_distribution_version()
    # These files contain references to /usr/bin/python that need
    # to be converted to avoid errors when setting up the ROS workspace
    files = ['1.ros_package_path.sh', '10.ros.sh']

    profiled_path = os.path.join(
        base_path, 'opt', 'ros', ros_distribution_version,
        'etc', 'catkin', 'profile.d')

    for file in map(lambda s: os.path.join(profiled_path, s), files):
        if os.path.isfile(file):
            with open(file, 'rb+') as file_handle:
                contents = file_handle.read()
                try:
                    str_contents = contents.decode()
                except UnicodeError:
                    logger.error(
                        '{file} should be a text file'.format_map(
                            locals()))
                    continue
                replacement_tuple = python_regex.subn('python', str_contents)
                if replacement_tuple[1] > 0:
                    logger.info(
                        'Found direct python invocation in {file}'
                        .format_map(locals()))
                    file_handle.seek(0)
                    file_handle.truncate()
                    file_handle.write(replacement_tuple[0].encode())
Example #14
def generate_archive_v1(path_context):
    """
    Generate bundle archive.

    output.tar.gz
    |- version
    |- metadata.tar
    |- bundle.tar

    :param path_context: PathContext object including path configurations
    """
    # install_base: Directory with built artifacts from the workspace
    install_base = path_context.install_base()
    # staging_path: Directory where all dependencies have been installed
    staging_path = path_context.dependencies_staging_path()

    logger.info('Copying {} into bundle...'.format(install_base))
    assets_directory = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'assets')
    shellscript_path = os.path.join(assets_directory, 'v1_setup.sh')
    shutil.copy2(shellscript_path, os.path.join(staging_path, 'setup.sh'))
    os.chmod(os.path.join(staging_path, 'setup.sh'), 0o755)
    bundle_workspace_install_path = os.path.join(staging_path, 'opt',
                                                 'install')
    if os.path.exists(bundle_workspace_install_path):
        shutil.rmtree(bundle_workspace_install_path)
    shutil.copytree(install_base, bundle_workspace_install_path)

    logger.info('Archiving the bundle output')
    print('Creating bundle archive...')

    bundle_tar_path = path_context.bundle_tar_path()
    metadata_tar_path = path_context.metadata_tar_path()
    archive_tar_gz_path = path_context.bundle_v1_output_path()

    with tarfile.open(metadata_tar_path, 'w') as archive:
        archive.add(path_context.installer_metadata_path(),
                    arcname='installers.json')

    if os.path.exists(bundle_tar_path):
        os.remove(bundle_tar_path)

    recursive_tar_in_path(bundle_tar_path, staging_path)

    version_file_path = path_context.version_file_path()
    with open(version_file_path, 'w') as v:
        v.write('1')

    with tarfile.open(archive_tar_gz_path, 'w:gz', compresslevel=5) as archive:
        archive.add(version_file_path, arcname='version')
        archive.add(metadata_tar_path,
                    arcname=os.path.basename(metadata_tar_path))
        archive.add(bundle_tar_path, arcname=os.path.basename(bundle_tar_path))

    os.remove(metadata_tar_path)
    os.remove(bundle_tar_path)

    logger.info('Archiving complete')
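
The v1 output is a gzipped tar whose members match the layout in the docstring; a quick inspection sketch, assuming a hypothetical output path:

import tarfile

with tarfile.open('output.tar.gz', 'r:gz') as archive:
    print(archive.getnames())  # ['version', 'metadata.tar', 'bundle.tar']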
Example #15
def create_dependencies_overlay(staging_path, overlay_path):
    """
    Create the dependencies overlay from staging_path.

    :param str staging_path: Path where all the dependencies
    have been installed/extracted to
    :param str overlay_path: Path of overlay output file
    (.tar.gz)
    """
    dependencies_staging_path = staging_path
    dependencies_tar_gz_path = overlay_path

    logger.info(
        'Dependencies changed, updating {}'.format(dependencies_tar_gz_path))
    assets_directory = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'assets')
    shellscript_path = os.path.join(assets_directory, 'v2_setup.sh')
    shutil.copy2(shellscript_path,
                 os.path.join(dependencies_staging_path, 'setup.sh'))
    if os.path.exists(dependencies_tar_gz_path):
        os.remove(dependencies_tar_gz_path)
    recursive_tar_gz_in_path(dependencies_tar_gz_path,
                             dependencies_staging_path)
Example #16
def generate_archive_v1(install_base, staging_path, bundle_base):
    """
    Generate bundle archive.

    output.tar.gz
    |- version
    |- metadata.tar
    |- bundle.tar

    :param install_base: Directory with built artifacts from the workspace
    :param staging_path: Directory where all dependencies have been installed
    :param bundle_base: Directory to place the output of this function
    """
    logger.info('Copying {} into bundle...'.format(install_base))
    assets_directory = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'assets')
    shellscript_path = os.path.join(assets_directory, 'v1_setup.sh')
    shutil.copy2(shellscript_path, os.path.join(staging_path, 'setup.sh'))
    bundle_workspace_install_path = os.path.join(staging_path, 'opt',
                                                 'install')
    if os.path.exists(bundle_workspace_install_path):
        shutil.rmtree(bundle_workspace_install_path)
    shutil.copytree(install_base, bundle_workspace_install_path)

    logger.info('Archiving the bundle output')
    print('Creating bundle archive...')

    bundle_tar_path = os.path.join(bundle_base, 'bundle.tar')
    metadata_tar_path = os.path.join(bundle_base, 'metadata.tar')
    archive_tar_gz_path = os.path.join(bundle_base, 'output.tar.gz')

    with tarfile.open(metadata_tar_path, 'w') as archive:
        archive.add(os.path.join(bundle_base, 'installer_metadata.json'),
                    arcname='installers.json')

    if os.path.exists(bundle_tar_path):
        os.remove(bundle_tar_path)

    _recursive_tar_in_path(bundle_tar_path, staging_path)

    version_file_path = os.path.join(bundle_base, 'version')
    with open(version_file_path, 'w') as v:
        v.write('1')

    with tarfile.open(archive_tar_gz_path, 'w:gz', compresslevel=5) as archive:
        archive.add(version_file_path, arcname='version')
        archive.add(metadata_tar_path,
                    arcname=os.path.basename(metadata_tar_path))
        archive.add(bundle_tar_path, arcname=os.path.basename(bundle_tar_path))

    os.remove(metadata_tar_path)
    os.remove(bundle_tar_path)

    logger.info('Archiving complete')
Example #17
    def add_to_install_list(self, name, metadata=None):  # noqa: D102
        logger.info('Adding {name} to install list'.format(name=name))
        package_key, version = self._separate_version_information(name)
        if not self.is_package_available(package_key):
            logger.error('Package {package_key} is not in the package '
                         'cache.'.format(package_key=package_key))
            raise PackageNotInCacheException(name)

        logger.info('Found these versions of {package_key}'.format(
            package_key=package_key))
        logger.info(self._cache[package_key].versions)

        package = self._cache[package_key]
        # This will fallback to the latest version available
        # if the specified version does not exist.
        candidate = package.versions.get(version, package.candidate)
        package.candidate = candidate
        package.mark_install(auto_fix=False, from_user=False)
Example #18
    def install(self):  # noqa: D102
        # There are certain packages we don't want to install because they
        # come with the base distribution of the OS. We remove them from
        # the install list here.
        with open(self.context.args.apt_package_blacklist, 'rt') as blacklist:
            blacklisted_packages = [line.rstrip('\n') for line in blacklist]

        for package_name in blacklisted_packages:
            try:
                self.remove_from_install_list(package_name)
            except KeyError:
                pass

        # Check for differences
        installed_cache_path = os.path.join(self._cache_dir, 'installed.json')
        if os.path.isfile(installed_cache_path):
            with open(installed_cache_path, 'r') as f:
                installed = set(json.loads(f.read()))
        else:
            installed = set()

        to_install = {
            package
            for package in self._cache if package.marked_install
        }
        intersection = installed.intersection(to_install)
        if len(intersection) == len(to_install) and len(intersection) == len(
                installed):
            return False
        else:
            logger.info('The install list of the bundle has changed...')

        self._fetch_packages()

        deb_cache = os.path.join(self._cache_dir, 'var', 'cache', 'apt',
                                 'archives')
        pkgs_abs_path = glob.glob(os.path.join(deb_cache, '*.deb'))
        print('Extracting apt packages...')
        for pkg in pkgs_abs_path:
            if os.path.basename(pkg) not in installed:
                try:
                    logger.info('Installing {package}'.format(package=pkg))
                    subprocess.check_call([
                        'dpkg-deb', '--extract', pkg, self.context.prefix_path
                    ])
                    installed.add(os.path.basename(pkg))
                except subprocess.CalledProcessError:
                    raise RuntimeError(
                        'Failed to extract {}'.format(pkg))

        with open(installed_cache_path, 'w') as f:
            f.write(json.dumps(list(installed)))

        installed_packages_metadata = []
        for package in self._cache:
            if package.marked_install:
                installed_packages_metadata.append({
                    'name': package.shortname,
                    'version': package.candidate.version
                })
        self.metadata['installed_packages'] = installed_packages_metadata

        return self.metadata
Example #19
def generate_archive_v2(install_base, dependencies_staging_path, bundle_base,
                        metadata_paths, dependencies_changed):
    """
    Generate bundle archive v2.

    This archive is a tarfile that contains multiple compressed archives:

    output.tar
    |- version
    |- metadata.tar.gz
    |- pad (optional)
    |- dependencies.tar.gz
    |- workspace.tar.gz

    :param install_base: Directory with built artifacts from the workspace
    :param dependencies_staging_path: Directory where all dependencies
    have been installed
    :param bundle_base: Directory to place the output of this function
    :param metadata_paths: [str] paths to files which should be included
    in the metadata archive
    :param dependencies_changed: Boolean representing whether the staging path
    needs to be re-archived
    """
    logger.info('Archiving the bundle output')
    print('Creating bundle archive V2...')

    archive_tar_path = os.path.join(bundle_base, 'output.tar')
    workspace_tar_gz_path = os.path.join(bundle_base, 'workspace.tar.gz')

    # Install directory
    workspace_staging_path = os.path.join(bundle_base, 'workspace_staging')
    workspace_install_path = os.path.join(workspace_staging_path, 'opt',
                                          'built_workspace')
    shutil.rmtree(workspace_staging_path, ignore_errors=True)
    assets_directory = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'assets')

    shellscript_path = os.path.join(assets_directory, 'v2_workspace_setup.sh')
    os.mkdir(workspace_staging_path)
    shutil.copy2(shellscript_path,
                 os.path.join(workspace_staging_path, 'setup.sh'))
    shutil.copytree(install_base, workspace_install_path)
    _recursive_tar_in_path(workspace_tar_gz_path,
                           workspace_staging_path,
                           mode='w:gz')

    # Dependencies directory
    dependencies_tar_gz_path = os.path.join(bundle_base, 'dependencies.tar.gz')
    if dependencies_changed:
        logger.info('Dependencies changed, updating {}'.format(
            dependencies_tar_gz_path))
        assets_directory = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'assets')
        shellscript_path = os.path.join(assets_directory, 'v2_setup.sh')
        shutil.copy2(shellscript_path,
                     os.path.join(dependencies_staging_path, 'setup.sh'))
        if os.path.exists(dependencies_tar_gz_path):
            os.remove(dependencies_tar_gz_path)
        _recursive_tar_in_path(dependencies_tar_gz_path,
                               dependencies_staging_path,
                               mode='w:gz')

    with Bundle(name=archive_tar_path) as bundle:
        for path in metadata_paths:
            bundle.add_metadata(path)
        bundle.add_overlay_archive(dependencies_tar_gz_path)
        bundle.add_overlay_archive(workspace_tar_gz_path)

    logger.info('Archiving complete')
    print('Archiving complete!')
Example #20
    def add_to_install_list(self, name, metadata=None):  # noqa: D102
        logger.info('Marking {name} for installation'.format_map(locals()))
        logger.info(self._cache[name].versions)
        self._cache[name].mark_install(auto_fix=False, from_user=False)