def main(self, *, context: CommandContext):  # noqa: D102
    # Entry point for the bundle verb: validates the workspace, resolves
    # dependencies, and produces a v1 or v2 bundle archive. Returns 0 on
    # success (CLI exit code convention).
    print('Bundling workspace...')
    upgrade_deps_graph = context.args.upgrade
    install_base = os.path.abspath(context.args.install_base)
    merge_install = context.args.merge_install
    bundle_base = os.path.abspath(context.args.bundle_base)
    bundle_version = context.args.bundle_version
    # The install prefix is produced by `colcon build`; bundling without it
    # cannot work, so fail fast with a clear message.
    if not os.path.exists(install_base):
        raise RuntimeError(
            'You must build your workspace before bundling it.')
    # Verifies the install tree matches the requested merged/isolated layout.
    check_and_mark_install_layout(
        install_base,
        merge_install=merge_install)
    # NOTE(review): attribute is spelled `_path_contex` (no trailing 't')
    # consistently across this file — do not "fix" in isolation.
    self._path_contex = PathContext(install_base, bundle_base, bundle_version)
    self._installer_manager = InstallerManager(self._path_contex)
    # True when the dependency overlay must be rebuilt.
    dependencies_changed = self._manage_dependencies(
        context, self._path_contex, upgrade_deps_graph)
    if context.args.bundle_version == 2:
        generate_archive_v2(self._path_contex,
                            [self._path_contex.installer_metadata_path()],
                            dependencies_changed)
    else:
        generate_archive_v1(self._path_contex)
    return 0
def test_initalize_bundle_base_does_exist(self, bundle_tool, cache_version,
                                          bundle_version):
    """Verify PathContext upgrade/version checks for an existing bundle base.

    The three parameters are patched callables (presumably injected by
    mock.patch decorators on this method or its class — confirm against the
    surrounding test module). Constructing a PathContext over a bundle base
    that already exists should trigger the previously-bundled code paths.
    """
    # Construct for side effects only; the instance itself is not needed.
    PathContext(self.install_base, self.bundle_base, 2)
    # Bundle-version check must see that this base was bundled before.
    bundle_version.assert_called_with(self.bundle_base,
                                      this_bundle_version=2,
                                      previously_bundled=True)
    cache_version.assert_called_with(self.bundle_base,
                                     previously_bundled=True)
    bundle_tool.assert_called_with(self.bundle_base)
def package_dependencies_changed(path_context: PathContext,
                                 decorators: List[PackageDecorator]):
    """
    Determine if workspace package dependencies have changed.

    Hashes the sorted direct run-dependencies of every selected package,
    serializes the per-package digests, and compares the result against the
    serialization persisted by the previous invocation. The fresh
    serialization is always written to the cache path so that
    update_dependencies_cache can later promote it. Transitive dependency
    updates coming from installers are not considered.

    :param PathContext path_context: paths to use
    :param PackageDecorators decorators: decorators that should be used
    to compare dependencies
    :return bool: True if package dependencies have changed
    """
    per_package_digest = {}
    for dec in decorators:
        if not dec.selected:
            continue
        descriptor = dec.descriptor
        run_deps = sorted(str(dep) for dep in descriptor.dependencies['run'])
        digest = hashlib.sha256(
            ' '.join(run_deps).encode('utf-8')).hexdigest()
        per_package_digest[descriptor.name] = digest

    # NOTE: the name `current_hash_string` is load-bearing — the debug
    # message below interpolates it via format_map(locals()).
    current_hash_string = json.dumps(per_package_digest, sort_keys=True)
    logger.debug('Hash for current dependencies: '
                 '{current_hash_string}'.format_map(locals()))

    hash_path = path_context.dependency_hash_path()
    cache_path = path_context.dependency_hash_cache_path()

    changed = False
    if os.path.exists(hash_path):
        with open(hash_path, 'r') as f:
            if f.read() != current_hash_string:
                changed = True

    # Stage the new serialization; it is promoted only after the overlay
    # has actually been rebuilt (see update_dependencies_cache).
    with open(cache_path, 'w') as f:
        f.write(current_hash_string)

    # A missing previous hash also counts as "changed" (first bundle).
    return changed or not os.path.exists(hash_path)
def update_dependencies_cache(path_context: PathContext):
    """
    Promote the latest set of logged dependencies.

    Moves the dependency hash staged by package_dependencies_changed over
    the authoritative hash file. The promotion is deferred until after the
    dependencies overlay has been built, which is why
    package_dependencies_changed only writes to the cache path and this
    function performs the atomic replace.

    :param path_context: paths to use
    """
    promoted_path = path_context.dependency_hash_path()
    staged_path = path_context.dependency_hash_cache_path()
    if os.path.exists(staged_path):
        # os.replace is atomic on POSIX and overwrites any existing file.
        os.replace(staged_path, promoted_path)
def generate_archive_v2(path_context: PathContext,
                        metadata_paths: List[str],
                        dependencies_changed: bool):
    """
    Generate bundle archive v2.

    This archive is a tarfile that contains multiple compressed archives:

    output.tar
    |- version
    |- metadata.tar.gz
    |- pad (optional)
    |- dependencies.tar.gz
    |- workspace.tar.gz

    The dependencies overlay is only re-created when dependencies_changed
    is True; otherwise the overlay from the previous bundle is reused.

    :param path_context: PathContext object including all path
    configurations
    :param metadata_paths: [str] paths to files which should be included
    in the metadata archive
    :param dependencies_changed: Boolean representing whether the staging
    path needs to be re-archived
    """
    logger.info('Archiving the bundle output')
    print('Creating bundle archive V2...')

    logger.debug('Start: workspace.tar.gz')
    workspace_overlay = path_context.workspace_overlay_path()
    create_workspace_overlay(path_context.install_base(),
                             path_context.workspace_staging_path(),
                             workspace_overlay)
    logger.debug('End: workspace.tar.gz')

    logger.debug('Start: dependencies.tar.gz')
    dependencies_overlay = path_context.dependencies_overlay_path()
    if dependencies_changed:
        create_dependencies_overlay(path_context.dependencies_staging_path(),
                                    dependencies_overlay)
        # Promote the staged dependency hash now that the overlay exists.
        update_dependencies_cache(path_context)
    logger.debug('End: dependencies.tar.gz')

    logger.debug('Start: bundle.tar')
    with Bundle(name=path_context.bundle_v2_output_path()) as bundle:
        for metadata_path in metadata_paths:
            bundle.add_metadata(metadata_path)
        bundle.add_overlay_archive(dependencies_overlay)
        bundle.add_overlay_archive(workspace_overlay)
    logger.debug('End: bundle.tar')

    logger.info('Archiving complete')
    print('Archiving complete!')
    _mark_cache_valid(path_context)
class BundleVerb(VerbExtensionPoint):
    """Bundle a package and all of its dependencies."""

    def __init__(self):  # noqa: D107
        satisfies_version(VerbExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')
        # Both are populated in main(); kept as attributes so helpers and
        # installers can share them.
        self._path_contex = None
        self._installer_manager = None

    def add_arguments(self, *, parser):  # noqa: D102
        parser.add_argument('--build-base', default='build',
                            help='The base path for all build directories ('
                                 'default: build)')
        parser.add_argument('--install-base', default='install',
                            help='The base path for all install prefixes ('
                                 'default: install)')
        parser.add_argument('--merge-install', action='store_true',
                            help='Merge all install prefixes into a single '
                                 'location')
        parser.add_argument('--bundle-base', default='bundle',
                            help='The base path for all bundle prefixes ('
                                 'default: bundle)')
        parser.add_argument(
            '--include-sources', action='store_true',
            help='Include a sources tarball for all packages installed into '
                 'the bundle via apt')
        parser.add_argument(
            '--bundle-version', default=2, type=int,
            help='Version of bundle to generate')
        parser.add_argument(
            '-U', '--upgrade', action='store_true',
            help='Upgrade all dependencies in the bundle to their latest '
                 'versions'
        )
        add_executor_arguments(parser)
        add_event_handler_arguments(parser)
        add_package_arguments(parser)

        decorated_parser = DestinationCollectorDecorator(parser)
        add_task_arguments(decorated_parser, 'colcon_bundle.task.bundle', )
        self.task_argument_destinations = decorated_parser.get_destinations()
        add_installer_arguments(decorated_parser)

    def main(self, *, context: CommandContext):  # noqa: D102
        # Entry point: validate the workspace, resolve dependencies, then
        # emit a v1 or v2 bundle archive. Returns 0 on success.
        print('Bundling workspace...')
        upgrade_deps_graph = context.args.upgrade
        install_base = os.path.abspath(context.args.install_base)
        merge_install = context.args.merge_install
        bundle_base = os.path.abspath(context.args.bundle_base)
        bundle_version = context.args.bundle_version
        # Bundling requires a previously built install prefix.
        if not os.path.exists(install_base):
            raise RuntimeError(
                'You must build your workspace before bundling it.')

        check_and_mark_install_layout(
            install_base,
            merge_install=merge_install)

        self._path_contex = PathContext(install_base, bundle_base,
                                        bundle_version)
        self._installer_manager = InstallerManager(self._path_contex)
        dependencies_changed = self._manage_dependencies(
            context, self._path_contex, upgrade_deps_graph)

        if context.args.bundle_version == 2:
            generate_archive_v2(self._path_contex,
                                [self._path_contex.installer_metadata_path()],
                                dependencies_changed)
        else:
            generate_archive_v1(self._path_contex)
        return 0

    def _manage_dependencies(self, context, path_context, upgrade_deps_graph):
        """Run installers when the dependency overlay must be (re)built.

        :return: True when the dependencies overlay was rebuilt, False when
        the previous overlay can be reused (a non-zero job return code is
        propagated as-is on failure)
        """
        destinations = self.task_argument_destinations
        decorators = get_packages(context.args,
                                  additional_argument_names=destinations,
                                  recursive_categories=('run',))
        if len(decorators) == 0:
            # Fixed two missing spaces at the literal joins below, which
            # previously rendered "issues/13for" and "to ourissues:".
            estr = 'We did not find any packages to add to the '\
                   'bundle. This might be because you are not '\
                   'in the right directory, or your workspace is '\
                   'not setup correctly for colcon. Please see '\
                   'https://github.com/colcon/colcon-ros-bundle/issues/13 ' \
                   'for some possible suggestions. If you are still having ' \
                   'trouble please post to our ' \
                   'issues: https://github.com/colcon/colcon-bundle/issues '\
                   'and we will be happy to help.'
            raise RuntimeError(estr)

        self._installer_manager.setup_installers(context)

        print('Checking if dependency tarball exists...')
        logger.info('Checking if dependency tarball exists...')

        jobs = self._get_jobs(context.args,
                              self._installer_manager.installers,
                              decorators)
        rc = execute_jobs(context, jobs)
        if rc != 0:
            # Propagate the failing return code to the caller.
            return rc

        direct_dependencies_changed = package_dependencies_changed(
            path_context, decorators)
        installer_parameters_changed = \
            self._installer_manager.cache_invalid()

        if not os.path.exists(path_context.dependencies_overlay_path()):
            # First bundle (or overlay deleted): always build it.
            self._installer_manager.run_installers(
                include_sources=context.args.include_sources)
            return True
        elif upgrade_deps_graph:
            print('Checking if dependency graph has changed since last '
                  'bundle...')
            logger.info('Checking if dependency graph has changed since last'
                        ' bundle...')
            # run_installers returns truthy when the resolved graph is
            # unchanged, in which case the existing overlay is reused.
            if self._installer_manager.run_installers(
                    include_sources=context.args.include_sources):
                print('All dependencies in dependency graph not changed, '
                      'skipping dependencies update...')
                logger.info('All dependencies in dependency graph not changed,'
                            ' skipping dependencies update...')
                return False
        else:
            print('Checking if local dependencies have changed since last'
                  ' bundle...')
            logger.info(
                'Checking if local dependencies have changed since last'
                ' bundle...')
            if not direct_dependencies_changed and \
                    not installer_parameters_changed:
                print('Local dependencies not changed, skipping dependencies'
                      ' update...')
                logger.info(
                    'Local dependencies not changed, skipping dependencies'
                    ' update...')
                return False
            self._installer_manager.run_installers(
                include_sources=context.args.include_sources)
        return True

    def _get_jobs(self, args, installers, decorators):
        """Build one bundle Job per selected package.

        :return: OrderedDict mapping package name to its Job
        """
        jobs = OrderedDict()
        workspace_package_names = [decorator.descriptor.name
                                   for decorator in decorators]
        logger.info(
            'Including {} in bundle...'.format(workspace_package_names))
        for decorator in decorators:
            if not decorator.selected:
                continue
            pkg = decorator.descriptor
            extension = get_task_extension(
                'colcon_bundle.task.bundle', pkg.type)
            if not extension:
                # logger.warn is a deprecated alias; use warning instead.
                logger.warning(
                    'No task extension to bundle a {pkg.type} package'
                    .format_map(locals()))
                continue

            # Map each recursive dependency to its install prefix; with an
            # isolated install each package lives in its own subdirectory.
            recursive_dependencies = OrderedDict()
            for dep_name in decorator.recursive_dependencies:
                dep_path = args.install_base
                if not args.merge_install:
                    dep_path = os.path.join(dep_path, dep_name)
                recursive_dependencies[dep_name] = dep_path

            dest = self.task_argument_destinations.values()
            package_args = BundlePackageArguments(
                pkg, installers, args, additional_destinations=dest)
            ordered_package_args = ', '.join(
                [('%s: %s' % (repr(k), repr(package_args.__dict__[k])))
                 for k in sorted(package_args.__dict__.keys())])
            logger.debug(
                'Bundling package {pkg.name} with the following arguments: '
                '{{{ordered_package_args}}}'.format_map(locals()))
            task_context = TaskContext(pkg=pkg,
                                       args=package_args,
                                       dependencies=recursive_dependencies)
            task_context.installers = installers

            job = Job(
                identifier=pkg.name,
                dependencies=set(recursive_dependencies.keys()),
                task=extension,
                task_context=task_context)
            jobs[pkg.name] = job
        return jobs
def setUp(self) -> None:
    """Prepare a fresh install prefix and a not-yet-existing bundle base."""
    self.install_base = tempfile.mkdtemp()
    # Create-then-remove yields a unique path that is guaranteed not to
    # exist, which is what PathContext expects for a fresh bundle base.
    self.bundle_base = tempfile.mkdtemp()
    shutil.rmtree(self.bundle_base)
    self.path_context = PathContext(self.install_base, self.bundle_base, 2)
def test_v1_no_cache(self, cache_version, *_):
    """All paths stay outside the cache subpath for cache version 1."""
    cache_version.return_value = 1
    context = PathContext(self.install_base, self.bundle_base, 2)

    assert context.bundle_base() == self.bundle_base
    assert context.install_base() == self.install_base

    # Every derived path must live outside the cache subdirectory.
    for accessor in (
            context.dependency_hash_path,
            context.installer_cache_path,
            context.dependency_hash_cache_path,
            context.dependencies_overlay_path,
            context.bundle_tar_path,
            context.installer_metadata_path,
            context.metadata_tar_path,
            context.dependencies_staging_path,
            context.version_file_path,
            context.workspace_staging_path,
            context.workspace_overlay_path,
            context.bundle_v1_output_path,
            context.bundle_v2_output_path,
            context.sources_tar_gz_path,
    ):
        self._assert_not_under_cache_subpath(accessor())