def main(options):
    """Print the packages found below ``options.basepath``, one per line.

    Depending on *options* each line is the package name, its relative
    path, or ``"<name> <path>"``.  Output is alphabetical unless
    ``options.topological_order`` is set, in which case it follows the
    dependency order.  ``options.depends_on`` filters the listing to
    packages that (recursively) depend on the given name.
    """
    output = []
    if options.topological_order:
        # Dependency-ordered listing; deliberately NOT sorted afterwards.
        ordered = topological_order_packages(
            find_unique_packages(options.basepath))
        for rel_path, pkg, _ in ordered:
            if options.depends_on is not None and \
                    options.depends_on not in get_unique_depend_names(pkg):
                continue
            if options.names_only:
                output.append(pkg.name)
            elif options.paths_only:
                output.append(rel_path)
            else:
                output.append(pkg.name + ' ' + rel_path)
    else:
        for rel_path in find_package_paths(options.basepath):
            pkg = None
            abs_path = os.path.join(options.basepath, rel_path)
            if options.depends_on is not None:
                pkg = parse_package(abs_path)
                if options.depends_on not in get_unique_depend_names(pkg):
                    continue
            if options.names_only:
                # Parse lazily: only when the name is actually needed.
                if pkg is None:
                    pkg = parse_package(abs_path)
                output.append(pkg.name)
            elif options.paths_only:
                output.append(rel_path)
            else:
                if pkg is None:
                    pkg = parse_package(abs_path)
                output.append(pkg.name + ' ' + rel_path)
        output.sort()
    for entry in output:
        print(entry)
def iterate_packages(opts, packages, per_package_callback):
    """Invoke *per_package_callback* once per package, iterating *packages*
    in reverse order.

    For every non-skipped package, ``opts.path`` and
    ``opts.build_dependencies`` are updated in place before the callback
    runs.  Returns the first non-zero callback result, otherwise ``None``.
    Stops after processing the package named ``opts.start_with``.

    NOTE(review): *opts* is mutated in place, so its fields are left at
    the values of the last processed package — confirm callers do not
    rely on the original values afterwards.
    """
    path_to_package = {p: pkg for p, pkg, _ in packages}
    for path, package, depends in reversed(packages):
        if package.name in opts.skip_packages:
            print('# Skipping: %s' % package.name)
            continue
        opts.path = os.path.join(opts.basepath, path)
        # Recursive dependencies of this package, topologically ordered.
        recursive = topological_order_packages(
            path_to_package, whitelisted=depends)
        depend_names = [
            p.name for _, p, _ in recursive if p.name != package.name]
        # Install-space 'share' folder of every dependency.
        opts.build_dependencies = [
            os.path.join(opts.install_space, 'share', name)
            for name in depend_names]
        rc = per_package_callback(opts)
        if rc:
            return rc
        if package.name == opts.start_with:
            print("Stopped after package '{0}'".format(package.name))
            break
def main(options):
    """Print package names or relative paths, one per line.

    Packages below ``options.basepath`` are printed alphabetically by
    path, or in dependency order when ``options.topological_order`` is
    set.  ``options.depends_on`` restricts output to packages that
    (recursively) depend on the given name.
    """
    if options.topological_order:
        ordered = topological_order_packages(
            find_unique_packages(options.basepath))
        for rel_path, pkg, _ in ordered:
            if options.depends_on is not None and \
                    options.depends_on not in get_unique_depend_names(pkg):
                continue
            print(pkg.name if options.names_only else rel_path)
    else:
        for rel_path in sorted(find_package_paths(options.basepath)):
            pkg = None
            abs_path = os.path.join(options.basepath, rel_path)
            if options.depends_on is not None:
                pkg = parse_package(abs_path)
                if options.depends_on not in get_unique_depend_names(pkg):
                    continue
            if options.names_only:
                # Only parse the manifest when the name is required.
                if pkg is None:
                    pkg = parse_package(abs_path)
                print(pkg.name)
            else:
                print(rel_path)
def iterate_packages(opts, packages, per_package_callback):
    """Run *per_package_callback* over *packages*, iterated in reverse.

    Packages before ``opts.end_with`` (in the reversed order) are
    announced as skipped.  ``opts.path`` and ``opts.build_dependencies``
    are updated in place for each processed package.  Returns the first
    non-zero callback result; stops after ``opts.start_with``.
    """
    # An empty/unset --end-with means: process from the first package.
    reached_end_with = not opts.end_with
    path_to_package = dict([(p, pkg) for p, pkg, _ in packages])
    for path, package, depends in reversed(packages):
        if package.name == opts.end_with:
            reached_end_with = True
        if not reached_end_with:
            print('# Skipping: %s' % package.name)
            continue
        opts.path = os.path.join(opts.basepath, path)
        # Recursive dependencies of this package in topological order.
        recursive = topological_order_packages(
            path_to_package, whitelisted=depends)
        depend_names = [
            p.name for _, p, _ in recursive if p.name != package.name]
        # Install-space 'share' folder of every dependency.
        opts.build_dependencies = [
            os.path.join(opts.install_space, 'share', name)
            for name in depend_names]
        rc = per_package_callback(opts)
        if rc:
            return rc
        if package.name == opts.start_with:
            print("Stopped after package '{0}'".format(package.name))
            break
def main(options):
    """Print the packages found below ``options.basepath``, one per line.

    Each line is the package name, its relative path, or
    ``"<name> <path>"`` depending on *options*.  ``options.depends_on``
    filters the listing to packages that (recursively) depend on the
    given name.

    Fix: previously ``lines.sort()`` ran after BOTH branches, so the
    alphabetical sort was also applied when ``--topological-order`` was
    requested, destroying the dependency order.  The sort now happens
    only in the non-topological branch.
    """
    lines = []
    if not options.topological_order:
        package_paths = find_package_paths(options.basepath)
        for package_path in package_paths:
            package = None
            package_abs_path = os.path.join(options.basepath, package_path)
            if options.depends_on is not None:
                package = parse_package(package_abs_path)
                if options.depends_on not in get_unique_depend_names(package):
                    continue
            if options.names_only:
                # Parse lazily: only when the name is actually needed.
                package = package or parse_package(package_abs_path)
                lines.append(package.name)
            elif options.paths_only:
                lines.append(package_path)
            else:
                package = package or parse_package(package_abs_path)
                lines.append(package.name + ' ' + package_path)
        # Sort only here: sorting the topological result below would
        # destroy the requested dependency order.
        lines.sort()
    else:
        packages = find_unique_packages(options.basepath)
        packages = topological_order_packages(packages)
        for package_path, package, _ in packages:
            if options.depends_on is not None:
                if options.depends_on not in get_unique_depend_names(package):
                    continue
            if options.names_only:
                lines.append(package.name)
            elif options.paths_only:
                lines.append(package_path)
            else:
                lines.append(package.name + ' ' + package_path)
    for line in lines:
        print(line)
def main(options):
    """Print the packages below ``options.basepath``, one per line.

    In the non-topological mode all manifests are parsed up front so
    that manifest conditions can be evaluated against ``os.environ`` and
    group dependencies can be expanded before filtering with
    ``options.depends_on``.  Output is alphabetical in that mode and in
    dependency order when ``options.topological_order`` is set.
    """
    output = []
    if options.topological_order:
        for rel_path, pkg, _ in topological_order_packages(
                find_unique_packages(options.basepath)):
            if options.depends_on is not None and \
                    options.depends_on not in get_unique_depend_names(pkg):
                continue
            if options.names_only:
                output.append(pkg.name)
            elif options.paths_only:
                output.append(rel_path)
            else:
                output.append(pkg.name + ' ' + rel_path)
    else:
        # Parse every package manifest below the base path.
        manifests = {}
        for rel_path in find_package_paths(options.basepath):
            manifests[rel_path] = parse_package(
                os.path.join(options.basepath, rel_path))
        # Evaluate manifest conditions against the current environment.
        for pkg in manifests.values():
            pkg.evaluate_conditions(os.environ)
        # Expand group dependencies into their member packages.
        for pkg in manifests.values():
            for group in pkg.group_depends:
                if group.evaluated_condition:
                    group.extract_group_members(manifests.values())
        for rel_path, pkg in manifests.items():
            if options.depends_on is not None and \
                    options.depends_on not in get_unique_depend_names(pkg):
                continue
            if options.names_only:
                output.append(pkg.name)
            elif options.paths_only:
                output.append(rel_path)
            else:
                output.append(pkg.name + ' ' + rel_path)
        output.sort()
    for line in output:
        print(line)
def iterate_packages(opts, packages, per_package_callback):
    """Build a job per package and hand the jobs to a processor.

    For every package not listed in ``opts.skip_packages`` a shallow copy
    of *opts* is prepared (path, per-package install space, dependency
    share folders) and stored in ``jobs`` keyed by package name; the
    insertion order of the OrderedDict preserves the topological order of
    *packages*.  The jobs are then run sequentially or in parallel
    depending on ``opts.parallel``.  With ``opts.isolated`` each package
    gets its own install-space subdirectory, and prefix-level setup file
    templates are expanded/copied/symlinked into the install-space root
    afterwards.  Returns the processor's return code.
    """
    install_space_base = opts.install_space
    package_dict = dict([(path, package) for path, package, _ in packages])
    workspace_package_names = [pkg.name for pkg in package_dict.values()]
    jobs = OrderedDict()
    for (path, package, depends) in packages:
        if package.name in opts.skip_packages:
            print('# Skipping: %s' % package.name)
        else:
            pkg_path = os.path.join(opts.basepath, path)
            # shallow copy so each job carries its own per-package options
            package_opts = copy.copy(opts)
            package_opts.path = os.path.abspath(
                os.path.join(os.getcwd(), pkg_path))
            if package_opts.isolated:
                package_opts.install_space = os.path.join(
                    install_space_base, package.name)
            # get recursive package dependencies in topological order
            ordered_depends = topological_order_packages(package_dict, whitelisted=depends)
            ordered_depends = [
                pkg.name for _, pkg, _ in ordered_depends if pkg.name != package.name
            ]
            # get package share folder for each package
            package_opts.build_dependencies = []
            for depend in ordered_depends:
                install_space = install_space_base
                if package_opts.isolated:
                    install_space = os.path.join(install_space, depend)
                package_share = os.path.join(install_space, 'share', depend)
                package_opts.build_dependencies.append(package_share)
            # get the package share folder for each exec depend of the package
            package_opts.exec_dependency_paths_in_workspace = []
            for dep_object in package.exec_depends:
                dep_name = dep_object.name
                if dep_name not in workspace_package_names:
                    # do not add to this list if the dependency is not in the workspace
                    continue
                install_space = install_space_base
                if package_opts.isolated:
                    install_space = os.path.join(install_space, dep_name)
                package_share = os.path.join(install_space, 'share', dep_name)
                package_opts.exec_dependency_paths_in_workspace.append(
                    package_share)
            jobs[package.name] = {
                'callback': per_package_callback,
                'opts': package_opts,
                'depends': ordered_depends,
            }
        # stop collecting jobs once the --end-with package has been seen
        if package.name == opts.end_with:
            break
    if not opts.parallel:
        rc = processSequentially(jobs)
    else:
        rc = processInParallel(jobs)
    if not rc and opts.end_with:
        print("Stopped after package '{0}'".format(opts.end_with))
    # expand prefix-level setup files for the root of the install-space
    if opts.isolated:
        for name in get_isolated_prefix_level_template_names():
            template_path = get_isolated_prefix_level_template_path(name)
            if name.endswith('.in'):
                # templated file: expand placeholders, write without the
                # trailing '.in' suffix
                content = configure_file(
                    template_path, {
                        'CMAKE_INSTALL_PREFIX': install_space_base,
                        'PYTHON_EXECUTABLE': sys.executable,
                    })
                destination_path = os.path.join(install_space_base, name[:-3])
                with open(destination_path, 'w') as h:
                    h.write(content)
            else:
                dst = os.path.join(install_space_base, name)
                # remove a pre-existing destination unless it is already the
                # desired symlink to the template
                if os.path.exists(dst):
                    if not opts.symlink_install or \
                            not os.path.islink(dst) or \
                            not os.path.samefile(template_path, dst):
                        os.remove(dst)
                if not os.path.exists(dst):
                    if not opts.symlink_install:
                        shutil.copy(template_path, dst)
                    else:
                        os.symlink(template_path, dst)
    return rc
def iterate_packages(opts, packages, per_package_callback):
    """Build a job per package and hand the jobs to a processor.

    Like the camel-case variant above, but exec-dependency collection
    also includes group-dependency members, and the prefix-level
    template expansion uses ``opts.python_interpreter`` rather than the
    running interpreter.  Jobs are collected into an OrderedDict (keyed
    by package name, preserving topological order), then processed
    sequentially or in parallel per ``opts.parallel``.  Returns the
    processor's return code.
    """
    install_space_base = opts.install_space
    package_dict = {path: package for path, package, _ in packages}
    workspace_package_names = [pkg.name for pkg in package_dict.values()]
    jobs = OrderedDict()
    for (path, package, depends) in packages:
        if package.name in opts.skip_packages:
            print('# Skipping: %s' % package.name)
        else:
            pkg_path = os.path.join(opts.basepath, path)
            # shallow copy so each job carries its own per-package options
            package_opts = copy.copy(opts)
            package_opts.path = os.path.abspath(os.path.join(os.getcwd(), pkg_path))
            if package_opts.isolated:
                package_opts.install_space = os.path.join(install_space_base, package.name)
            # get recursive package dependencies in topological order
            ordered_depends = topological_order_packages(
                package_dict, whitelisted=depends)
            ordered_depends = [
                pkg.name for _, pkg, _ in ordered_depends
                if pkg.name != package.name]
            # get package share folder for each package
            package_opts.build_dependencies = []
            for depend in ordered_depends:
                install_space = install_space_base
                if package_opts.isolated:
                    install_space = os.path.join(install_space, depend)
                package_share = os.path.join(install_space, 'share', depend)
                package_opts.build_dependencies.append(package_share)
            # get the package share folder for each exec depend of the package
            # also consider group dependencies
            package_opts.exec_dependency_paths_in_workspace = []
            dep_names = [d.name for d in package.exec_depends]
            for g in package.group_depends:
                dep_names += g.members
            for dep_name in dep_names:
                if dep_name not in workspace_package_names:
                    # do not add to this list if the dependency is not in the workspace
                    continue
                install_space = install_space_base
                if package_opts.isolated:
                    install_space = os.path.join(install_space, dep_name)
                package_share = os.path.join(install_space, 'share', dep_name)
                package_opts.exec_dependency_paths_in_workspace.append(package_share)
            jobs[package.name] = {
                'callback': per_package_callback,
                'opts': package_opts,
                'depends': ordered_depends,
            }
        # stop collecting jobs once the --end-with package has been seen
        if package.name == opts.end_with:
            break
    if not opts.parallel:
        rc = process_sequentially(jobs)
    else:
        rc = process_in_parallel(jobs)
    if not rc and opts.end_with:
        print("Stopped after package '{0}'".format(opts.end_with))
    # expand prefix-level setup files for the root of the install-space
    if opts.isolated:
        for name in get_isolated_prefix_level_template_names():
            template_path = get_isolated_prefix_level_template_path(name)
            if name.endswith('.in'):
                # templated file: expand placeholders, write without the
                # trailing '.in' suffix
                content = configure_file(template_path, {
                    'CMAKE_INSTALL_PREFIX': install_space_base,
                    'PYTHON_EXECUTABLE': opts.python_interpreter,
                })
                destination_path = os.path.join(
                    install_space_base, name[:-3])
                with open(destination_path, 'w') as h:
                    h.write(content)
            else:
                dst = os.path.join(install_space_base, name)
                # remove a pre-existing destination unless it is already the
                # desired symlink to the template
                if os.path.exists(dst):
                    if not opts.symlink_install or \
                            not os.path.islink(dst) or \
                            not os.path.samefile(template_path, dst):
                        os.remove(dst)
                if not os.path.exists(dst):
                    if not opts.symlink_install:
                        shutil.copy(template_path, dst)
                    else:
                        os.symlink(template_path, dst)
    return rc
def iterate_packages(opts, packages, per_package_callback):
    """Invoke *per_package_callback* sequentially for each package.

    Packages before ``opts.start_with`` are announced as skipped.  For
    each processed package ``opts`` is updated in place (path,
    per-package install space when isolated, dependency share folders)
    before the callback runs; the first non-zero callback result is
    returned immediately.  Processing stops after ``opts.end_with``.
    With ``opts.isolated``, prefix-level setup file templates are
    expanded/copied/symlinked into the install-space root afterwards.

    NOTE(review): *opts* is mutated in place, so ``opts.path``,
    ``opts.install_space`` and ``opts.build_dependencies`` keep the
    values of the last processed package after this returns — confirm
    callers do not rely on the original values.
    """
    # no --start-with given means: start from the first package
    start_with_found = not opts.start_with
    install_space_base = opts.install_space
    package_dict = dict([(path, package) for path, package, _ in packages])
    for (path, package, depends) in packages:
        if package.name == opts.start_with:
            start_with_found = True
        if not start_with_found:
            print('# Skipping: %s' % package.name)
            continue
        pkg_path = os.path.join(opts.basepath, path)
        opts.path = pkg_path
        if opts.isolated:
            # each package gets its own subdirectory of the install space
            opts.install_space = os.path.join(install_space_base, package.name)
        # get recursive package dependencies in topological order
        ordered_depends = topological_order_packages(package_dict, whitelisted=depends)
        ordered_depends = [
            pkg.name for _, pkg, _ in ordered_depends if pkg.name != package.name
        ]
        # get package share folder for each package
        opts.build_dependencies = []
        for depend in ordered_depends:
            install_space = install_space_base
            if opts.isolated:
                install_space = os.path.join(install_space, depend)
            package_share = os.path.join(install_space, 'share', depend)
            opts.build_dependencies.append(package_share)
        rc = per_package_callback(opts)
        if rc:
            return rc
        if package.name == opts.end_with:
            print("Stopped after package '{0}'".format(package.name))
            break
    # expand prefix-level setup files for the root of the install-space
    if opts.isolated:
        for name in get_isolated_prefix_level_template_names():
            template_path = get_isolated_prefix_level_template_path(name)
            if name.endswith('.in'):
                # templated file: expand placeholders, write without the
                # trailing '.in' suffix
                content = configure_file(
                    template_path, {
                        'CMAKE_INSTALL_PREFIX': install_space_base,
                        'PYTHON_EXECUTABLE': sys.executable,
                    })
                destination_path = os.path.join(install_space_base, name[:-3])
                with open(destination_path, 'w') as h:
                    h.write(content)
            else:
                dst = os.path.join(install_space_base, name)
                # remove a pre-existing destination unless it is already the
                # desired symlink to the template
                if os.path.exists(dst):
                    if not opts.symlink_install or \
                            not os.path.islink(dst) or \
                            not os.path.samefile(template_path, dst):
                        os.remove(dst)
                if not os.path.exists(dst):
                    if not opts.symlink_install:
                        shutil.copy(template_path, dst)
                    else:
                        os.symlink(template_path, dst)
def iterate_packages(opts, packages, per_package_callback):
    """Run *per_package_callback* sequentially over *packages*.

    Packages before ``opts.start_with`` are announced as skipped.  For
    each processed package ``opts`` is updated in place (path, isolated
    install space, dependency share folders) before the callback runs;
    the first non-zero callback result is returned immediately, and
    processing stops after ``opts.end_with``.  With ``opts.isolated``
    the prefix-level setup file templates are expanded, copied or
    symlinked into the root of the install space afterwards.
    """
    # An empty/unset --start-with means: start from the first package.
    started = not opts.start_with
    install_space_base = opts.install_space
    path_to_package = {p: pkg for p, pkg, _ in packages}
    for path, package, depends in packages:
        if package.name == opts.start_with:
            started = True
        if not started:
            print('# Skipping: %s' % package.name)
            continue
        opts.path = os.path.join(opts.basepath, path)
        if opts.isolated:
            # Each package gets its own subdirectory of the install space.
            opts.install_space = os.path.join(install_space_base, package.name)
        # Recursive dependencies of this package in topological order.
        recursive = topological_order_packages(
            path_to_package, whitelisted=depends)
        depend_names = [
            p.name for _, p, _ in recursive if p.name != package.name]
        # Install-space 'share' folder of every dependency.
        opts.build_dependencies = []
        for name in depend_names:
            if opts.isolated:
                base = os.path.join(install_space_base, name)
            else:
                base = install_space_base
            opts.build_dependencies.append(os.path.join(base, 'share', name))
        rc = per_package_callback(opts)
        if rc:
            return rc
        if package.name == opts.end_with:
            print("Stopped after package '{0}'".format(package.name))
            break
    # Expand prefix-level setup files for the root of the install space.
    if opts.isolated:
        for name in get_isolated_prefix_level_template_names():
            template_path = get_isolated_prefix_level_template_path(name)
            if name.endswith('.in'):
                # Templated file: expand placeholders and drop the '.in'.
                content = configure_file(template_path, {
                    'CMAKE_INSTALL_PREFIX': install_space_base,
                    'PYTHON_EXECUTABLE': sys.executable,
                })
                with open(
                        os.path.join(install_space_base, name[:-3]), 'w') as h:
                    h.write(content)
            else:
                dst = os.path.join(install_space_base, name)
                # Remove a pre-existing destination unless it is already
                # the desired symlink to the template.
                if os.path.exists(dst) and (
                        not opts.symlink_install or
                        not os.path.islink(dst) or
                        not os.path.samefile(template_path, dst)):
                    os.remove(dst)
                if not os.path.exists(dst):
                    if opts.symlink_install:
                        os.symlink(template_path, dst)
                    else:
                        shutil.copy(template_path, dst)