def _get_dependency_tree_for_package(package, parent=None, sync=False, jobs=1):
    """
    Build (or fetch from the module-level cache) the dependency tree rooted
    at *package*.

    :param package: a Package-like object with a ``name`` attribute.
    :param parent: the TreeNode to attach this tree under (may be None).
    :param sync: forwarded to ``_create_package`` when building dependencies.
    :param jobs: number of worker processes for the parallel pools.

    :return: the (possibly cached) ``TreeNode`` for *package*.
    """
    if package.name not in _TREE_DICT:
        # Lazy %-style args so the message is only formatted when emitted.
        logger.info("Building dependency tree for package: %s...", package)

        tree = TreeNode(package, parent=parent)

        # FIX: dropped a dead "dependencies = []" that was immediately
        # overwritten by the pool.map result below.
        with parallel.no_daemon_pool(processes=jobs) as pool:
            dependencies = pool.map(
                partial(_create_package, **{"sync": sync}),
                _INFO_DICT[package.name]["dependencies"])

        # Recurse in parallel; each child tree is attached under this node.
        with parallel.no_daemon_pool(processes=jobs) as pool:
            children = pool.map(
                partial(_get_dependency_tree_for_package, **{"parent": tree}),
                dependencies)

        if children:
            tree.add_children(*children)

        _TREE_DICT[package.name] = tree
    else:
        logger.info("Using cached dependency tree for package: %s.", package)

        # Re-parent the cached tree under the requested parent.
        tree = _TREE_DICT[package.name]
        tree.parent = parent

    return tree
def test_pool():
    """Exercise map() on each pool variant exposed by ``parallel``."""
    def _check(pool):
        totals = pool.map(sum, [(1, 2), (3, 4)])
        assert totals == [3, 7]

    # Default pool.
    with parallel.pool() as pool:
        _check(pool)

    # Explicit non-daemonic pool class.
    with parallel.pool(class_=parallel.NoDaemonPool) as pool:
        _check(pool)

    # Convenience wrapper for the non-daemonic pool.
    with parallel.no_daemon_pool() as pool:
        _check(pool)
def __init__(self, source, packages=None, installed=False, sync=False,
             build_dependency_tree=False, resolve=False, latest=False, jobs=1):
    """
    Build a registry of Package objects from *packages*.

    :param source: the package source; doubles as the pip executable path
        when ``installed`` is True.
    :param packages: iterable of package specs (FIX: mutable default ``[]``
        replaced with ``None`` sentinel).
    :param installed: treat *source* as a pip executable for installed packages.
    :param sync: bypass any caches when building Package objects.
    :param build_dependency_tree: also build dependency trees (installed only).
    :param resolve: attempt version resolution via mixology/pubgrub.
    :param latest: unused here; kept for interface compatibility.
    :param jobs: number of worker processes.
    """
    packages = packages or []

    self.source = source

    args = {"sync": sync}
    if installed:
        # Package needs the pip executable to introspect installed dists.
        args["pip_exec"] = source

    self.installed = installed

    self._packages = []
    with parallel.no_daemon_pool(processes=jobs) as pool:
        # imap_unordered: order of self._packages is not significant.
        for package in pool.imap_unordered(partial(Package, **args), packages):
            self._packages.append(package)

    if installed and build_dependency_tree and self._packages:
        self._build_dependency_tree_for_packages(sync=sync, jobs=jobs)

    if resolve:
        # --format tree overtakes --resolve; by default, attempt latest
        # resolution over a shallow dependency list.
        logger.info("Resolving Packages %s...", self._packages)

        from mixology.version_solver import VersionSolver
        from pipupgrade.pubgrub import PackageSource

        source = PackageSource()
        for package in self._packages:
            source.root_dep(package, package.latest_version)

        solver = VersionSolver(source)
        result = solver.solve()

        logger.info("Resolution Result: %s", result.decisions)
def __init__(self, source, packages=None, installed=False, sync=False,
             build_dependency_tree=False, jobs=1):
    """
    Build a registry of Package objects from *packages*.

    :param source: the package source; doubles as the pip executable path
        when ``installed`` is True.
    :param packages: iterable of package specs (FIX: mutable default ``[]``
        replaced with ``None`` sentinel).
    :param installed: treat *source* as a pip executable for installed packages.
    :param sync: bypass any caches when building Package objects.
    :param build_dependency_tree: also build dependency trees (installed only).
    :param jobs: number of worker processes.
    """
    packages = packages or []

    self.source = source

    args = {"sync": sync}
    if installed:
        # Package needs the pip executable to introspect installed dists.
        args["pip_exec"] = source

    with parallel.no_daemon_pool(processes=jobs) as pool:
        self.packages = pool.map(partial(Package, **args), packages)

    self.installed = installed

    if installed and build_dependency_tree and self.packages:
        self._build_dependency_tree_for_packages(sync=sync, jobs=jobs)
def command(pip_path=None, requirements=None, pipfile=None, project=None,
            pull_request=False, git_username=None, git_email=None,
            github_access_token=None, github_reponame=None,
            github_username=None, target_branch="master", latest=False,
            format="table", all=False, pip=False, self=False, jobs=1,
            user=False, check=False, interactive=False, yes=False,
            no_included_requirements=False, no_cache=False, no_color=True,
            output=None, force=False, verbose=False):
    """
    Main upgrade command: updates pip executables, pipupgrade itself,
    requirements files, Pipfiles and installed packages, and optionally
    opens a GitHub pull request for updated projects.

    FIX: mutable default arguments (``pip_path=[]``, ``requirements=[]``,
    ``pipfile=[]``) replaced with ``None`` sentinels — the body already
    normalizes them with ``or []``.

    :raises ValueError: when pull-request mode is requested but git/GitHub
        credentials are missing.
    """
    if not verbose:
        logger.setLevel(log.NOTSET)

    logger.info("Environment: %s" % environment())

    cli.echo(cli_format("Checking...", cli.YELLOW))
    logger.info("Arguments Passed: %s" % locals())

    pip_path = pip_path or []
    # Resolve explicit paths; fall back to every pip executable found.
    pip_path = [which(p) for p in pip_path] or _pip._PIP_EXECUTABLES
    logger.info("`pip` executables found: %s" % pip_path)

    logger.info("Using %s jobs..." % jobs)

    registries = []

    if pip:
        logger.info("Updating pip executables: %s" % " ".join(pip_path))
        with parallel.no_daemon_pool(processes=jobs) as pool:
            pool.map(
                partial(update_pip, **{
                    "user": user,
                    "quiet": not verbose
                }), pip_path)

    if self:
        # Self-update pipupgrade itself.
        package = __name__
        logger.info("Updating %s..." % package)
        cli.echo(cli_format("Updating %s..." % package, cli.YELLOW))

        _pip.call("install", package, user=user, quiet=not verbose,
                  no_cache=True, upgrade=True)

        cli.echo("%s upto date." % cli_format(package, cli.CYAN))
    else:
        if project:
            project = sequencify(project)
            requirements = requirements or []
            pipfile = pipfile or []

            logger.info("Detecting projects and its dependencies...")
            with parallel.no_daemon_pool(processes=jobs) as pool:
                project = pool.map(
                    partial(Project.from_path, **{"recursive_search": force}),
                    project)
                requirements += flatten(map(lambda p: p.requirements, project))
                pipfile += flatten(
                    map(lambda p: [p.pipfile] if p.pipfile else [], project))

            logger.info("Updating projects %s..." % project)

        if requirements:
            logger.info("Detecting requirements...")
            if not no_included_requirements:
                # Pull in requirements referenced via "-r" includes.
                with parallel.no_daemon_pool(processes=jobs) as pool:
                    results = pool.map(get_included_requirements, requirements)
                    requirements += flatten(results)

            logger.info("Requirements found: %s" % requirements)

            with parallel.no_daemon_pool(processes=jobs) as pool:
                results = pool.map(
                    partial(get_registry_from_requirements, **{
                        "sync": no_cache,
                        "jobs": jobs
                    }), requirements)
                registries += results
        else:
            with parallel.no_daemon_pool(processes=jobs) as pool:
                results = pool.map(
                    partial(
                        get_registry_from_pip, **{
                            "user": user,
                            "sync": no_cache,
                            "outdated": not all,
                            "build_dependency_tree":
                                format in _DEPENDENCY_FORMATS,
                            "jobs": jobs
                        }), pip_path)
                registries += results

        logger.info("Updating registries: %s..." % registries)

        # TODO: Tweaks within parallel.no_daemon_pool to run serially.
        if yes:
            # Non-interactive: safe to update registries in parallel.
            with parallel.no_daemon_pool(processes=jobs) as pool:
                pool.map(
                    partial(
                        update_registry, **{
                            "yes": yes,
                            "user": user,
                            "check": check,
                            "latest": latest,
                            "interactive": interactive,
                            "verbose": verbose,
                            "format_": format,
                            "all": all
                        }), registries)
        else:
            # Interactive prompts must run serially.
            for registry in registries:
                update_registry(registry, yes=yes, user=user, check=check,
                                latest=latest, interactive=interactive,
                                verbose=verbose, format_=format, all=all)

        if pipfile:
            logger.info("Updating Pipfiles: %s..." % pipfile)
            cli.echo(
                cli_format("Updating Pipfiles: %s..." % ", ".join(pipfile),
                           cli.YELLOW))

            with parallel.no_daemon_pool(processes=jobs) as pool:
                results = pool.map(
                    partial(update_pipfile, **{"verbose": verbose}), pipfile)

            if builtins.all(results):
                cli.echo(cli_format("Pipfiles upto date.", cli.GREEN))

        if project and pull_request:
            errstr = '%s not found. Use %s or the environment variable "%s" to set value.'

            if not git_username:
                raise ValueError(errstr % ("Git Username", "--git-username",
                                           getenvvar("GIT_USERNAME")))
            if not git_email:
                raise ValueError(
                    errstr % ("Git Email", "--git-email",
                              getenvvar("GIT_EMAIL")))

            for p in project:
                popen("git config user.name %s" % git_username, cwd=p.path)
                popen("git config user.email %s" % git_email, cwd=p.path)

                # FIX: pass cwd=p.path — the status check previously ran in
                # the caller's working directory, not the project's.
                _, output, _ = popen("git status -s", output=True, cwd=p.path)
                if output:
                    branch = get_timestamp_str(format_="%Y%m%d%H%M%S")
                    # FIX: checkout also runs inside the project directory.
                    popen("git checkout -B %s" % branch, quiet=not verbose,
                          cwd=p.path)

                    title = "fix(dependencies): Update dependencies to latest"
                    body = ""

                    # TODO: cross-check with "git add" ?
                    files = p.requirements + [p.pipfile]
                    popen("git add %s" % " ".join(files), quiet=not verbose,
                          cwd=p.path)
                    popen("git commit -m '%s'" % title, quiet=not verbose,
                          cwd=p.path)

                    popen("git push origin %s" % branch, quiet=not verbose,
                          cwd=p.path)

                    if not github_reponame:
                        raise ValueError(
                            errstr % ("GitHub Reponame", "--github-reponame",
                                      getenvvar("GITHUB_REPONAME")))
                    if not github_username:
                        raise ValueError(
                            errstr % ("GitHub Username", "--github-username",
                                      getenvvar("GITHUB_USERNAME")))

                    url = "/".join([
                        "https://api.github.com", "repos", github_username,
                        github_reponame, "pulls"
                    ])
                    headers = dict({
                        "Content-Type": "application/json",
                        "Authorization": "token %s" % github_access_token
                    })
                    data = dict(head="%s:%s" % (git_username, branch),
                                base=target_branch, title=title, body=body)

                    # Although there's monkey patch support for the "requests"
                    # library, avoid using the "json" parameter which was
                    # added in requests 2.4.2+
                    response = req.post(url, data=json.dumps(data),
                                        headers=headers)
                    if response.ok:
                        response = response.json()
                        number = response["number"]
                        url = "/".join(
                            map(str, [
                                "https://github.com", github_username,
                                github_reponame, "pull", number
                            ]))
                        message = "Created a Pull Request at %s" % url
                        cli.echo(cli_format(message, cli.GREEN))
                    else:
                        response.raise_for_status()
def _command(*args, **kwargs):
    """
    Main upgrade command (params-object variant): updates pip executables,
    pipupgrade itself, requirements files, Pipfiles and installed packages,
    and optionally opens a GitHub pull request for updated projects.

    All options arrive via **kwargs and are accessed through ``to_params``.

    :raises ValueError: when pull-request mode is requested but git/GitHub
        credentials are missing.
    """
    a = to_params(kwargs)

    if not a.verbose:
        logger.setLevel(log.NOTSET)

    logger.info("Environment: %s" % environment())
    logger.info("Arguments Passed: %s" % locals())

    file_ = a.output
    if file_:
        logger.info("Writing to output file %s..." % file_)
        touch(file_)

    cli.echo(cli_format("Checking...", cli.YELLOW), file=file_)

    pip_path = a.pip_path or []
    # Resolve explicit paths; fall back to every pip executable found.
    pip_path = [which(p) for p in pip_path] or _pip._PIP_EXECUTABLES
    logger.info("`pip` executables found: %s" % pip_path)

    logger.info("Using %s jobs..." % a.jobs)

    registries = []

    if a.pip:
        logger.info("Updating pip executables: %s" % " ".join(pip_path))
        with parallel.no_daemon_pool(processes=a.jobs) as pool:
            # FIX: imap_unordered is lazy — the original never consumed the
            # iterator, so the updates could be skipped/terminated when the
            # pool context exited. Materialize to force completion.
            list(
                pool.imap_unordered(
                    partial(
                        update_pip, **{
                            "user": a.user,
                            "quiet": not a.verbose,
                            "file": file_
                        }), pip_path))

    if a.self:
        # Self-update pipupgrade itself.
        package = __name__
        logger.info("Updating %s..." % package)
        cli.echo(cli_format("Updating %s..." % package, cli.YELLOW),
                 file=file_)

        _pip.call("install", package, user=a.user, quiet=not a.verbose,
                  no_cache=True, upgrade=True)

        cli.echo("%s upto date." % cli_format(package, cli.CYAN), file=file_)
    else:
        requirements = sequencify(a.requirements or [])
        pipfile = sequencify(a.pipfile or [])

        if a.project:
            project = sequencify(a.project)

            logger.info("Detecting projects and its dependencies...")
            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                # FIX: materialize — the original kept the lazy
                # imap_unordered iterator, which was exhausted by the first
                # map() below, leaving the pipfile collection and the later
                # pull-request loop with nothing to iterate.
                project = list(
                    pool.imap_unordered(
                        partial(Project.from_path,
                                **{"depth_search": a.force}), project))

            requirements += flatten(map(lambda p: p.requirements, project))
            pipfile += flatten(
                map(lambda p: [p.pipfile] if p.pipfile else [], project))

            logger.info("Updating projects %s..." % project)

        if requirements:
            logger.info("Detecting requirements...")
            if not a.no_included_requirements:
                # Pull in requirements referenced via "-r" includes.
                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    results = pool.imap_unordered(get_included_requirements,
                                                  requirements)
                    requirements += flatten(results)

            # De-duplicate (order is not significant past this point).
            requirements = list(set(requirements))
            logger.info("Requirements found: %s" % requirements)

            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                # FIX: materialize inside the pool context so results are
                # complete before the pool is torn down.
                registries += list(
                    pool.imap_unordered(
                        partial(
                            get_registry_from_requirements, **{
                                "sync": a.no_cache,
                                "jobs": a.jobs,
                                "only_packages": a.packages,
                                "file": file_,
                                "ignore_packages": a.ignore
                            }), requirements))
        else:
            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                for registry in pool.imap_unordered(
                        partial(
                            get_registry_from_pip, **{
                                "user": a.user,
                                "sync": a.no_cache,
                                "outdated": not a.all,
                                "build_dependency_tree":
                                    a.format in _DEPENDENCY_FORMATS,
                                "jobs": a.jobs,
                                "only_packages": a.packages,
                                "ignore_packages": a.ignore,
                            }), pip_path):
                    registries.append(registry)

        logger.info("Updating registries: %s..." % registries)
        # Registries update serially (interactive prompts, ordered output).
        for registry in registries:
            update_registry(registry, yes=a.yes, user=a.user, check=a.check,
                            latest=a.latest, interactive=a.interactive,
                            verbose=a.verbose, format_=a.format, all=a.all,
                            filter_=a.packages, file=file_,
                            raise_err=not a.ignore_error,
                            upgrade_type=a.upgrade_type)

        if pipfile:
            logger.info("Updating Pipfiles: %s..." % pipfile)
            cli.echo(cli_format("Updating Pipfiles: %s..." % ", ".join(pipfile),
                                cli.YELLOW),
                     file=file_)

            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                # FIX: the kwarg key was "a.verbose", so update_pipfile never
                # received "verbose". Also materialize before the pool closes
                # so builtins.all() below sees completed results.
                results = list(
                    pool.imap_unordered(
                        partial(update_pipfile, **{"verbose": a.verbose}),
                        pipfile))

            if builtins.all(results):
                cli.echo(cli_format("Pipfiles upto date.", cli.GREEN),
                         file=file_)

        if a.project and a.pull_request:
            errstr = '%s not found. Use %s or the environment variable "%s" to set value.'

            if not a.git_username:
                raise ValueError(errstr % ("Git Username", "--git-username",
                                           getenvvar("GIT_USERNAME")))
            if not a.git_email:
                raise ValueError(
                    errstr % ("Git Email", "--git-email",
                              getenvvar("GIT_EMAIL")))

            for p in project:
                popen("git config user.name %s" % a.git_username, cwd=p.path)
                popen("git config user.email %s" % a.git_email, cwd=p.path)

                _, output, _ = popen("git status -s", output=True, cwd=p.path)
                if output:
                    branch = get_timestamp_str(format_="%Y%m%d%H%M%S")
                    popen("git checkout -B %s" % branch, quiet=not a.verbose,
                          cwd=p.path)

                    title = "fix(dependencies): Update dependencies to latest"
                    body = ""

                    # TODO: cross-check with "git add" ?
                    files = p.requirements + [p.pipfile]
                    popen("git add %s" % " ".join(files), quiet=not a.verbose,
                          cwd=p.path)
                    popen("git commit -m '%s'" % title, quiet=not a.verbose,
                          cwd=p.path)

                    popen("git push origin %s" % branch, quiet=not a.verbose,
                          cwd=p.path)

                    if not a.github_reponame:
                        raise ValueError(
                            errstr % ("GitHub Reponame", "--github-reponame",
                                      getenvvar("GITHUB_REPONAME")))
                    if not a.github_username:
                        raise ValueError(
                            errstr % ("GitHub Username", "--github-username",
                                      getenvvar("GITHUB_USERNAME")))

                    url = "/".join([
                        "https://api.github.com", "repos", a.github_username,
                        a.github_reponame, "pulls"
                    ])
                    headers = dict({
                        "Content-Type": "application/json",
                        "Authorization": "token %s" % a.github_access_token
                    })
                    data = dict(head="%s:%s" % (a.git_username, branch),
                                base=a.target_branch, title=title, body=body)

                    # Although there's monkey patch support for the "requests"
                    # library, avoid using the "json" parameter which was
                    # added in requests 2.4.2+
                    response = req.post(url, data=json.dumps(data),
                                        headers=headers)
                    if response.ok:
                        response = response.json()
                        number = response["number"]
                        url = "/".join(
                            map(str, [
                                "https://github.com", a.github_username,
                                a.github_reponame, "pull", number
                            ]))
                        message = "Created a Pull Request at %s" % url
                        cli.echo(cli_format(message, cli.GREEN), file=file_)
                    else:
                        response.raise_for_status()