def test_command_doctor(capfd):
    """Running the command with doctor + clean deletes the cache database."""
    db_file = osp.join(get_config_path(name = NAME), "db.db")

    # the bootstrap database must exist before the clean-up runs...
    assert osp.exists(db_file)

    command(verbose = True, doctor = True, clean = True)

    # ...and must be gone afterwards.
    assert not osp.exists(db_file)
import re # imports - module imports from pipupgrade.__attr__ import __name__ as NAME from pipupgrade import _pip, semver from pipupgrade.config import PATH from bpyutils.tree import Node as TreeNode from bpyutils.util.string import kebab_case, strip from bpyutils.util._dict import merge_dict from bpyutils._compat import iterkeys, iteritems, string_types from bpyutils.config import Settings, get_config_path from bpyutils import request as req, db, log logger = log.get_logger(name = NAME) _db = db.get_connection(location = get_config_path(NAME)) _db.from_file(osp.join(PATH["DATA"], "bootstrap.sql")) settings = Settings() def _get_pypi_info(name, raise_err = True): url = "https://pypi.org/pypi/{}/json".format(name) res = req.get(url) info = None if res.ok: data = res.json() info = merge_dict(data["info"], { "releases": data["releases"] }) else: if raise_err:
def _command(*args, **kwargs):
    """Core implementation behind the CLI entry point.

    ``kwargs`` mirrors the CLI flags (verbose, doctor, clean, pip, self,
    project, requirements, pipfile, pull_request, ...). Depending on the
    flags this runs diagnostics, upgrades pip executables / pipupgrade
    itself / requirement files / Pipfiles, and can open a GitHub pull
    request with the updated dependency files.
    """
    check_update_available()

    # Wrap the raw keyword arguments so flags are attribute-accessible
    # (a.verbose, a.doctor, ...).
    a = to_params(kwargs)

    if not a.verbose:
        # Silence our own logger entirely when not verbose.
        logger.setLevel(log.NOTSET)

    logger.info("Environment: %s" % environment())
    logger.info("Arguments Passed: %s" % locals())

    if a.doctor:
        logger.info("Performing Diagnostics and attempting to fix.")

        if a.clean:
            # Remove the cached database file from the config directory.
            path_config = get_config_path(name=NAME)
            paths = [osp.join(path_config, "db.db")]

            for path in paths:
                remove(path)

        # check database and repair.
    else:
        if a.resolve:
            # Dependency resolution requires optional extras; fail early
            # with a helpful error if they are not installed.
            import_or_raise("mixology")
            import_or_raise("semver", name="poetry-semver")

            populate_db = import_handler("pipupgrade.pubgrub.populate_db")
            populate_db()

        file_ = a.output

        if file_:
            logger.info("Writing to output file %s..." % file_)
            touch(file_)

        cli.echo(cli_format("Checking...", cli.YELLOW), file=file_)

        # Use the explicitly requested pip executables (resolved via
        # which), falling back to the default executable list.
        pip_path = a.pip_path or []
        pip_path = [which(p) for p in pip_path] or _pip._PIP_EXECUTABLES

        logger.info("`pip` executables found: %s" % pip_path)
        logger.info("Using %s jobs..." % a.jobs)

        registries = []

        if a.pip:
            logger.info("Updating pip executables: %s" % " ".join(pip_path))

            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                pool.imap_unordered(
                    partial(
                        update_pip, **{
                            "user": a.user,
                            "quiet": not a.verbose,
                            "file": file_
                        }), pip_path)

        if a.self:
            # Upgrade pipupgrade itself instead of scanning packages.
            package = NAME

            logger.info("Updating %s..." % package)
            cli.echo(cli_format("Updating %s..." % package, cli.YELLOW),
                     file=file_)

            _pip.call("install",
                      package,
                      user=a.user,
                      quiet=not a.verbose,
                      no_cache=True,
                      upgrade=True,
                      output=file_)

            cli.echo("%s upto date." % cli_format(package, cli.CYAN),
                     file=file_)
        else:
            requirements = sequencify(a.requirements or [])
            pipfile = sequencify(a.pipfile or [])

            if a.project:
                project = sequencify(a.project)

                logger.info("Detecting projects and its dependencies...")

                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    # Collect each project's requirement files and Pipfile
                    # (if any) into the overall work lists.
                    project = pool.imap_unordered(
                        partial(Project.from_path,
                                **{"depth_search": a.force}), project)
                    requirements += flatten(
                        map(lambda p: p.requirements, project))
                    pipfile += flatten(
                        map(lambda p: [p.pipfile] if p.pipfile else [],
                            project))

                logger.info("Updating projects %s..." % project)

            if requirements:
                logger.info("Detecting requirements...")

                if not a.no_included_requirements:
                    # Also pull in requirement files referenced by the
                    # given ones.
                    with parallel.no_daemon_pool(processes=a.jobs) as pool:
                        results = pool.imap_unordered(
                            get_included_requirements, requirements)
                        requirements += flatten(results)

                # De-duplicate before scanning.
                requirements = list(set(requirements))

                logger.info("Requirements found: %s" % requirements)

                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    results = pool.imap_unordered(
                        partial(
                            get_registry_from_requirements, **{
                                "sync": a.no_cache,
                                "jobs": a.jobs,
                                "only_packages": a.packages,
                                "file": file_,
                                "ignore_packages": a.ignore,
                                "resolve": a.resolve,
                                "latest": a.latest
                            }), requirements)
                    registries += results
            else:
                # No requirement files given: inspect each pip
                # environment directly.
                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    for registry in pool.imap_unordered(
                            partial(
                                get_registry_from_pip, **{
                                    "user": a.user,
                                    "sync": a.no_cache,
                                    "outdated": not a.all,
                                    "build_dependency_tree":
                                    a.format in _DEPENDENCY_FORMATS,
                                    "jobs": a.jobs,
                                    "only_packages": a.packages,
                                    "ignore_packages": a.ignore,
                                    "resolve": a.resolve,
                                    "latest": a.latest
                                }), pip_path):
                        registries.append(registry)

            logger.info("Updating registries: %s..." % registries)

            for registry in registries:
                update_registry(registry,
                                yes=a.yes,
                                user=a.user,
                                check=a.check,
                                latest=a.latest,
                                interactive=a.interactive,
                                verbose=a.verbose,
                                format_=a.format,
                                all=a.all,
                                filter_=a.packages,
                                file=file_,
                                raise_err=not a.ignore_error,
                                upgrade_type=a.upgrade_type)

            if pipfile:
                logger.info("Updating Pipfiles: %s..." % pipfile)

                cli.echo(cli_format(
                    "Updating Pipfiles: %s..." % ", ".join(pipfile),
                    cli.YELLOW),
                         file=file_)

                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    results = pool.imap_unordered(
                        partial(update_pipfile, **{"verbose": a.verbose}),
                        pipfile)

                    if builtins.all(results):
                        cli.echo(cli_format("Pipfiles upto date.",
                                            cli.GREEN),
                                 file=file_)

            if a.project and a.pull_request:
                errstr = '%s not found. Use %s or the environment variable "%s" to set value.'

                # Git identity is mandatory for committing.
                if not a.git_username:
                    raise ValueError(errstr %
                                     ("Git Username", "--git-username",
                                      getenvvar("GIT_USERNAME")))

                if not a.git_email:
                    raise ValueError(
                        errstr %
                        ("Git Email", "--git-email", getenvvar("GIT_EMAIL")))

                for p in project:
                    # Configure the committer identity for this repository.
                    popen("git config user.name %s" % a.git_username,
                          cwd=p.path)
                    popen("git config user.email %s" % a.git_email,
                          cwd=p.path)

                    _, output, _ = popen("git status -s",
                                         output=True,
                                         cwd=p.path)

                    if output:
                        # Working tree has changes: commit the updated
                        # dependency files on a timestamped branch and
                        # push it.
                        branch = get_timestamp_str(format_="%Y%m%d%H%M%S")
                        popen("git checkout -B %s" % branch,
                              quiet=not a.verbose,
                              cwd=p.path)

                        title = "fix(dependencies): Update dependencies to latest"
                        body = ""

                        # TODO: cross-check with "git add" ?
                        files = p.requirements + [p.pipfile]
                        popen("git add %s" % " ".join(files),
                              quiet=not a.verbose,
                              cwd=p.path)
                        popen("git commit -m '%s'" % title,
                              quiet=not a.verbose,
                              cwd=p.path)

                        popen("git push origin %s" % branch,
                              quiet=not a.verbose,
                              cwd=p.path)

                        # GitHub coordinates are mandatory for the PR call.
                        if not a.github_reponame:
                            raise ValueError(
                                errstr %
                                ("GitHub Reponame", "--github-reponame",
                                 getenvvar("GITHUB_REPONAME")))

                        if not a.github_username:
                            raise ValueError(
                                errstr %
                                ("GitHub Username", "--github-username",
                                 getenvvar("GITHUB_USERNAME")))

                        url = "/".join([
                            "https://api.github.com", "repos",
                            a.github_username, a.github_reponame, "pulls"
                        ])
                        headers = dict({
                            "Content-Type": "application/json",
                            "Authorization": "token %s" % a.github_access_token
                        })
                        data = dict(head="%s:%s" % (a.git_username, branch),
                                    base=a.target_branch,
                                    title=title,
                                    body=body)

                        # Although there's monkey patch support for the "requests"
                        # library, avoid using the "json" parameter which was
                        # added in requests 2.4.2+
                        response = req.post(url,
                                            data=json.dumps(data),
                                            headers=headers)

                        if response.ok:
                            response = response.json()
                            number = response["number"]

                            url = "/".join(
                                map(str, [
                                    "https://github.com", a.github_username,
                                    a.github_reponame, "pull", number
                                ]))

                            message = "Created a Pull Request at %s" % url
                            cli.echo(cli_format(message, cli.GREEN),
                                     file=file_)
                        else:
                            response.raise_for_status()
import os.path as osp

from {{ cookiecutter.slug }}.__attr__ import __name__ as NAME
from bpyutils.config import get_config_path
from bpyutils.util.system import pardir

# Filesystem locations used across the package.
PATH = dict()
# Package root: one directory above this file.
PATH["BASE"] = pardir(__file__, 1)
# Data files bundled alongside the package root.
PATH["DATA"] = osp.join(PATH["BASE"], "data")
# Per-user configuration directory for this package.
PATH["CACHE"] = get_config_path(NAME)
from tqdm import tqdm from bpyutils.config import get_config_path from bpyutils._compat import iterkeys from bpyutils.util.request import proxy_request, proxy_grequest from bpyutils.util.system import make_temp_dir, popen from bpyutils.util.string import safe_decode from bpyutils.util.array import chunkify from bpyutils.util.datetime import get_timestamp_str from bpyutils.util.environ import getenv from bpyutils.exception import PopenError from pipupgrade.__attr__ import __name__ as NAME from bpyutils import log, db PATH_CACHE = get_config_path(NAME) BASE_INDEX_URL = "https://pypi.org/simple" logger = log.get_logger(name = NAME, level = log.DEBUG) connection = db.get_connection(location = PATH_CACHE) def exception_handler(request, exception): logger.warning("Unable to load request: %s", exception) def run(*args, **kwargs): dir_path = PATH_CACHE # seed database... repo = osp.join(dir_path, "pipupgrade") if not osp.exists(repo):