def test_get_timestamp_str():
    assert check_datetime_format(get_timestamp_str(), _DEFAULT_TIMESTAMP_FORMAT)

    format_ = '%Y-%m-%d'
    assert check_datetime_format(get_timestamp_str(format_), format_)
    assert not check_datetime_format(get_timestamp_str(), format_)

    with pytest.raises(ValueError):
        assert check_datetime_format(get_timestamp_str(), format_, raise_err=True)
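# A minimal sketch of the helpers exercised by the test above, assuming they
# simply wrap datetime.strftime/strptime. Only the names _DEFAULT_TIMESTAMP_FORMAT,
# get_timestamp_str and check_datetime_format come from the test; everything
# else here is illustrative and may differ from pipupgrade's own implementation.
from datetime import datetime

_DEFAULT_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S"

def get_timestamp_str(format_=_DEFAULT_TIMESTAMP_FORMAT):
    # Render the current time using the requested strftime format.
    return datetime.now().strftime(format_)

def check_datetime_format(string, format_, raise_err=False):
    # Return True if `string` parses with `format_`, else False
    # (or raise ValueError when raise_err is set).
    try:
        datetime.strptime(string, format_)
        return True
    except ValueError:
        if raise_err:
            raise
        return False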
def command(pip_path=[], requirements=[], pipfile=[], project=None,
            pull_request=False, git_username=None, git_email=None,
            github_access_token=None, github_reponame=None, github_username=None,
            target_branch="master", latest=False, format="table", all=False,
            pip=False, self=False, jobs=1, user=False, check=False,
            interactive=False, yes=False, no_included_requirements=False,
            no_cache=False, no_color=True, output=None, force=False, verbose=False):
    if not verbose:
        logger.setLevel(log.NOTSET)

    logger.info("Environment: %s" % environment())

    cli.echo(cli_format("Checking...", cli.YELLOW))
    logger.info("Arguments Passed: %s" % locals())

    # Resolve the pip executables to use; fall back to every executable known to pipupgrade.
    pip_path = pip_path or []
    pip_path = [which(p) for p in pip_path] or _pip._PIP_EXECUTABLES

    logger.info("`pip` executables found: %s" % pip_path)
    logger.info("Using %s jobs..." % jobs)

    registries = []

    if pip:
        logger.info("Updating pip executables: %s" % " ".join(pip_path))
        with parallel.no_daemon_pool(processes=jobs) as pool:
            pool.map(
                partial(update_pip, **{"user": user, "quiet": not verbose}),
                pip_path)

    if self:
        package = __name__
        logger.info("Updating %s..." % package)
        cli.echo(cli_format("Updating %s..." % package, cli.YELLOW))
        _pip.call("install", package, user=user, quiet=not verbose,
                  no_cache=True, upgrade=True)
        cli.echo("%s up to date." % cli_format(package, cli.CYAN))
    else:
        if project:
            project = sequencify(project)
            requirements = requirements or []
            pipfile = pipfile or []

            logger.info("Detecting projects and their dependencies...")
            with parallel.no_daemon_pool(processes=jobs) as pool:
                project = pool.map(
                    partial(Project.from_path, **{"recursive_search": force}),
                    project)
                requirements += flatten(map(lambda p: p.requirements, project))
                pipfile += flatten(map(lambda p: [p.pipfile] if p.pipfile else [], project))

            logger.info("Updating projects %s..." % project)

        # Build a registry of packages per source: either a requirements file
        # or a pip executable's installed distributions.
        if requirements:
            logger.info("Detecting requirements...")
            if not no_included_requirements:
                with parallel.no_daemon_pool(processes=jobs) as pool:
                    results = pool.map(get_included_requirements, requirements)
                    requirements += flatten(results)

            logger.info("Requirements found: %s" % requirements)
            with parallel.no_daemon_pool(processes=jobs) as pool:
                results = pool.map(
                    partial(get_registry_from_requirements,
                            **{"sync": no_cache, "jobs": jobs}),
                    requirements)
                registries += results
        else:
            with parallel.no_daemon_pool(processes=jobs) as pool:
                results = pool.map(
                    partial(get_registry_from_pip, **{
                        "user": user,
                        "sync": no_cache,
                        "outdated": not all,
                        "build_dependency_tree": format in _DEPENDENCY_FORMATS,
                        "jobs": jobs
                    }),
                    pip_path)
                registries += results

        logger.info("Updating registries: %s..." % registries)

        # TODO: Tweaks within parallel.no_daemon_pool to run serially.
        if yes:
            with parallel.no_daemon_pool(processes=jobs) as pool:
                results = pool.map(
                    partial(update_registry, **{
                        "yes": yes, "user": user, "check": check,
                        "latest": latest, "interactive": interactive,
                        "verbose": verbose, "format_": format, "all": all
                    }),
                    registries)
        else:
            for registry in registries:
                update_registry(registry, yes=yes, user=user, check=check,
                                latest=latest, interactive=interactive,
                                verbose=verbose, format_=format, all=all)

        if pipfile:
            logger.info("Updating Pipfiles: %s..." % pipfile)
            cli.echo(cli_format("Updating Pipfiles: %s..." % ", ".join(pipfile), cli.YELLOW))
            with parallel.no_daemon_pool(processes=jobs) as pool:
                results = pool.map(
                    partial(update_pipfile, **{"verbose": verbose}),
                    pipfile)
                if builtins.all(results):
                    cli.echo(cli_format("Pipfiles up to date.", cli.GREEN))

        # Optionally commit the updated dependency files and open a GitHub Pull Request.
        if project and pull_request:
            errstr = '%s not found. Use %s or the environment variable "%s" to set value.'

            if not git_username:
                raise ValueError(errstr % ("Git Username", "--git-username",
                                           getenvvar("GIT_USERNAME")))
            if not git_email:
                raise ValueError(errstr % ("Git Email", "--git-email",
                                           getenvvar("GIT_EMAIL")))

            for p in project:
                popen("git config user.name %s" % git_username, cwd=p.path)
                popen("git config user.email %s" % git_email, cwd=p.path)

                _, output, _ = popen("git status -s", output=True, cwd=p.path)
                if output:
                    branch = get_timestamp_str(format_="%Y%m%d%H%M%S")
                    popen("git checkout -B %s" % branch, quiet=not verbose, cwd=p.path)

                    title = "fix(dependencies): Update dependencies to latest"
                    body = ""

                    # TODO: cross-check with "git add" ?
                    files = p.requirements + [p.pipfile]
                    popen("git add %s" % " ".join(files), quiet=not verbose, cwd=p.path)
                    popen("git commit -m '%s'" % title, quiet=not verbose, cwd=p.path)
                    popen("git push origin %s" % branch, quiet=not verbose, cwd=p.path)

                    if not github_reponame:
                        raise ValueError(errstr % ("GitHub Reponame", "--github-reponame",
                                                   getenvvar("GITHUB_REPONAME")))
                    if not github_username:
                        raise ValueError(errstr % ("GitHub Username", "--github-username",
                                                   getenvvar("GITHUB_USERNAME")))

                    url = "/".join([
                        "https://api.github.com", "repos",
                        github_username, github_reponame, "pulls"
                    ])
                    headers = dict({
                        "Content-Type": "application/json",
                        "Authorization": "token %s" % github_access_token
                    })
                    data = dict(head="%s:%s" % (git_username, branch),
                                base=target_branch, title=title, body=body)

                    # Although there's monkey patch support for the "requests"
                    # library, avoid using the "json" parameter which was
                    # added in requests 2.4.2+
                    response = req.post(url, data=json.dumps(data), headers=headers)

                    if response.ok:
                        response = response.json()
                        number = response["number"]
                        url = "/".join(map(str, [
                            "https://github.com", github_username,
                            github_reponame, "pull", number
                        ]))
                        message = "Created a Pull Request at %s" % url
                        cli.echo(cli_format(message, cli.GREEN))
                    else:
                        response.raise_for_status()
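# Illustrative usage of command() defined above: check requirements.txt (plus
# any files it includes) and update every outdated package without prompting,
# spreading the work over 4 processes. The chosen flag values are only an
# example; in the real package the function is driven by the CLI parser rather
# than called directly like this.
if __name__ == "__main__":
    command(requirements=["requirements.txt"],
            latest=True,
            jobs=4,
            yes=True)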
def _command(*args, **kwargs):
    a = to_params(kwargs)

    if not a.verbose:
        logger.setLevel(log.NOTSET)

    logger.info("Environment: %s" % environment())
    logger.info("Arguments Passed: %s" % locals())

    file_ = a.output
    if file_:
        logger.info("Writing to output file %s..." % file_)
        touch(file_)

    cli.echo(cli_format("Checking...", cli.YELLOW), file=file_)

    # Resolve the pip executables to use; fall back to every executable known to pipupgrade.
    pip_path = a.pip_path or []
    pip_path = [which(p) for p in pip_path] or _pip._PIP_EXECUTABLES

    logger.info("`pip` executables found: %s" % pip_path)
    logger.info("Using %s jobs..." % a.jobs)

    registries = []

    if a.pip:
        logger.info("Updating pip executables: %s" % " ".join(pip_path))
        with parallel.no_daemon_pool(processes=a.jobs) as pool:
            pool.imap_unordered(
                partial(update_pip, **{
                    "user": a.user, "quiet": not a.verbose, "file": file_
                }),
                pip_path)

    if a.self:
        package = __name__
        logger.info("Updating %s..." % package)
        cli.echo(cli_format("Updating %s..." % package, cli.YELLOW), file=file_)
        _pip.call("install", package, user=a.user, quiet=not a.verbose,
                  no_cache=True, upgrade=True)
        cli.echo("%s up to date." % cli_format(package, cli.CYAN), file=file_)
    else:
        requirements = sequencify(a.requirements or [])
        pipfile = sequencify(a.pipfile or [])

        if a.project:
            project = sequencify(a.project)

            logger.info("Detecting projects and their dependencies...")
            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                # Materialise the iterator so the projects can be traversed
                # more than once (requirements, pipfiles, pull requests).
                project = list(pool.imap_unordered(
                    partial(Project.from_path, **{"depth_search": a.force}),
                    project))
                requirements += flatten(map(lambda p: p.requirements, project))
                pipfile += flatten(map(lambda p: [p.pipfile] if p.pipfile else [], project))

            logger.info("Updating projects %s..." % project)

        # Build a registry of packages per source: either a requirements file
        # or a pip executable's installed distributions.
        if requirements:
            logger.info("Detecting requirements...")
            if not a.no_included_requirements:
                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    results = pool.imap_unordered(get_included_requirements, requirements)
                    requirements += flatten(results)

            requirements = list(set(requirements))
            logger.info("Requirements found: %s" % requirements)

            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                results = pool.imap_unordered(
                    partial(get_registry_from_requirements, **{
                        "sync": a.no_cache,
                        "jobs": a.jobs,
                        "only_packages": a.packages,
                        "file": file_,
                        "ignore_packages": a.ignore
                    }),
                    requirements)
                registries += results
        else:
            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                for registry in pool.imap_unordered(
                        partial(get_registry_from_pip, **{
                            "user": a.user,
                            "sync": a.no_cache,
                            "outdated": not a.all,
                            "build_dependency_tree": a.format in _DEPENDENCY_FORMATS,
                            "jobs": a.jobs,
                            "only_packages": a.packages,
                            "ignore_packages": a.ignore,
                        }),
                        pip_path):
                    registries.append(registry)

        logger.info("Updating registries: %s..." % registries)

        for registry in registries:
            update_registry(registry, yes=a.yes, user=a.user, check=a.check,
                            latest=a.latest, interactive=a.interactive,
                            verbose=a.verbose, format_=a.format, all=a.all,
                            filter_=a.packages, file=file_,
                            raise_err=not a.ignore_error,
                            upgrade_type=a.upgrade_type)

        if pipfile:
            logger.info("Updating Pipfiles: %s..." % pipfile)
            cli.echo(cli_format("Updating Pipfiles: %s..." % ", ".join(pipfile), cli.YELLOW),
                     file=file_)
            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                results = pool.imap_unordered(
                    partial(update_pipfile, **{"verbose": a.verbose}),
                    pipfile)
                if builtins.all(results):
                    cli.echo(cli_format("Pipfiles up to date.", cli.GREEN), file=file_)

        # Optionally commit the updated dependency files and open a GitHub Pull Request.
        if a.project and a.pull_request:
            errstr = '%s not found. Use %s or the environment variable "%s" to set value.'

            if not a.git_username:
                raise ValueError(errstr % ("Git Username", "--git-username",
                                           getenvvar("GIT_USERNAME")))
            if not a.git_email:
                raise ValueError(errstr % ("Git Email", "--git-email",
                                           getenvvar("GIT_EMAIL")))

            for p in project:
                popen("git config user.name %s" % a.git_username, cwd=p.path)
                popen("git config user.email %s" % a.git_email, cwd=p.path)

                _, output, _ = popen("git status -s", output=True, cwd=p.path)
                if output:
                    branch = get_timestamp_str(format_="%Y%m%d%H%M%S")
                    popen("git checkout -B %s" % branch, quiet=not a.verbose, cwd=p.path)

                    title = "fix(dependencies): Update dependencies to latest"
                    body = ""

                    # TODO: cross-check with "git add" ?
                    files = p.requirements + [p.pipfile]
                    popen("git add %s" % " ".join(files), quiet=not a.verbose, cwd=p.path)
                    popen("git commit -m '%s'" % title, quiet=not a.verbose, cwd=p.path)
                    popen("git push origin %s" % branch, quiet=not a.verbose, cwd=p.path)

                    if not a.github_reponame:
                        raise ValueError(errstr % ("GitHub Reponame", "--github-reponame",
                                                   getenvvar("GITHUB_REPONAME")))
                    if not a.github_username:
                        raise ValueError(errstr % ("GitHub Username", "--github-username",
                                                   getenvvar("GITHUB_USERNAME")))

                    url = "/".join([
                        "https://api.github.com", "repos",
                        a.github_username, a.github_reponame, "pulls"
                    ])
                    headers = dict({
                        "Content-Type": "application/json",
                        "Authorization": "token %s" % a.github_access_token
                    })
                    data = dict(head="%s:%s" % (a.git_username, branch),
                                base=a.target_branch, title=title, body=body)

                    # Although there's monkey patch support for the "requests"
                    # library, avoid using the "json" parameter which was
                    # added in requests 2.4.2+
                    response = req.post(url, data=json.dumps(data), headers=headers)

                    if response.ok:
                        response = response.json()
                        number = response["number"]
                        url = "/".join(map(str, [
                            "https://github.com", a.github_username,
                            a.github_reponame, "pull", number
                        ]))
                        message = "Created a Pull Request at %s" % url
                        cli.echo(cli_format(message, cli.GREEN), file=file_)
                    else:
                        response.raise_for_status()
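# A minimal sketch of the to_params helper used by _command above, assuming it
# merely wraps the keyword-argument dict so keys can be read as attributes
# (a.verbose instead of kwargs["verbose"]). The real pipupgrade helper may add
# defaults or validation on top of this.
class _Params(object):
    def __init__(self, kwargs):
        self.__dict__.update(kwargs)

def to_params(kwargs):
    # kwargs: the keyword arguments collected by _command(*args, **kwargs).
    return _Params(kwargs)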
def command(pip_path=[], requirements=[], pipfile=[], project=None,
            pull_request=False, git_username=None, git_email=None,
            github_access_token=None, github_reponame=None, github_username=None,
            target_branch="master", latest=False, self=False, user=False,
            check=False, interactive=False, yes=False, no_color=True, verbose=False):
    cli.echo(cli_format("Checking...", cli.YELLOW))

    pip_path = pip_path or []
    pip_path = [which(p) for p in pip_path] or _pip._PIP_EXECUTABLES

    registries = []

    if self:
        package = __name__
        _pip.call("install", package, user=user, quiet=not verbose,
                  no_cache=True, upgrade=True)
        cli.echo("%s up to date." % cli_format(package, cli.CYAN))
    else:
        if project:
            requirements = requirements or []
            pipfile = pipfile or []

            for i, p in enumerate(project):
                project[i] = Project(osp.abspath(p))
                requirements += project[i].requirements
                pipfile += [project[i].pipfile]

        if requirements:
            # First pass: validate the requirements files and pull in any
            # files they include (the list grows while iterating).
            for requirement in requirements:
                path = osp.realpath(requirement)
                if not osp.exists(path):
                    cli.echo(cli_format("{} not found.".format(path), cli.RED))
                    sys.exit(os.EX_NOINPUT)
                else:
                    requirements += _get_included_requirements(requirement)

            # Second pass: parse each requirements file into a Registry.
            for requirement in requirements:
                path = osp.realpath(requirement)
                if not osp.exists(path):
                    cli.echo(cli_format("{} not found.".format(path), cli.RED))
                    sys.exit(os.EX_NOINPUT)
                else:
                    packages = _pip.parse_requirements(requirement, session="hack")
                    registry = Registry(source=path, packages=packages)
                    registries.append(registry)
        else:
            for pip_ in pip_path:
                _, output, _ = _pip.call("list", outdated=True,
                                         format="json", pip_exec=pip_)
                packages = json.loads(output)
                registry = Registry(source=pip_, packages=packages, installed=True)
                registries.append(registry)
                # _pip.get_installed_distributions()
                # https://github.com/achillesrasquinha/pipupgrade/issues/13

        for registry in registries:
            source = registry.source
            packages = registry.packages

            table = Table(header=["Name", "Current Version", "Latest Version", "Home Page"])
            dinfo = []  # Information DataFrame

            for package in packages:
                package = Package(package)
                package.source = source
                package.installed = registry.installed

                if package.latest_version and package.current_version != package.latest_version:
                    diff_type = None
                    try:
                        diff_type = semver.difference(package.current_version,
                                                      package.latest_version)
                    except (TypeError, ValueError):
                        pass

                    table.insert([
                        cli_format(package.name, _SEMVER_COLOR_MAP.get(diff_type, cli.CLEAR)),
                        package.current_version or "na",
                        _cli_format_semver(package.latest_version, diff_type),
                        cli_format(package.home_page, cli.CYAN)
                    ])

                    package.diff_type = diff_type
                    dinfo.append(package)

                    if not registry.installed:
                        _update_requirements(package.source, package)

            stitle = "Installed Distributions (%s)" % source if registry.installed else source

            if not table.empty:
                string = table.render()

                cli.echo("\nSource: %s\n" % stitle)
                if not interactive:
                    cli.echo(string)
                    cli.echo()

                if not check:
                    packages = [p for p in dinfo if p.diff_type != "major" or latest]
                    npackages = len(packages)
                    spackages = pluralize("package", npackages)  # Packages "string"

                    query = "Do you wish to update %s %s?" % (npackages, spackages)

                    if npackages and (yes or interactive or cli.confirm(query, quit_=True)):
                        for i, package in enumerate(packages):
                            update = True

                            query = "%s (%s > %s)" % (
                                cli_format(package.name,
                                           _SEMVER_COLOR_MAP.get(package.diff_type, cli.CLEAR)),
                                package.current_version,
                                _cli_format_semver(package.latest_version, package.diff_type))

                            if interactive:
                                update = yes or cli.confirm(query)

                            if update:
                                cli.echo(cli_format(
                                    "Updating %s of %s %s: %s" % (
                                        i + 1, npackages, spackages,
                                        cli_format(package.name, cli.GREEN)),
                                    cli.BOLD))
                                _pip.call("install", package.name, pip_exec=package.source,
                                          user=user, quiet=not verbose,
                                          no_cache_dir=True, upgrade=True)

                                if not package.installed:
                                    _update_requirements(package.source, package)
            else:
                cli.echo("%s up to date." % cli_format(stitle, cli.CYAN))

        if pipfile:
            for pipf in pipfile:
                realpath = osp.realpath(pipf)
                basepath = osp.dirname(realpath)
                pipenv = which("pipenv", raise_err=True)
                popen("%s update" % pipenv, quiet=not verbose, cwd=basepath)
                cli.echo("%s up to date." % cli_format(realpath, cli.CYAN))

        if project and pull_request:
            errstr = '%s not found. Use %s or the environment variable "%s" to set value.'

            if not git_username:
                raise ValueError(errstr % ("Git Username", "--git-username",
                                           getenvvar("GIT_USERNAME")))
            if not git_email:
                raise ValueError(errstr % ("Git Email", "--git-email",
                                           getenvvar("GIT_EMAIL")))

            for p in project:
                popen("git config user.name %s" % git_username, cwd=p.path)
                popen("git config user.email %s" % git_email, cwd=p.path)

                _, output, _ = popen("git status -s", output=True, cwd=p.path)
                if output:
                    branch = get_timestamp_str(format_="%Y%m%d%H%M%S")
                    popen("git checkout -B %s" % branch, quiet=not verbose, cwd=p.path)

                    title = "fix(dependencies): Update dependencies to latest"
                    body = ""

                    # TODO: cross-check with "git add" ?
                    files = p.requirements + [p.pipfile]
                    popen("git add %s" % " ".join(files), quiet=not verbose, cwd=p.path)
                    popen("git commit -m '%s'" % title, quiet=not verbose, cwd=p.path)
                    popen("git push origin %s" % branch, quiet=not verbose, cwd=p.path)

                    if not github_reponame:
                        raise ValueError(errstr % ("GitHub Reponame", "--github-reponame",
                                                   getenvvar("GITHUB_REPONAME")))
                    if not github_username:
                        raise ValueError(errstr % ("GitHub Username", "--github-username",
                                                   getenvvar("GITHUB_USERNAME")))

                    url = "/".join([
                        "https://api.github.com", "repos",
                        github_username, github_reponame, "pulls"
                    ])
                    headers = dict({
                        "Content-Type": "application/json",
                        "Authorization": "token %s" % github_access_token
                    })
                    data = dict(head="%s:%s" % (git_username, branch),
                                base=target_branch, title=title, body=body)

                    # Although there's monkey patch support for the "requests"
                    # library, avoid using the "json" parameter which was
                    # added in requests 2.4.2+
                    response = req.post(url, data=json.dumps(data), headers=headers)

                    if response.ok:
                        response = response.json()
                        number = response["number"]
                        url = "/".join(map(str, [
                            "https://github.com", github_username,
                            github_reponame, "pull", number
                        ]))
                        message = "Created a Pull Request at %s" % url
                        cli.echo(cli_format(message, cli.GREEN))
                    else:
                        response.raise_for_status()
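# A plausible sketch of the semver colouring helpers referenced above, assuming
# _SEMVER_COLOR_MAP maps a semver difference type ("major", "minor", "patch")
# to a cli colour and _cli_format_semver simply colours the latest version with
# that colour. The real implementation may do more (e.g. highlight only the
# version component that changed); this relies on the cli/cli_format names
# already used throughout the functions above.
_SEMVER_COLOR_MAP = dict(
    major=cli.RED,
    minor=cli.YELLOW,
    patch=cli.GREEN,
)

def _cli_format_semver(version, diff_type):
    # Colour the latest version according to how far it is from the current one.
    return cli_format(version, _SEMVER_COLOR_MAP.get(diff_type, cli.CLEAR))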
def run(*args, **kwargs):
    dir_path = PATH["CACHE"]

    # seed database...
    repo = osp.join(dir_path, "pipupgrade")
    if not osp.exists(repo):
        github_username = getenv("JOBS_GITHUB_USERNAME", raise_err=True)
        github_oauth_token = getenv("JOBS_GITHUB_OAUTH_TOKEN", raise_err=True)

        popen("git clone https://%s:%s@github.com/achillesrasquinha/pipupgrade %s" %
              (github_username, github_oauth_token, repo), cwd=dir_path)
        popen("git config user.email '*****@*****.**'", cwd=repo)
        popen("git config user.name 'pipupgrade bot'", cwd=repo)
    else:
        try:
            popen("git pull origin master", cwd=repo)
        except PopenError:
            logger.warn("Unable to pull latest branch")

    deptree = Dict()
    path_deptree = osp.join(repo, "data", "dependencies.json.gz")
    if osp.exists(path_deptree):
        with gzip.open(path_deptree) as f:
            content = f.read()
            deptree = Dict(json.loads(content))

    with make_temp_dir() as dir_path:
        chunk_size = kwargs.get("chunk_size", 1000)
        index_url = kwargs.get("index_url", BASE_INDEX_URL)

        logger.info("Fetching Package List...")

        res = proxy_request("GET", index_url, stream=True)
        res.raise_for_status()

        html = ""
        for content in res.iter_content(chunk_size=1024):
            html += safe_decode(content)

        soup = BeautifulSoup(html, 'html.parser')
        packages = list(filter(lambda x: x not in deptree,
                               map(lambda x: x.text, soup.findAll('a'))))
        logger.info("%s packages found." % len(packages))

        package_chunks = list(chunkify(packages, chunk_size))
        for package_chunk in tqdm(package_chunks):
            requestsmap = (proxy_grequest("GET", "https://pypi.org/pypi/%s/json" % package)
                           for package in package_chunk)
            responses = grequests.map(requestsmap, exception_handler=exception_handler)

            for response in responses:
                if response.ok:
                    data = response.json()
                    package = data["info"]["name"]
                    releases = list(filter(lambda x: x not in deptree[package],
                                           iterkeys(data["releases"])))
                    release_chunks = chunkify(releases, 100)

                    for release_chunk in release_chunks:
                        requestsmap = (proxy_grequest("GET",
                                           "https://pypi.org/pypi/%s/%s/json" % (package, release))
                                       for release in release_chunk)
                        responses = grequests.map(requestsmap,
                                                  exception_handler=exception_handler)

                        for response in responses:
                            if response.ok:
                                data = response.json()
                                version = data["info"]["version"]
                                requires = data["info"]["requires_dist"]

                                deptree[package][version] = requires

                                # query = """
                                #     INSERT OR IGNORE INTO `tabPackageDependency`
                                #     (name, version, requires)
                                #     VALUES
                                #     (?, ?, ?)
                                # """
                                # values = (
                                #     package,
                                #     version,
                                #     ",".join(requires) if requires else "NULL"
                                # )
                                # connection.query(query, values)
                            else:
                                logger.info("Unable to load URL: %s" % response.url)
                else:
                    logger.info("Unable to load URL: %s" % response.url)

            with gzip.open(path_deptree, mode="wt") as f:
                content = json.dumps(deptree)
                f.write(content)

            popen("git add %s" % path_deptree, cwd=repo)
            popen("git commit --allow-empty -m '[skip ci]: Update database - %s'" %
                  get_timestamp_str(), cwd=repo)
            popen("git push origin master", cwd=repo)
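# Minimal sketches of two helpers used by run() above, under stated assumptions:
# chunkify is assumed to slice an iterable into fixed-size pieces, and
# exception_handler is assumed to be a standard grequests exception handler
# (called with the failed request and the exception). pipupgrade's own
# implementations may differ.
def chunkify(iterable, size):
    # Yield successive `size`-sized chunks of `iterable`.
    items = list(iterable)
    for i in range(0, len(items), size):
        yield items[i:i + size]

def exception_handler(request, exception):
    # grequests invokes this for requests that failed outright; log and move on.
    logger.info("Request to %s failed: %s" % (request.url, exception))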