def update_pipfile(pipfile, verbose=False):
    if not verbose:
        logger.setLevel(log.NOTSET)

    realpath = osp.realpath(pipfile)
    basepath = osp.dirname(realpath)

    logger.info("Searching for `pipenv`...")

    pipenv = which("pipenv")

    if not pipenv:
        logger.info("Attempting to install pipenv...")
        _pip.call("install", "pipenv")

        pipenv = which("pipenv", raise_err=True)

    logger.info("`pipenv` found.")

    code = popen("%s update" % pipenv, quiet=not verbose, cwd=basepath)

    return code == 0
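
# A minimal usage sketch (the Pipfile path below is illustrative): the helper
# locates `pipenv` (installing it via pip if missing) and runs `pipenv update`
# in the Pipfile's directory, returning True on a zero exit code.
#
#   updated = update_pipfile("/path/to/project/Pipfile", verbose=True)
#   if not updated:
#       logger.warning("pipenv update failed.")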
def call(*args, **kwargs):
    pip_exec  = kwargs.pop("pip_exec", None) or _PIP_EXECUTABLE
    quiet     = kwargs.pop("quiet",    None) or False
    output    = kwargs.pop("output",   None) or False
    # default to True, but honor an explicit raise_err=False from the caller.
    raise_err = kwargs.pop("raise_err", True)

    params = sequencify(pip_exec) + sequencify(args)

    for flag, value in iteritems(kwargs):
        if value != False:
            flag = "--%s" % kebab_case(flag, delimiter="_")
            params.append(flag)

            if not isinstance(value, bool):
                value = value_to_envval(value)
                params.append(value)

    output = output or quiet
    output = popen(*params, output=output, raise_err=raise_err)

    return output
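
# A rough sketch of how keyword arguments are translated into pip flags
# (the package name below is illustrative, not part of this module):
#
#   call("install", "requests", user=True, upgrade=True, quiet=True)
#
# builds a parameter list roughly equivalent to
#
#   <pip executable> install requests --user --upgrade
#
# Boolean kwargs contribute only the flag; any other value is appended after
# its flag via value_to_envval. quiet=True makes popen capture the output
# instead of streaming it.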
def run(*args, **kwargs):
    logger.info("Fetching Proxies...")
    fetch_proxies(fname = "proxies-all")

    loop = asyncio.get_event_loop()
    # proxies = asyncio.Queue()
    # broker  = Broker(proxies)
    #
    # broker._resolver = CustomResolver(loop = loop)
    # tasks = asyncio.gather(
    #     broker.find(types = ["HTTP", "HTTPS"], limit = 100),
    #     save_proxies(proxies)
    # )
    # loop.run_until_complete(tasks)

    logger.info("Committing Latest Proxy List...")

    with make_temp_dir() as dir_path:
        repo = osp.join(dir_path, "proxy-list")

        github_username    = getenv("JOBS_GITHUB_USERNAME",    raise_err = True)
        github_oauth_token = getenv("JOBS_GITHUB_OAUTH_TOKEN", raise_err = True)

        popen("git clone https://%s:%[email protected]/achillesrasquinha/proxy-list.git %s" %
            (github_username, github_oauth_token, repo), cwd = dir_path)

        popen("git config user.email '*****@*****.**'", cwd = repo)
        popen("git config user.name 'pipupgrade bot'",  cwd = repo)

        _write_proxies(repo, "proxies-all")

        logger.info("Checking Proxies...")
        check_proxies()

        _write_proxies(repo)
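
# For reference, this job expects GitHub credentials to be resolvable by
# getenv under the names used above (the exact lookup rules depend on getenv;
# values are placeholders):
#
#   JOBS_GITHUB_USERNAME    = <github username>
#   JOBS_GITHUB_OAUTH_TOKEN = <token that can push to achillesrasquinha/proxy-list>
#
# Both are interpolated into the HTTPS clone URL used above.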
def _write_proxies(repo, fname = "proxies"):
    proxies_path = osp.join(repo, "%s.csv" % fname)

    with open(proxies_path, "w") as f:
        f.write(PROXY_COLUMNS)
        f.write("\n")

        for row in connection.query("SELECT %s FROM `tabProxies`" % PROXY_COLUMNS):
            values = itervalues(row)
            data   = ",".join(map(str, values))

            f.write(data)
            f.write("\n")

    write(proxies_path, strip(read(proxies_path)))

    popen("git add %s" % proxies_path, cwd = repo)

    commit_message = "Update Proxy List: %s" % get_timestamp_str()
    popen("git commit --allow-empty -m '%s'" % commit_message, cwd = repo)
    popen("git push origin master", cwd = repo)

    logger.info("Proxy List up to date.")
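
# The generated <fname>.csv is a plain header-plus-rows dump of the
# `tabProxies` table: the first line is PROXY_COLUMNS verbatim, and each
# subsequent line is one row's values joined by commas (column names and
# values are whatever PROXY_COLUMNS and the table contain).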
def _command(*args, **kwargs):
    check_update_available()

    a = to_params(kwargs)

    if not a.verbose:
        logger.setLevel(log.NOTSET)

    logger.info("Environment: %s" % environment())
    logger.info("Arguments Passed: %s" % locals())

    if a.doctor:
        logger.info("Performing Diagnostics and attempting to fix.")

        if a.clean:
            path_config = get_config_path(name=NAME)
            paths       = [osp.join(path_config, "db.db")]

            for path in paths:
                remove(path)

        # check database and repair.
    else:
        if a.resolve:
            import_or_raise("mixology")
            import_or_raise("semver", name="poetry-semver")

            populate_db = import_handler("pipupgrade.pubgrub.populate_db")
            populate_db()

        file_ = a.output

        if file_:
            logger.info("Writing to output file %s..." % file_)
            touch(file_)

        cli.echo(cli_format("Checking...", cli.YELLOW), file=file_)

        pip_path = a.pip_path or []
        pip_path = [which(p) for p in pip_path] or _pip._PIP_EXECUTABLES

        logger.info("`pip` executables found: %s" % pip_path)
        logger.info("Using %s jobs..." % a.jobs)

        registries = []

        if a.pip:
            logger.info("Updating pip executables: %s" % " ".join(pip_path))

            with parallel.no_daemon_pool(processes=a.jobs) as pool:
                pool.imap_unordered(
                    partial(
                        update_pip, **{
                            "user":  a.user,
                            "quiet": not a.verbose,
                            "file":  file_
                        }),
                    pip_path)

        if a.self:
            package = NAME

            logger.info("Updating %s..." % package)
            cli.echo(cli_format("Updating %s..." % package, cli.YELLOW), file=file_)

            _pip.call("install", package, user=a.user, quiet=not a.verbose,
                no_cache=True, upgrade=True, output=file_)

            cli.echo("%s up to date." % cli_format(package, cli.CYAN), file=file_)
        else:
            requirements = sequencify(a.requirements or [])
            pipfile      = sequencify(a.pipfile or [])

            if a.project:
                project = sequencify(a.project)

                logger.info("Detecting projects and their dependencies...")

                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    project = pool.imap_unordered(
                        partial(Project.from_path, **{"depth_search": a.force}),
                        project)

                requirements += flatten(
                    map(lambda p: p.requirements, project))
                pipfile      += flatten(
                    map(lambda p: [p.pipfile] if p.pipfile else [], project))

                logger.info("Updating projects %s..." % project)

            if requirements:
                logger.info("Detecting requirements...")

                if not a.no_included_requirements:
                    with parallel.no_daemon_pool(processes=a.jobs) as pool:
                        results = pool.imap_unordered(
                            get_included_requirements, requirements)
                        requirements += flatten(results)

                requirements = list(set(requirements))

                logger.info("Requirements found: %s" % requirements)

                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    results = pool.imap_unordered(
                        partial(
                            get_registry_from_requirements, **{
                                "sync":            a.no_cache,
                                "jobs":            a.jobs,
                                "only_packages":   a.packages,
                                "file":            file_,
                                "ignore_packages": a.ignore,
                                "resolve":         a.resolve,
                                "latest":          a.latest
                            }),
                        requirements)

                    registries += results
            else:
                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    for registry in pool.imap_unordered(
                        partial(
                            get_registry_from_pip, **{
                                "user":                  a.user,
                                "sync":                  a.no_cache,
                                "outdated":              not a.all,
                                "build_dependency_tree": a.format in _DEPENDENCY_FORMATS,
                                "jobs":                  a.jobs,
                                "only_packages":         a.packages,
                                "ignore_packages":       a.ignore,
                                "resolve":               a.resolve,
                                "latest":                a.latest
                            }),
                        pip_path):
                        registries.append(registry)

            logger.info("Updating registries: %s..." % registries)

            for registry in registries:
                update_registry(registry,
                    yes=a.yes, user=a.user, check=a.check,
                    latest=a.latest, interactive=a.interactive,
                    verbose=a.verbose, format_=a.format, all=a.all,
                    filter_=a.packages, file=file_,
                    raise_err=not a.ignore_error,
                    upgrade_type=a.upgrade_type)

            if pipfile:
                logger.info("Updating Pipfiles: %s..." % pipfile)
                cli.echo(cli_format(
                    "Updating Pipfiles: %s..." % ", ".join(pipfile), cli.YELLOW),
                    file=file_)

                with parallel.no_daemon_pool(processes=a.jobs) as pool:
                    results = pool.imap_unordered(
                        partial(update_pipfile, **{"verbose": a.verbose}),
                        pipfile)

                    if builtins.all(results):
                        cli.echo(cli_format("Pipfiles up to date.", cli.GREEN), file=file_)

            if a.project and a.pull_request:
                errstr = '%s not found. Use %s or the environment variable "%s" to set a value.'

                if not a.git_username:
                    raise ValueError(errstr % ("Git Username", "--git-username",
                        getenvvar("GIT_USERNAME")))
                if not a.git_email:
                    raise ValueError(errstr % ("Git Email", "--git-email",
                        getenvvar("GIT_EMAIL")))

                for p in project:
                    popen("git config user.name %s"  % a.git_username, cwd=p.path)
                    popen("git config user.email %s" % a.git_email,    cwd=p.path)

                    _, output, _ = popen("git status -s", output=True, cwd=p.path)

                    if output:
                        branch = get_timestamp_str(format_="%Y%m%d%H%M%S")
                        popen("git checkout -B %s" % branch, quiet=not a.verbose, cwd=p.path)

                        title = "fix(dependencies): Update dependencies to latest"
                        body  = ""

                        # TODO: cross-check with "git add" ?
                        files = p.requirements + [p.pipfile]
                        popen("git add %s" % " ".join(files), quiet=not a.verbose, cwd=p.path)
                        popen("git commit -m '%s'" % title,   quiet=not a.verbose, cwd=p.path)
                        popen("git push origin %s" % branch,  quiet=not a.verbose, cwd=p.path)

                        if not a.github_reponame:
                            raise ValueError(errstr % ("GitHub Reponame", "--github-reponame",
                                getenvvar("GITHUB_REPONAME")))
                        if not a.github_username:
                            raise ValueError(errstr % ("GitHub Username", "--github-username",
                                getenvvar("GITHUB_USERNAME")))

                        url = "/".join([
                            "https://api.github.com", "repos",
                            a.github_username, a.github_reponame, "pulls"
                        ])

                        headers = dict({
                            "Content-Type":  "application/json",
                            "Authorization": "token %s" % a.github_access_token
                        })

                        data = dict(
                            head="%s:%s" % (a.git_username, branch),
                            base=a.target_branch,
                            title=title,
                            body=body
                        )

                        # Although there's monkey patch support for the "requests"
                        # library, avoid using the "json" parameter which was
                        # added in requests 2.4.2+
                        response = req.post(url, data=json.dumps(data), headers=headers)

                        if response.ok:
                            response = response.json()
                            number   = response["number"]

                            url = "/".join(map(str, [
                                "https://github.com",
                                a.github_username, a.github_reponame,
                                "pull", number
                            ]))

                            message = "Created a Pull Request at %s" % url
                            cli.echo(cli_format(message, cli.GREEN), file=file_)
                        else:
                            response.raise_for_status()
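
# For reference, the pull-request payload POSTed to the GitHub API above looks
# roughly like this (username, branch, and base are illustrative):
#
#   {
#     "head":  "<git-username>:20200101120000",
#     "base":  "<target-branch>",
#     "title": "fix(dependencies): Update dependencies to latest",
#     "body":  ""
#   }
#
# A successful response carries the new PR's "number", which is used to build
# the https://github.com/<user>/<repo>/pull/<number> URL echoed to the user.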
def run(*args, **kwargs):
    dir_path = PATH_CACHE

    # seed database...
    repo = osp.join(dir_path, "pipupgrade")

    if not osp.exists(repo):
        github_username    = getenv("JOBS_GITHUB_USERNAME",    prefix = NAME.upper(), raise_err = True)
        github_oauth_token = getenv("JOBS_GITHUB_OAUTH_TOKEN", prefix = NAME.upper(), raise_err = True)

        popen("git clone https://%s:%[email protected]/achillesrasquinha/pipupgrade %s" %
            (github_username, github_oauth_token, repo), cwd = dir_path)

        popen("git config user.email '*****@*****.**'", cwd = repo)
        popen("git config user.name 'pipupgrade bot'",  cwd = repo)
    else:
        try:
            popen("git pull origin master", cwd = repo)
        except PopenError:
            logger.warning("Unable to pull latest branch")

    deptree      = Dict()
    path_deptree = osp.join(repo, "data", "dependencies.json.gz")

    if osp.exists(path_deptree):
        with gzip.open(path_deptree) as f:
            content = f.read()
            deptree = Dict(json.loads(content))

    with make_temp_dir() as dir_path:
        chunk_size = kwargs.get("chunk_size", 1000)
        index_url  = kwargs.get("index_url",  BASE_INDEX_URL)

        logger.info("Fetching Package List...")

        res = proxy_request("GET", index_url, stream = True)
        res.raise_for_status()

        html = ""
        for content in res.iter_content(chunk_size = 1024):
            html += safe_decode(content)

        soup = BeautifulSoup(html, 'html.parser')

        packages = list(filter(lambda x: x not in deptree,
            map(lambda x: x.text, soup.findAll('a'))))

        logger.info("%s packages found." % len(packages))

        package_chunks = list(chunkify(packages, chunk_size))

        for package_chunk in tqdm(package_chunks):
            requestsmap = (
                proxy_grequest("GET", "https://pypi.org/pypi/%s/json" % package)
                for package in package_chunk
            )
            responses = grequests.map(requestsmap, exception_handler = exception_handler)

            for response in responses:
                if response.ok:
                    data    = response.json()
                    package = data["info"]["name"]

                    releases = list(filter(lambda x: x not in deptree[package],
                        iterkeys(data["releases"])))
                    release_chunks = chunkify(releases, 100)

                    for release_chunk in release_chunks:
                        requestsmap = (
                            proxy_grequest("GET", "https://pypi.org/pypi/%s/%s/json" % (package, release))
                            for release in release_chunk
                        )
                        responses = grequests.map(requestsmap, exception_handler = exception_handler)

                        for response in responses:
                            if response.ok:
                                data = response.json()

                                version  = data["info"]["version"]
                                requires = data["info"]["requires_dist"]

                                deptree[package][version] = requires

                                # query = """
                                #     INSERT OR IGNORE INTO `tabPackageDependency`
                                #         (name, version, requires)
                                #     VALUES
                                #         (?, ?, ?)
                                # """
                                # values = (
                                #     package,
                                #     version,
                                #     ",".join(requires) if requires else "NULL"
                                # )
                                # connection.query(query, values)
                            else:
                                logger.info("Unable to load URL: %s" % response.url)
                else:
                    try:
                        response.raise_for_status()
                    except Exception as e:
                        logger.info("response error: %s" % e)

    with gzip.open(path_deptree, mode = "wt") as f:
        content = json.dumps(deptree)
        f.write(content)

    popen("git add %s" % path_deptree, cwd = repo)
    popen("git commit --allow-empty -m '[skip ci]: Update database - %s'" % get_timestamp_str(), cwd = repo)
    popen("git push origin master", cwd = repo)
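
# The dependency tree persisted to data/dependencies.json.gz is a nested
# mapping of package -> version -> requires_dist as reported by PyPI
# (package names and version specifiers below are illustrative):
#
#   {
#     "requests": {
#       "2.22.0": ["chardet (<3.1.0,>=3.0.2)", "idna (<2.9,>=2.5)", ...],
#       "2.21.0": [...]
#     }
#   }
#
# requires_dist may also be null for releases that declare no dependencies.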