def pip_upgrade_all(line):
    """Attempt to upgrade all packages.

    Packages installed in the user site are upgraded with ``--user`` so
    they stay there; everything else is upgraded normally.
    """
    from pip import get_installed_distributions
    user_pkgs = set(d.project_name
                    for d in get_installed_distributions(user_only=True))
    # Renamed from `all` to avoid shadowing the builtin of the same name.
    all_pkgs = set(d.project_name for d in get_installed_distributions())
    for dist in all_pkgs - user_pkgs:
        do_pip(["install", "--upgrade", dist])
    for dist in user_pkgs:
        do_pip(["install", "--upgrade", "--user", dist])
def pipautoup(): processed = 0 total = len(pip.get_installed_distributions()) for dist in pip.get_installed_distributions(): try: call("pip install --upgrade " + dist.project_name, shell=True) print 'The package %s is upgrading....' % dist.project_name processed += 1 except: print 'Some error happend, debug and check for %s install...' % dist.project_name print "=======================================DONE!=================================" print "All together, there are %d packages." % total print "Upgraded %d."% processed return 0
def create_dependencies_tree_by_req_file_path(requirements_file_path, allow_missing=False):
    """Print (as JSON) the dependency tree of the packages listed in the
    given requirements file.

    Missing requirements either abort via sys.exit or, with
    allow_missing=True, are reported on stderr and skipped.
    """
    # Index every installed distribution (including global ones) and build
    # the full installed-distribution tree from it.
    pkgs = pip.get_installed_distributions(local_only=False, skip=[])
    dist_index = utils.build_dist_index(pkgs)
    dist_tree = utils.construct_tree(dist_index)

    # Read the requirement names out of the requirements file.
    with open(requirements_file_path, 'r') as requirements_file:
        required = get_requirements_list(requirements_file)

    installed = [p for p in dist_index]
    packages = []
    for req_name in required:
        if req_name.lower() in installed:
            packages.append(req_name)
        else:
            msg = 'Required package missing: ' + req_name.lower()
            if allow_missing:
                sys.stderr.write(msg + "\n")
            else:
                sys.exit(msg)

    package_tree = create_tree_of_packages_dependencies(
        dist_tree, packages, requirements_file_path, allow_missing)
    print(json.dumps(package_tree))
def _install_or_update(component_name, version, link, private, upgrade=False):
    """Install (or, with upgrade=True, update) a CLI component via pip.

    Raises IncorrectUsageError when no name is given and CLIError when the
    component is already present (without upgrade) or required private-PyPI
    environment variables are unset.
    """
    if not component_name:
        raise IncorrectUsageError('Specify a component name.')
    already_installed = [d for d in pip.get_installed_distributions(local_only=True)
                         if d.key == COMPONENT_PREFIX + component_name]
    if already_installed and not upgrade:
        raise CLIError("Component already installed.")

    version_no = '==' + version if version else ''
    options = ['--quiet', '--isolated', '--disable-pip-version-check']
    if upgrade:
        options.append('--upgrade')

    pkg_index_options = []
    if link:
        pkg_index_options += ['--find-links', link]
    if private:
        # Both the index URL and its host must be configured for a
        # private-PyPI install.
        if not PRIVATE_PYPI_URL:
            raise CLIError('{} environment variable not set.'
                           .format(PRIVATE_PYPI_URL_ENV_NAME))
        if not PRIVATE_PYPI_HOST:
            raise CLIError('{} environment variable not set.'
                           .format(PRIVATE_PYPI_HOST_ENV_NAME))
        pkg_index_options += ['--extra-index-url', PRIVATE_PYPI_URL,
                              '--trusted-host', PRIVATE_PYPI_HOST]

    pip.main(['install'] + options
             + [COMPONENT_PREFIX + component_name + version_no]
             + pkg_index_options)
def verify_dependencies(root_dir):
    """Check that every library required by owtf is installed; exit(1) with
    installation hints when any are missing."""
    # Get all the installed libraries
    # installed_libraries = {"tornado": "version"}
    installed_libraries = dict(
        (i.project_name, i.version) for i in pip.get_installed_distributions())
    # Get all the libraries required by owtf
    # owtf_libraries = ["tornado", "lxml"...]
    owtf_reqs = pip.req.parse_requirements(
        os.path.join(root_dir, "install", "owtf.pip"))
    owtf_libraries = [req.req.project_name for req in owtf_reqs]
    # Membership test directly on the dict -- no need for .keys().
    missing_libraries = [name for name in owtf_libraries
                         if name not in installed_libraries]
    # If there are missing libraries bail out :P
    if missing_libraries:
        print("The following python libraries seem missing : ")
        print(" %s\n" % (','.join(missing_libraries)))
        print("Haven't you run the install script? ")
        print(" %s\n" % ("python2 install/install.py"))
        print("If you are sure you ran the install script, "
              "install the missing libraries seperately")
        print(" %s\n" % ("pip install --upgrade -r install/owtf.pip"))
        exit(1)
def print_bug_report(message=''):
    """ Prints a usable bug report """
    separator = '\n' + ('-' * 69) + '\n'
    python_version = str(sys.version_info[:3])
    arguments = '\n'.join(arg for arg in sys.argv[1:])
    try:
        import pip
    except ImportError:
        packages = '`pip` not installed !'
    else:
        packages = '\n'.join(
            '{0} - {1}'.format(pkg.key, pkg.version)
            for pkg in pip.get_installed_distributions()
        )
    # Assemble the report first, then emit it in one call.
    report = (
        '```{0}Bug Report :\n'
        '`pockyt` has encountered an error ! '
        'Please submit this bug report at \n` {1} `.{0}'
        'Python Version : {2}{0}'
        'Installed Packages :\n{3}{0}'
        'Runtime Arguments :\n{4}{0}'
        'Error Message :\n{5}{0}```'
    ).format(separator, API.ISSUE_URL, python_version, packages,
             arguments, message or traceback.format_exc().strip())
    print(report)
def help_environment():
    """Print information about the Python and shell environment."""
    info = Options()
    #
    info.python = Options()
    info.python.version = '%d.%d.%d' % sys.version_info[:3]
    info.python.executable = sys.executable
    info.python.platform = sys.platform
    try:
        packages = []
        import pip
        for package in pip.get_installed_distributions():
            packages.append(
                Options(name=package.project_name, version=package.version))
        info.python.packages = packages
    except Exception:
        # Best-effort only: pip may be missing or its private API may have
        # changed. Narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit still propagate.
        pass
    #
    info.environment = Options()
    path = os.environ.get('PATH', None)
    if path is not None:
        info.environment['shell path'] = path.split(os.pathsep)
    info.environment['python path'] = sys.path
    #
    print('#')
    print('# Information About the Python and Shell Environment')
    print('#')
    print(str(info))
def getpacks():
    """Interactively install the dependencies listed in PACKAGES.

    Skipped entirely with --skip. Each missing package is confirmed with
    the user (auto-confirmed with --all); on a failed pip install a retry
    with sudo is offered on non-Windows systems.
    """
    if args.skip:
        return
    # First token of each distribution's str() is its project name.
    pl = [str(i).split(" ")[0] for i in pip.get_installed_distributions()]
    installed = False
    for pack in PACKAGES:
        if pack in pl:
            continue
        installed = True
        confirm = ("y" if args.all else "")
        while confirm not in ["y", "n"]:
            # `.rstrip()` after `.strip()` was redundant and has been dropped.
            confirm = cinput("Install dependency "+pack+"? (y/n) ").lower().strip()
        if confirm == "n":
            cprint(bcolors.YELLOW + "WARNING: Program may not run without this library.")
            continue
        if pip.main(["install", pack]) and os.name != "nt":
            # pip.main returned non-zero: offer an elevated retry.
            confirm = ("y" if args.all else "")
            while confirm not in ["y", "n"]:
                confirm = cinput(bcolors.RED + "Install failed, try again with elevated permissions? (y/n) ").lower().strip()
            if confirm == "n":
                cprint(bcolors.YELLOW + "WARNING: Program may not run without this library.")
                continue
            if not os.system("sudo pip3 install "+pack):
                pl.append(pack)
        else:
            pl.append(pack)
    if installed:
        for pack in PACKAGES:
            if pack not in pl:
                cprint(bcolors.DARKRED + "Failed to install dependency "+pack+".")
                installed = False
    if installed:
        # BUGFIX: user-facing typo "Sucessfully" corrected.
        cprint("Successfully installed all dependencies!")
def show_version_info_exit(out_file):
    """Write CLI, component, and Python version information to out_file,
    then exit with status 0."""
    from pip import get_installed_distributions
    installed_dists = get_installed_distributions(local_only=True)

    # Locate the CLI package itself, if installed.
    cli_info = next(({'name': dist.key, 'version': dist.version}
                     for dist in installed_dists
                     if dist.key == CLI_PACKAGE_NAME), None)
    if cli_info:
        print('{} ({})'.format(cli_info['name'], cli_info['version']),
              file=out_file)

    # One entry per installed component, sorted by short name.
    component_version_info = sorted(
        [{'name': dist.key.replace(COMPONENT_PREFIX, ''),
          'version': dist.version}
         for dist in installed_dists
         if dist.key.startswith(COMPONENT_PREFIX)],
        key=lambda x: x['name'])
    print(file=out_file)
    print('\n'.join('{} ({})'.format(c['name'], c['version'])
                    for c in component_version_info), file=out_file)
    print(file=out_file)
    print('Python ({}) {}'.format(platform.system(), sys.version),
          file=out_file)
    sys.exit(0)
def cli(dry_run, force, src_files):
    """Synchronize the environment with the given requirements files.

    Falls back to DEFAULT_REQUIREMENTS_FILE when no files are given;
    exits with status 2 on usage or merge errors.
    """
    if not src_files:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        else:
            msg = 'No requirement files given and no {} found in the current directory'
            log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
            sys.exit(2)

    # *.in files are compile inputs, not lock files -- warn or abort.
    if any(src_file.endswith('.in') for src_file in src_files):
        msg = ('Some input files have the .in extension, which is most likely an error and can '
               'cause weird behaviour. You probably meant to use the corresponding *.txt file?')
        if force:
            log.warning('WARNING: ' + msg)
        else:
            log.error('ERROR: ' + msg)
            sys.exit(2)

    requirements = flat_map(lambda src: pip.req.parse_requirements(src, session=True),
                            src_files)
    try:
        requirements = sync.merge(requirements, ignore_conflicts=force)
    except PipToolsError as exc:
        log.error(str(exc))
        sys.exit(2)

    to_install, to_uninstall = sync.diff(requirements,
                                         pip.get_installed_distributions())
    sync.sync(to_install, to_uninstall, verbose=True, dry_run=dry_run)
def get_package_information():
    """ Read all current installed packages """
    # Map each installed distribution's project name to its version.
    return {dist.project_name: dist.version
            for dist in pip.get_installed_distributions()}
def setUp(self): """ Download 'nvme-cli'. """ self.device = self.params.get('device', default='/dev/nvme0') self.disk = self.params.get('disk', default='/dev/nvme0n1') cmd = 'ls %s' % self.device if process.system(cmd, ignore_status=True) is not 0: self.skip("%s does not exist" % self.device) smm = SoftwareManager() if not smm.check_installed("nvme-cli") and not \ smm.install("nvme-cli"): self.skip('nvme-cli is needed for the test to be run') python_packages = pip.get_installed_distributions() python_packages_list = [i.key for i in python_packages] python_pkgs = ['nose', 'nose2', 'pep8', 'flake8', 'pylint', 'epydoc'] for py_pkg in python_pkgs: if py_pkg not in python_packages_list: self.skip("python package %s not installed" % py_pkg) url = 'https://codeload.github.com/linux-nvme/nvme-cli/zip/master' tarball = self.fetch_asset("nvme-cli-master.zip", locations=[url], expire='7d') archive.extract(tarball, self.teststmpdir) self.nvme_dir = os.path.join(self.teststmpdir, "nvme-cli-master") print os.listdir(self.nvme_dir) os.chdir(os.path.join(self.nvme_dir, 'tests')) msg = ['{'] msg.append(' \"controller\": \"%s\",' % self.device) msg.append(' \"ns1\": \"%s\",' % self.disk) msg.append(' \"log_dir\": \"%s\"' % self.outputdir) msg.append('}') with open('config.json', 'w') as config_file: config_file.write("\n".join(msg)) process.system("cat config.json")
def get_packages():
    """Return installed packages as [{'name': ..., 'version': ...}]
    sorted by name; empty list when pip is unavailable."""
    if not pip:
        return []
    packages = [{"name": dist.key, "version": dist.version}
                for dist in pip.get_installed_distributions()]
    return sorted(packages, key=lambda pkg: pkg['name'])
def get_os_distributions():
    """ Retrieves list of installed packages (pip) """
    return [dist.project_name
            for dist in pip.get_installed_distributions()]
def checkDependencies():
    """Dependency resolver based on a previously specified
    CONST_REQUIREMENTS_FILE.

    Reads "name==version" entries and, for each module that is neither
    installed via pip nor importable, asks the user whether to install
    the pinned version.

    Returns:
        bool: False if the user declined an installation, True otherwise.
    """
    if not args.ignore_deps:
        modules = []
        # `with` guarantees the file is closed even on a parse error.
        with open(CONST_REQUIREMENTS_FILE) as f:
            for line in f:
                if not line.find('#'):
                    # BUGFIX: was `break`, which silently ignored every
                    # requirement after the first comment line; skip the
                    # comment line instead.
                    continue
                modules.append([line[:line.index('=')],
                                (line[line.index('=') + 2:]).strip()])
        pip_dist = [dist.project_name.lower()
                    for dist in pip.get_installed_distributions()]
        for module in modules:
            if module[0].lower() not in pip_dist:
                try:
                    __import__(module[0])
                except ImportError:
                    if query_user_bool("Missing module %s."
                                       " Do you wish to install it?"
                                       % module[0]):
                        pip.main(['install',
                                  "%s==%s" % (module[0], module[1]),
                                  '--user'])
                    else:
                        return False
    return True
def exception_message():
    """Creates a message describing an unhandled exception."""
    msg = (
        "Oops! Cuckoo failed in an unhandled exception!\nSometimes bugs are "
        "already fixed in the development release, it is therefore "
        "recommended to retry with the latest development release available "
        "%s\nIf the error persists please open a new issue at %s\n\n" %
        (GITHUB_URL, ISSUES_PAGE_URL)
    )
    msg += "=== Exception details ===\n"
    msg += "Cuckoo version: %s\n" % CUCKOO_VERSION
    msg += "OS version: %s\n" % os.name
    msg += "Python version: %s\n" % sys.version.split()[0]
    git_version = os.path.join(CUCKOO_ROOT, ".git", "refs", "heads", "master")
    if os.path.exists(git_version):
        try:
            # `with` closes the handle; the original leaked it.
            with open(git_version, "rb") as fh:
                msg += "Git version: %s\n" % fh.read().strip()
        except Exception:
            # Best-effort only; narrowed from a bare `except:` so that
            # KeyboardInterrupt/SystemExit still propagate.
            pass
    try:
        import pip
        msg += "Modules: %s\n" % " ".join(sorted(
            "%s:%s" % (package.key, package.version)
            for package in pip.get_installed_distributions()
        ))
    except ImportError:
        pass
    msg += "\n"
    return msg
def get_all_available_modules(self):
    """Return the names of all sc_* modules shipped inside the installed
    swarmcommander package archive (empty list when not found)."""
    file_list = []
    sc_path = ""
    # Locate where pip placed the swarmcommander distribution.
    for dist in pip.get_installed_distributions():
        if dist.key == 'swarmcommander':
            sc_path = dist.location
            break
    if sc_path == "":
        print("Couldn't find where pip installed swarmcommander.")
        return file_list
    zf = zipfile.ZipFile(sc_path, 'r')
    for entry in zf.namelist():
        if entry.startswith("SwarmCommander/modules/sc_") and not entry.endswith(".pyc"):
            module_name = entry.replace("SwarmCommander/modules/sc_", "")
            module_name = module_name.replace(".py", "")
            # Sub-package entries keep only the top-level directory name.
            slash = module_name.rfind("/")
            if slash != -1:
                module_name = module_name[0:slash]
            if module_name not in file_list:
                file_list.append(module_name)
    return file_list
def list_components():
    """ List the installed components """
    _verify_not_dev()
    import pip
    components = [{'name': dist.key.replace(COMPONENT_PREFIX, ''),
                   'version': dist.version}
                  for dist in pip.get_installed_distributions(local_only=True)
                  if dist.key.startswith(COMPONENT_PREFIX)]
    return sorted(components, key=lambda c: c['name'])
def list_installed_modules():
    """Appends installed_modules with installed distributions."""
    # NOTE: extends the module-level `installed_modules` list in place,
    # so repeated calls accumulate entries.
    for dist in pip.get_installed_distributions():
        installed_modules.append((dist.key, dist.version))
    return sorted(installed_modules)
def Check(self):
    """Import self.module and verify its version against self.version.

    Raises MissingDependency when the import fails and WrongVersion when
    the discovered version is too old.
    """
    try:
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            module = __import__(self.module)
    except ImportError:
        raise MissingDependency
    # Version discovery: explicit attribute first, then the usual
    # module-level names, finally pip's distribution metadata.
    if self.attr and hasattr(module, self.attr):
        version = getattr(module, self.attr)
    else:
        for attr in ("__version__", "VERSION", "version"):
            if hasattr(module, attr):
                version = getattr(module, attr)
                break
        else:
            matches = [
                p.version
                for p in pip.get_installed_distributions()
                if str(p).startswith(self.name + " ")
            ]
            if matches:
                version = matches[0]
            else:
                raise Exception("Can't get version information")
    if not isinstance(version, basestring):
        version = ".".join(str(x) for x in version)
    if CompareVersion(version, self.version) < 0:
        raise WrongVersion
def check_pypi(self):
    """
    If the requirement is frozen to pypi, check for a new version.

    Prints one status line per non-URL requirement and removes each
    processed entry from self.reqs.
    """
    # Attach the installed distribution (if any) to each tracked requirement.
    for dist in pip.get_installed_distributions():
        name = dist.project_name
        if name in self.reqs.keys():
            self.reqs[name]["dist"] = dist
    pypi = xmlrpclib.ServerProxy("http://pypi.python.org/pypi")
    # BUGFIX: iterate over a snapshot -- the loop body deletes entries
    # from self.reqs, which raises RuntimeError on Python 3 when the live
    # view is being iterated.
    for name, req in list(self.reqs.items()):
        if req["url"]:
            continue  # skipping github packages.
        elif "dist" in req.keys():
            dist = req["dist"]
            available = pypi.package_releases(req["pip_req"].url_name)
            if not available:
                msg = "release is not on pypi (check capitalization and/or --extra-index-url)"
            elif available[0] != dist.version:
                msg = "{0} available".format(available[0])
            else:
                msg = "up to date"
                del self.reqs[name]
                continue
            pkg_info = "{dist.project_name} {dist.version}".format(dist=dist)
        else:
            msg = "not installed"
            pkg_info = name
        print("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
        del self.reqs[name]
def check_previous():
    """Abort with exit(1) when a pre-Apache 'airflow' package is installed."""
    installed = [pkg.project_name
                 for pkg in pip.get_installed_distributions()]
    if 'airflow' in installed:
        print("An earlier non-apache version of Airflow was installed, "
              "please uninstall it first. Then reinstall.")
        sys.exit(1)
def main():
    """Display every installed distribution, reusing a persistent cache."""
    dist_cache = read_cache()
    for dist in get_installed_distributions():
        display_dist(dist, cache=dist_cache)
    write_cache(dist_cache)
def print_bug_report(message=""):
    """ Prints a usable bug report """
    separator = "\n" + ("-" * 69) + "\n"
    python_version = str(sys.version_info[:3])
    arguments = "\n".join(arg for arg in sys.argv[1:])
    try:
        import pip
    except ImportError:
        packages = "`pip` not installed !"
    else:
        packages = "\n".join(
            "{0} - {1}".format(pkg.key, pkg.version)
            for pkg in pip.get_installed_distributions()
        )
    # Assemble the report first, then emit it in one call.
    report = (
        "```{0}Bug Report :\n"
        "`pockyt` has encountered an error ! "
        "Please submit this bug report at \n` {1} `.{0}"
        "Python Version : {2}{0}"
        "Installed Packages :\n{3}{0}"
        "Runtime Arguments :\n{4}{0}"
        "Error Message :\n{5}{0}```"
    ).format(separator, API.ISSUE_URL, python_version, packages,
             arguments, message or traceback.format_exc().strip())
    print(report)
def check_installed_packages():
    """Log every installed napalm-* package at debug level."""
    logger.debug("Gathering napalm packages")
    napalm_packages = sorted(
        "{}=={}".format(pkg.key, pkg.version)
        for pkg in pip.get_installed_distributions()
        if pkg.key.startswith("napalm"))
    for entry in napalm_packages:
        logger.debug(entry)
def pack_check(): ''' Check if packages are installed. ''' # Wrap in try statement to catch if 'pip' is not installed. try: import pip # check for globally installed packages. inst_packgs = pip.get_installed_distributions(local_only=False) inst_packgs_lst = ["%s" % (i.key) for i in inst_packgs] missing_pckg = [] for pckg in ['numpy', 'matplotlib', 'scipy', 'astroml', 'scikit-learn']: if pckg not in inst_packgs_lst: missing_pckg.append(pckg) if missing_pckg: print "ERROR: the following packages are missing:\n" for p in missing_pckg: print " - {}".format(p) sys.exit("\nInstall with: pip install <package>\n") except ImportError: # Python versions 2.7.7 onward apparently have 'pip' included by # default, so this check should become obsolete. print(" WARNING: 'pip' is not present. Can't check for installed" " packages.\n") # Return empty list. inst_packgs_lst = [] return inst_packgs_lst
def module_installed(name, version_string=None):
    """
    Test whether queried module is installed.

    Arguments:
        name (:obj:`str`): Name of module to query.
        version_string (:obj:`str`, optional): Specific module version to query.

    Returns:
        :obj:`bool`: ``True`` if queried module has a matching string in
            dictionary values returned by
            :func:`pip.get_installed_distributions`.
    """
    modules = get_installed_distributions()
    index = -1
    installed = False
    # Substring match on str(distribution) ("name version"); the last
    # match wins when several distributions contain `name`.
    for idx, module in enumerate(modules):
        if name in str(module):
            index = idx
            installed = True
    # BUGFIX: only consult modules[index] when a match was found.
    # Previously index stayed -1 for an absent module, so the version
    # check read the *last* distribution (or raised IndexError when the
    # list was empty).
    if installed and version_string:
        installed = str(version_string) in str(modules[index])
    return installed
def get_outdated():
    """Populate and return the module-level `outdated` list by fanning the
    installed packages out to a worker-thread pool."""
    global progress
    global q
    global done
    global outdated
    outdated = []
    progress = None
    done = []
    # Start up worker pool.
    q = Queue()
    # Ten concurrent connections are alright, I think.
    num_worker_threads = 10
    for _ in range(num_worker_threads):
        worker_thread = Thread(target=worker)
        worker_thread.daemon = True
        worker_thread.start()
    only_local_packages = False
    pkg_list = pip.get_installed_distributions(local_only=only_local_packages)
    widgets = [progressbar.SimpleProgress(), ' ', progressbar.Bar(),
               ' ', progressbar.ETA()]
    progress = progressbar.ProgressBar(widgets=widgets,
                                       maxval=len(pkg_list)).start()
    for pkg in pkg_list:
        q.put(pkg)
    q.join()
    progress.finish()
    return outdated
def check_imports():
    """Log the versions of RockPy3's dependencies.

    Prefers pip's distribution metadata; when pip itself is unavailable,
    falls back to importing each dependency directly.
    """
    # todo is pip always installed?
    try:
        import pip
        package_version = {pkg.key: pkg.version
                           for pkg in pip.get_installed_distributions()
                           if pkg.key in RockPy3.dependencies}
        for package in RockPy3.dependencies:
            if package not in package_version:
                RockPy3.logger.error('please install %s' % package)
            else:
                RockPy3.logger.info('using {: <12}: version {}'.format(
                    package, package_version[package]))
        if 'tabulate' not in package_version:
            RockPy3.logger.warning('Please install module tabulate for nicer output formatting.')
            RockPy3.tabulate_available = False
    except ImportError:
        def _report(name):
            # Import `name`, log its version, and complain when missing.
            # BUGFIX: the lmfit branch previously logged via RockPy3.log,
            # inconsistent with the RockPy3.logger used everywhere else.
            try:
                module = __import__(name)
                RockPy3.logger.info('using %s version %s'
                                    % (name, module.__version__))
            except ImportError:
                RockPy3.logger.error('please install %s version' % name)

        for name in ('matplotlib', 'lmfit', 'pint', 'numpy',
                     'scipy', 'decorator'):
            _report(name)
        try:
            import tabulate
            RockPy3.logger.info('using tabulate version {}'.format(
                tabulate.__version__))
            RockPy3.tabulate_available = True
        except ImportError:
            RockPy3.tabulate_available = False
            RockPy3.logger.warning('Please install module tabulate for nicer output formatting.')
def __create_dirs(self):
    """Copy Flask-Bootstrap4's static assets and the needed template files
    into HTML_STATIC.

    Returns:
        bool: False when resource files cannot be written, True otherwise.
    """
    try:
        import pip
        bootstrap_location = None
        for dist in pip.get_installed_distributions():
            if dist.project_name == 'Flask-Bootstrap4':
                bootstrap_location = dist.location
        # If Flask-Bootstrap4 was not found, bootstrap_location is None and
        # the join below raises TypeError (handled further down).
        project_path = '/'.join([bootstrap_location, 'flask_bootstrap', 'static'])
        copytree(project_path, self.HTML_STATIC)
    except ImportError as err:
        # BUGFIX: previously formatted the ImportError *class* instead of
        # the caught exception instance.
        print('CTS_ERROR:: %s' % err)
    except TypeError:
        print('Missing additional package: Flask-Bootstrap4==4.0.2\nReport will be dumped without proper style.')
    except OSError:
        pass
    try:
        for f in self.NEEDED_FILES:
            if not exists('/'.join((self.HTML_STATIC, f))):
                resource_package = __name__
                resource_path = '/'.join((self.HTML_TEMPLATE_DIR, f))
                template = pkg_resources.resource_string(resource_package,
                                                         resource_path)
                with open('/'.join((self.HTML_STATIC, f)), 'w') as resource:
                    resource.write(template)
    except IOError:
        print("CTS can't create file or directory. Do you've appropriate permissions?")
        return False
    return True
def can_use_gpu():
    # Check that 'tensorflow-gpu' is installed on the current code-env
    import pip
    installed_names = [pkg.project_name
                       for pkg in pip.get_installed_distributions()]
    return "tensorflow-gpu" in installed_names
def verify_packages():
    """Build every CLI package, install the CLI plus all command modules,
    and verify the installation and the produced wheels; exits non-zero on
    any failure."""
    # tmp dir to store all the built packages
    built_packages_dir = tempfile.mkdtemp()
    all_modules = automation_path.get_all_module_paths()
    all_command_modules = automation_path.get_command_modules_paths(include_prefix=True)

    # STEP 1:: Build the packages
    for name, path in all_modules:
        build_package(path, built_packages_dir)

    # STEP 2:: Install the CLI and dependencies
    azure_cli_modules_path = next(path for name, path in all_modules
                                  if name == 'azure-cli')
    install_package(azure_cli_modules_path, 'azure-cli', built_packages_dir)
    # Install the remaining command modules
    for name, fullpath in all_command_modules:
        install_package(fullpath, name, built_packages_dir)

    # STEP 3:: Validate the installation
    try:
        az_output = subprocess.check_output(['az', '--debug'],
                                            stderr=subprocess.STDOUT,
                                            universal_newlines=True)
        success = 'Error loading command module' not in az_output
        print(az_output, file=sys.stderr)
    except subprocess.CalledProcessError as err:
        success = False
        print(err, file=sys.stderr)
    if not success:
        print_heading('Error running the CLI!', f=sys.stderr)
        sys.exit(1)

    # Refresh pip's view of installed distributions before querying it.
    pip.utils.pkg_resources = imp.reload(pip.utils.pkg_resources)
    installed_command_modules = [
        dist.key
        for dist in pip.get_installed_distributions(local_only=True)
        if dist.key.startswith(COMMAND_MODULE_PREFIX)]
    print('Installed command modules', installed_command_modules)
    missing_modules = (set(name for name, fullpath in all_command_modules)
                       - set(installed_command_modules))
    if missing_modules:
        print_heading('Error: The following modules were not installed successfully', f=sys.stderr)
        print(missing_modules, file=sys.stderr)
        sys.exit(1)

    # STEP 4:: Verify the wheels that get produced
    print_heading('Verifying wheels...')
    # Verify all non-nspkg wheels
    invalid_wheels = [
        wheel_path
        for wheel_path in glob.glob(os.path.join(built_packages_dir, '*.whl'))
        if 'nspkg' not in wheel_path and not _valid_wheel(wheel_path)]
    if invalid_wheels:
        print_heading('Error: The following wheels are invalid', f=sys.stderr)
        print(invalid_wheels, file=sys.stderr)
        print(VALID_WHEEL_HELP, file=sys.stderr)
        sys.exit(1)
    print_heading('Verified wheels successfully.')
    print_heading('OK')
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. This is the script for checking the owtf pip dependencies """ import os import uuid try: # Is pip even there? import pip # We do this in order to check for really old versions of pip pip.get_installed_distributions() except ImportError: print("We recommend you run install script before launching owtf" "for first time") print(" python2 install/install.py") exit(1) def verify_dependencies(root_dir): # Get all the installed libraries # installed_libraries = {"tornado": "version"} installed_libraries = dict( (i.project_name, i.version) for i in pip.get_installed_distributions()) # Get all the libraries required by owtf # owtf_libraries = ["tornado", "lxml"...]
DJANGO_RECAPTCHA_INSTALLED = False DJANGO_SIMPLE_CAPTCHA_INSTALLED = False try: from captcha.fields import CaptchaField, CaptchaTextInput DJANGO_SIMPLE_CAPTCHA_INSTALLED = True except ImportError as err: # Logging original exception logger.error(err) # Trying to identify the problem try: import pip installed_packages = pip.get_installed_distributions() for installed_package in installed_packages: if "django-recaptcha" == str(installed_package.key): DJANGO_RECAPTCHA_INSTALLED = True logger.error( "You have installed the `django-recaptcha` in your " "environment. At the moment you can't have both " "`django-recaptcha` and `django-simple-captcha` installed " "alongside due to app name collision (captcha). Remove " "both packages using pip uninstall and reinstall the " "`django-simple-captcha` if you want to make use of the " "`fobi.contrib.plugins.form_elements.security.captcha` " "package." ) if "django-simple-captcha" == str(installed_package.key): DJANGO_SIMPLE_CAPTCHA_INSTALLED = True
#This is a little test script to figure out which directories pip install libraries are located. import pip for package in pip.get_installed_distributions(): name = package.project_name # SQLAlchemy, Django, Flask-OAuthlib key = package.key # sqlalchemy, django, flask-oauthlib module_name = package._get_metadata( "top_level.txt") # sqlalchemy, django, flask_oauthlib location = package.location # virtualenv lib directory etc. version = package.version # version number print name #print key print " " + str(module_name) print " " + str(location) print " " + str(version)
def pip_version_check(path):
    """Compare installed package versions against the requirements file at
    `path`; print a report and exit(54) when any requirement is unmet."""
    installed_packages = dict()
    for dist in pip.get_installed_distributions(local_only=False):
        installed_packages[dist.project_name.lower()] = dist.version

    # Raw string: avoids deprecated/ambiguous escape sequences in the pattern.
    p = re.compile(
        r'\s*(?P<package>[a-zA-Z0-9_.]+)(?P<condition>[<=>]{2}|[<>]{1})(?P<version>\S+)'
    )

    unmet_requirements = []
    with open(path, "r") as rf:
        for requirement in rf.readlines():
            match = p.match(requirement.strip())
            # BUGFIX: was `if match is not None: continue`, which skipped
            # every parseable requirement and then crashed with
            # AttributeError on match.group() for unparseable lines.
            if match is None:
                continue
            package = match.group('package').lower()
            version = match.group('version')
            condition = match.group('condition')
            if package in installed_packages:
                pass
            elif package.replace('_', '-') in installed_packages:
                # Requirements may use underscores where pip reports dashes.
                package = package.replace('_', '-')
            else:
                unmet_requirements.append([requirement, ""])
                continue
            installed_version = installed_packages[package]
            check = version_number_compare(installed_version, version)
            if condition == "<":
                if check >= 0:
                    unmet_requirements.append([requirement, installed_version])
            elif condition == "<=":
                if check > 0:
                    unmet_requirements.append([requirement, installed_version])
            elif condition == "==":
                if check != 0:
                    unmet_requirements.append([requirement, installed_version])
            elif condition == ">=":
                if check < 0:
                    unmet_requirements.append([requirement, installed_version])
            elif condition == ">":
                if check <= 0:
                    unmet_requirements.append([requirement, installed_version])

    if unmet_requirements:
        print("Some GlobaLeaks requirements are unmet\n")
        print("Unmet requirements:")
        for unmet_requirement_desc in unmet_requirements:
            if unmet_requirement_desc[1]:
                print("\t", unmet_requirement_desc[0] + " [ Installed",
                      unmet_requirement_desc[1], "]")
            else:
                print("\t", unmet_requirement_desc[0])
        print("\n")
        print("The problem can be solved by:")
        print(
            "1) Following the guidelines at https://github.com/globaleaks/GlobaLeaks/wiki"
        )
        print(
            "2) Installing missing requirements using rm -rf /tmp/pip-build-root/ && pip install -r /usr/share/globaleaks/requirements.txt"
        )
        sys.exit(54)
# -*- coding: utf-8 -*-
import pip
from subprocess import call

# Upgrade each installed distribution in turn via sudo pip.
for distribution in pip.get_installed_distributions():
    call("sudo pip install --upgrade " + distribution.project_name, shell=True)
def installed_poppy_creatures_packages():
    """Names of installed 'poppy-*' packages, excluding 'poppy-creature'."""
    keys = (pkg.key for pkg in pip.get_installed_distributions())
    return [key for key in keys
            if key.startswith('poppy-') and key != 'poppy-creature']
import pip

# Collect every locally installed distribution, de-duplicated.
modules = set()
for dist in pip.get_installed_distributions(local_only=True):
    print(dist)
    modules.add(dist)
modules = sorted(modules)
def main():
    """CLI entry point: render the dependency tree of the installed
    Python packages; returns 0."""
    parser = argparse.ArgumentParser(
        description=('Dependency tree of the installed python packages'))
    parser.add_argument('-f', '--freeze', action='store_true',
                        help='Print names so as to write freeze files')
    parser.add_argument('-a', '--all', action='store_true',
                        help='list all deps at top level')
    parser.add_argument('-l', '--local-only', action='store_true',
                        help=('If in a virtualenv that has global access '
                              'donot show globally installed packages'))
    parser.add_argument('-w', '--nowarn', action='store_true',
                        help=('Inhibit warnings about possibly '
                              'confusing packages'))
    args = parser.parse_args()

    default_skip = ['setuptools', 'pip', 'python', 'distribute']
    skip = default_skip + ['pipdeptree']
    pkgs = pip.get_installed_distributions(local_only=args.local_only,
                                           skip=skip)
    pkg_index = dict((p.key, p) for p in pkgs)
    req_map = dict((p, p.requires()) for p in pkgs)

    # show warnings about possibly confusing deps if found and
    # warnings are enabled
    if not args.nowarn:
        confusing = confusing_deps(req_map)
        if confusing:
            print('Warning!!! Possible confusing dependencies found:',
                  file=sys.stderr)
            for xs in confusing:
                for i, (p, d) in enumerate(xs):
                    if d.key in skip:
                        continue
                    pkg = top_pkg_name(p)
                    req = non_top_pkg_name(d, pkg_index[d.key])
                    tmpl = ' {0} -> {1}' if i > 0 else '* {0} -> {1}'
                    print(tmpl.format(pkg, req), file=sys.stderr)
            print('-' * 72, file=sys.stderr)
        is_empty, cyclic = peek_into(cyclic_deps(pkgs, pkg_index))
        if not is_empty:
            print('Warning!!! Cyclic dependencies found:', file=sys.stderr)
            for xs in cyclic:
                print('- {0}'.format(xs), file=sys.stderr)
            print('-' * 72, file=sys.stderr)

    # Freeze mode uses "name==version" renderers; default uses plain names.
    if args.freeze:
        top_pkg_str, non_top_pkg_str = top_pkg_src, non_top_pkg_src
    else:
        top_pkg_str, non_top_pkg_str = top_pkg_name, non_top_pkg_name
    tree = render_tree(pkgs, pkg_index=pkg_index, req_map=req_map,
                       list_all=args.all, top_pkg_str=top_pkg_str,
                       non_top_pkg_str=non_top_pkg_str,
                       bullets=not args.freeze)
    print(tree)
    return 0
def main():
    # Entry point: build the CLI, resolve distribution URLs, load the
    # python-name -> debian-name mapping, collect the packages to check,
    # and hand everything to EnvironmentBuilder.
    parser = argparse.ArgumentParser(
        description=
        'Build a Python virtual env using the versions also available as '
        'official Debian or Ubuntu packages')
    parser.add_argument(
        '-u', '--URL', default=[], action='append',
        help='"wheezy", "xenial-updates", ... or any URL '
        'like https://packages.debian.org/stretch/. Known distributions: "%s", "%s"'
        % ('", "'.join(ubuntu_distribs), '", "'.join(debian_distribs)))
    parser.add_argument('-M', '--defaultmap',
                        help='Use name mapping for well-known packages',
                        action='store_true', default=False)
    parser.add_argument('-P', '--only-python-version',
                        help='Only print the available Python version',
                        action='store_true', default=False)
    parser.add_argument(
        '-m', '--mapfile',
        help='mapping file between Python package names and Debian ones:'
        ' each line is like "python-package-name=debian-package-name".'
        'Otherwise, use the default Debianized name ("python[3]-package-name"). '
        'Add"python-package-name=" to ignore this package',
        default=None)
    parser.add_argument('-p', '--python',
                        help='Python version: "2" or "3" (default: "%s")'
                        % sys.version_info[0],
                        default=str(sys.version_info[0]))
    parser.add_argument('-r', '--requirements',
                        help='Requirements file (otherwise use "pip list")',
                        default=None)
    args = parser.parse_args()

    # Expand each -u value: a known codename becomes the matching
    # packages.ubuntu.com / packages.debian.org URL; anything starting with
    # "http" is taken verbatim; everything else is reported and dropped.
    base_urls = []
    for url in args.URL:
        if url in ubuntu_distribs:
            base_urls.append('http://packages.ubuntu.com/%s/' % url)
        elif url in debian_distribs:
            base_urls.append('https://packages.debian.org/%s/' % url)
        elif url.startswith('http'):
            base_urls.append(url)
        else:
            print('Invalid URL: %s' % url)
            print('Known default values: %s, %s' %
                  (', '.join(ubuntu_distribs), ', '.join(debian_distribs)))

    # Name mapping: optional built-in defaults first, then the user mapfile
    # ("python-name=debian-name" per line; names starting with '#' are
    # treated as comments; lines without '=' are ignored).
    package_mapping = CaseInsensitiveDict()
    if args.defaultmap:
        package_mapping.update(default_map)
    if args.mapfile:
        with codecs.open(args.mapfile, 'r', encoding='utf-8') as fd:
            for line in fd:
                python_name, sep, debian_name = line.partition('=')
                if sep != '=':
                    continue
                python_name = python_name.strip()
                if not python_name.startswith('#'):
                    package_mapping[python_name] = debian_name.strip()

    # Packages to resolve: everything currently installed, or the entries
    # of the supplied requirements file.
    required_packages = []
    if args.requirements is None:
        for r in get_installed_distributions():
            assert isinstance(r, Distribution)
            required_packages.append(r.project_name)
    else:
        for r in parse_requirements(args.requirements, session=PipSession()):
            assert isinstance(r, InstallRequirement)
            required_packages.append(r.name)

    builder = EnvironmentBuilder(base_urls, python_version=args.python,
                                 package_mapping=package_mapping,
                                 required_packages=required_packages)
    if args.only_python_version:
        builder.print_python_version()
    else:
        builder.print_requirements()
def rdeps(pkg_name):
    """Return the sorted project names of every installed distribution
    that lists *pkg_name* among its requirements (reverse dependencies)."""
    dependants = []
    for dist in pip.get_installed_distributions():
        required_names = [req.project_name for req in dist.requires()]
        if pkg_name in required_names:
            dependants.append(dist.project_name)
    dependants.sort()
    return dependants
def is_package(origin):
    """Return True when *origin* equals an installed distribution's
    project name with dashes normalised to underscores."""
    return any(
        dist.project_name.replace('-', '_') == origin
        for dist in pip.get_installed_distributions()
    )
action='store_true', default=False, help='list newer versions on pypi than the installed version') parser.add_argument( '-p', '--package_names', dest='package_names', nargs='*', metavar='package_name', help= 'instead of all installed packages, show the meta information only for a specified list of packages' ) args = parser.parse_args() packages = args.package_names if args.package_names else [ dist.project_name for dist in pip.get_installed_distributions() ] for package_name in packages: try: pypi_package = PypiPackage(package_name) list_of_info_to_display = [pypi_package.package_name_version] if args.show_installed_license: list_of_info_to_display.append(' Licence(Installed): %s' % pypi_package.installed_license) if args.show_pypi_license: list_of_info_to_display.append( ' Licence(Pypi): %s' % pypi_package.pypi_license_for_installed_version) if args.show_newer_versions: newer_versions = pypi_package.available_newer_versions()
def update_check(self, silent=False):
    """
    Checks for updates to hyperspy and hyperspyUI.

    If the packages are not source installs, it checks for a new
    version on PyPI.

    Parameters
    ----------
    silent: bool
        If not silent (default), a message box will appear if no updates
        are available, with a message to that fact.

    Returns
    -------
    None.
    """
    self._check_git()
    available = {}
    # self.packages maps display name -> (enabled flag, url); `url` is not
    # read in this method.
    for Name, (enabled, url) in self.packages.items():
        name = Name.lower()
        if enabled:
            if (check_git_repo(name) and
                    self.settings['check_for_git_updates', bool]):
                # TODO: Check for commits to pull
                pass
            else:
                # Query PyPI over XML-RPC for released versions.
                import xmlrpc.client
                pypi = xmlrpc.client.ServerProxy(
                    'https://pypi.python.org/pypi')
                found = pypi.package_releases(name)
                if not found:
                    # Try to capitalize pkg name
                    if name == 'hyperspyui':
                        found = pypi.package_releases('hyperspyUI', True)
                    else:
                        found = pypi.package_releases(Name, True)
                if found:
                    import pip
                    dist = [
                        d for d in pip.get_installed_distributions()
                        if d.project_name.lower() == name
                    ]
                    # NOTE(review): plain string comparison of version
                    # strings is lexicographic (e.g. '0.10' < '0.9');
                    # confirm this is intended for these packages.
                    if dist[0].version < found[0]:
                        available[name] = found[0]
    if available:
        # Offer the user a checkbox list of available updates; each checked
        # entry is pinned to the new version and installed.
        w = self._get_update_list(available.keys())
        diag = self.ui.show_okcancel_dialog("Updates available", w)
        if diag.result() == QDialog.Accepted:
            for chk in w.children():
                if isinstance(chk, QtWidgets.QCheckBox):
                    name = chk.text()
                    if available[name]:
                        name += '==' + available[name]
                    self._perform_update(name)
    elif not silent:
        mb = QMessageBox(QMessageBox.Information, tr("No updates"),
                         tr("No new updates were found."),
                         parent=self.ui)
        mb.exec_()
import pip

# Build a (key, version) pair per installed distribution, sorted by key,
# and print them as a two-column table.
installed = [(dist.key, dist.version)
             for dist in pip.get_installed_distributions()]
installed.sort()
print("{0:<30}{1:<30}".format('Nombre de Paquete', 'Versión'))
for name, ver in installed:
    print("{0:<30}{1:<30}".format(name, ver))
def update_all(link=None, private=False):
    """Upgrade every installed component (distributions whose key starts
    with COMPONENT_PREFIX), optionally from a --find-links URL or the
    private index."""
    for dist in pip.get_installed_distributions(local_only=True):
        if not dist.key.startswith(COMPONENT_PREFIX):
            continue
        component = dist.key.replace(COMPONENT_PREFIX, '')
        _install_or_update(component, None, link, private, upgrade=True)
def create_lambda_zip(self, prefix='lambda_package', handler_file=None,
                      minify=True, exclude=None, use_precompiled_packages=True,
                      include=None):
    """
    Creates a Lambda-ready zip file of the current virtualenvironment and
    working directory.

    Parameters: prefix is the zip-name prefix; handler_file, when given, is
    copied to the archive root (where AWS Lambda expects it); minify applies
    ZIP_EXCLUDES filtering; exclude is an extra list of exclusion patterns;
    use_precompiled_packages swaps in the lambda_packages prebuilt binaries.
    `include` is accepted for interface compatibility but not used here.

    Returns path to that file.
    """
    print("Packaging project as zip...")
    venv = os.environ['VIRTUAL_ENV']
    cwd = os.getcwd()
    zip_fname = prefix + '-' + str(int(time.time())) + '.zip'
    zip_path = os.path.join(cwd, zip_fname)

    # Files that should be excluded from the zip
    if exclude is None:
        exclude = list()

    # Exclude the zip itself
    exclude.append(zip_path)

    def splitpath(path):
        # Split a path into its normalized components, root first.
        parts = []
        (path, tail) = os.path.split(path)
        while path and tail:
            parts.append(tail)
            (path, tail) = os.path.split(path)
        parts.append(os.path.join(path, tail))
        # BUG FIX: on Python 3 map() returns an iterator, which does not
        # support slicing; materialise it before reversing.
        return list(map(os.path.normpath, parts))[::-1]

    split_venv = splitpath(venv)
    split_cwd = splitpath(cwd)

    # Ideally this should be avoided automatically,
    # but this serves as an okay stop-gap measure.
    if split_venv[-1] == split_cwd[-1]:
        print(
            "Warning! Your project and virtualenv have the same name! You may want to re-create your venv with a new name, or explicitly define a 'project_name', as this may cause errors."
        )

    # First, do the project..
    temp_project_path = os.path.join(tempfile.gettempdir(),
                                     str(int(time.time())))
    if minify:
        excludes = ZIP_EXCLUDES + exclude + [split_venv[-1]]
        shutil.copytree(cwd, temp_project_path, symlinks=False,
                        ignore=shutil.ignore_patterns(*excludes))
    else:
        shutil.copytree(cwd, temp_project_path, symlinks=False)

    # Then, do the site-packages..
    # TODO Windows: %VIRTUAL_ENV%\Lib\site-packages
    # NOTE(review): the interpreter version is hard-coded; presumably this
    # targets python2.7 venvs only — confirm before reuse.
    temp_package_path = os.path.join(tempfile.gettempdir(),
                                     str(int(time.time() + 1)))
    site_packages = os.path.join(venv, 'lib', 'python2.7', 'site-packages')
    if minify:
        excludes = ZIP_EXCLUDES + exclude
        shutil.copytree(site_packages, temp_package_path, symlinks=False,
                        ignore=shutil.ignore_patterns(*excludes))
    else:
        shutil.copytree(site_packages, temp_package_path, symlinks=False)
    copy_tree(temp_package_path, temp_project_path, update=True)

    # Then the pre-compiled packages..
    if use_precompiled_packages:
        installed_packages_name_set = {
            package.project_name.lower()
            for package in pip.get_installed_distributions()
        }
        for name, details in lambda_packages.items():
            if name.lower() in installed_packages_name_set:
                tar = tarfile.open(details['path'], mode="r:gz")
                for member in tar.getmembers():
                    # If we can, trash the local version.
                    if member.isdir():
                        shutil.rmtree(
                            os.path.join(temp_project_path, member.name),
                            ignore_errors=True)
                        continue
                    tar.extract(member, temp_project_path)

    # If a handler_file is supplied, copy that to the root of the package,
    # because that's where AWS Lambda looks for it. It can't be inside a package.
    if handler_file:
        filename = handler_file.split(os.sep)[-1]
        shutil.copy(handler_file, os.path.join(temp_project_path, filename))

    # Then zip it all up.. fall back to stored (uncompressed) entries when
    # zlib is unavailable.
    try:
        import zlib
        compression_method = zipfile.ZIP_DEFLATED
    except Exception as e:  # pragma: no cover
        compression_method = zipfile.ZIP_STORED

    zipf = zipfile.ZipFile(zip_path, 'w', compression_method)
    for root, dirs, files in os.walk(temp_project_path):
        for filename in files:
            # If there is a .pyc file in this package,
            # we can skip the python source code as we'll just
            # use the compiled bytecode anyway.
            if filename[-3:] == '.py':
                if os.path.isfile(os.path.join(root, filename) + 'c'):
                    continue
            zipf.write(
                os.path.join(root, filename),
                os.path.join(root.replace(temp_project_path, ''), filename))

    # And, we're done!
    zipf.close()

    # Trash the temp directory
    shutil.rmtree(temp_project_path)

    # Warn if this is too large for Lambda (50 MB = 52428800 bytes).
    file_stats = os.stat(zip_path)
    if file_stats.st_size > 52428800:  # pragma: no cover
        print(
            "\n\nWarning: Application zip package is likely to be too large for AWS Lambda.\n\n"
        )

    return zip_fname
def get_package(self, _id):
    """Look up an installed distribution by spec ("name" or "name==ver")
    and return it wrapped via __make_package_pipdist, or None if absent."""
    wanted_key = _id.split('==')[0]
    for dist in pip.get_installed_distributions():
        if dist.key == wanted_key:
            return self.__make_package_pipdist(dist)
    return None
def requerimientos():
    """Write every installed distribution as a requirement spec, one per
    line, into ./requirements.txt (freeze-style dump)."""
    import pip
    with open("requirements.txt", "w") as out:
        for dist in pip.get_installed_distributions():
            out.write(str(dist.as_requirement()) + "\n")
#!/usr/bin/env python from __future__ import print_function import codecs import os import sys import pip from setuptools import setup, find_packages if 'docker-py' in [x.project_name for x in pip.get_installed_distributions()]: print( 'ERROR: "docker-py" needs to be uninstalled before installing this' ' package:\npip uninstall docker-py', file=sys.stderr) sys.exit(1) ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) requirements = [ 'requests >= 2.5.2, != 2.11.0, != 2.12.2', 'six >= 1.4.0', 'websocket-client >= 0.32.0', 'docker-pycreds >= 0.2.1' ] if sys.platform == 'win32': requirements.append('pypiwin32 >= 219') extras_require = { ':python_version < "3.5"': 'backports.ssl_match_hostname >= 3.5',
def list_components():
    """Return [{'name': ..., 'version': ...}] for every installed component
    (COMPONENT_PREFIX distribution), sorted by component name."""
    components = []
    for dist in pip.get_installed_distributions(local_only=True):
        if dist.key.startswith(COMPONENT_PREFIX):
            components.append({'name': dist.key.replace(COMPONENT_PREFIX, ''),
                               'version': dist.version})
    components.sort(key=lambda item: item['name'])
    return components
def __init__(self):
    """Snapshot the currently installed distributions at construction time."""
    self.installed = pip.get_installed_distributions()
def step_cli_installed(self):
    """Make sure saws is in installed packages."""
    installed_keys = {dist.key for dist in pip.get_installed_distributions()}
    assert 'saws' in installed_keys
"""Unit tests for the mycli.config module.""" from io import BytesIO, TextIOWrapper import os import pip import struct import sys import tempfile import pytest from mycli.config import (get_mylogin_cnf_path, open_mylogin_cnf, read_and_decrypt_mylogin_cnf, str_to_bool) with_pycryptodome = [ 'pycryptodome' in set([ package.project_name for package in pip.get_installed_distributions() ]) ] LOGIN_PATH_FILE = os.path.abspath( os.path.join(os.path.dirname(__file__), 'mylogin.cnf')) def open_bmylogin_cnf(name): """Open contents of *name* in a BytesIO buffer.""" with open(name, 'rb') as f: buf = BytesIO() buf.write(f.read()) return buf @pytest.mark.skipif(not with_pycryptodome, reason='requires pycryptodome')
def get_installed_packages():
    """Return the distributions pip reports as installed."""
    return pip.get_installed_distributions()
## #!/usr/bin/python from contextlib import contextmanager import sys, os @contextmanager def suppress_stdout(): with open(os.devnull, "w") as devnull: old_stdout = sys.stdout sys.stdout = devnull try: yield finally: sys.stdout = old_stdout import pip required_pkgs = ['docker', 'argparse'] installed_pkgs = [pkg.key for pkg in pip.get_installed_distributions()] for package in required_pkgs: if package not in installed_pkgs: with suppress_stdout(): pip.main(['install', package]) import re import argparse import docker def show_tags(client): tags = []
def step_install_cli(_):
    """Check that pgcli is in installed modules."""
    installed = {dist.key for dist in pip.get_installed_distributions()}
    assert 'pgcli' in installed
# -*- coding: utf-8 -*- import pip __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." __authors__ = "The AiiDA team." __version__ = "0.7.0" installed_packages = [p.project_name for p in pip.get_installed_distributions()] KOMBU_FOUND = 'kombu' in installed_packages
def doCompleteDoctor():
    """Checks for uninstalled or too old versions of requirements and gives a complete output."""
    Console.header("Doctor")

    # Enumerate installed distributions once; the original called
    # pip.get_installed_distributions() twice (and wrapped one call in a
    # redundant identity list-comprehension) to derive the same data.
    dists = list(pip.get_installed_distributions())
    keys = [dist.key for dist in dists]
    versions = {dist.key: dist.version for dist in dists}

    def checkSingleInstallation(keys, versions, packageName, minVersion,
                                installPath, updatePath):
        # Report one requirement: whether it is installed and, when a
        # minimum version (> "0.0") is required, whether the installed
        # version satisfies it; prints install/update hints otherwise.
        Console.info('%s:' % packageName)
        Console.indent()
        if packageName.lower() in keys:
            if LooseVersion(minVersion) > LooseVersion("0.0"):
                if LooseVersion(versions[packageName.lower()]) >= LooseVersion(minVersion):
                    Console.info(
                        Console.colorize(
                            'Version is OK (required: %s installed: %s)'
                            % (minVersion, versions[packageName.lower()]),
                            "green"))
                else:
                    Console.info(
                        Console.colorize(
                            Console.colorize(
                                'Version installed is too old (required: %s installed: %s)'
                                % (minVersion, versions[packageName.lower()]),
                                "red"), "bold"))
                    Console.info(
                        'Update to the newest version of %s using %s'
                        % (packageName, updatePath))
            else:
                Console.info(Console.colorize('Found installation', "green"))
        else:
            Console.info(
                Console.colorize(
                    Console.colorize('Did NOT find installation', "red"),
                    "bold"))
            Console.info('Install the newest version of %s using %s'
                         % (packageName, installPath))
        Console.outdent()

    # Required packages
    Console.info("Required Packages:")
    Console.indent()
    for entry in needs:
        checkSingleInstallation(keys, versions, entry["packageName"],
                                entry["minVersion"], entry["installPath"],
                                entry["updatePath"])
    Console.outdent()

    # Optional packages
    Console.info("")
    Console.info("Optional Packages:")
    Console.indent()
    for entry in optionals:
        checkSingleInstallation(keys, versions, entry["packageName"],
                                entry["minVersion"], entry["installPath"],
                                entry["updatePath"])
    Console.outdent()