def test_req_file_no_finder(self, tmpdir):
    """ Test parsing a requirements file without a finder """
    req_path = tmpdir.join("req.txt")
    with open(req_path, "w") as fp:
        fp.write("""
--find-links https://example.com/
--index-url https://example.com/
--extra-index-url https://two.example.com/
--no-use-wheel
--no-index
""")
    # Parsing must succeed even when no finder is supplied.
    parse_requirements(req_path, session=PipSession())
def main(requirements_path):
    """Normalize a requirements file in place and re-hash each package.

    Reads ``requirements_path``, keeps only requirements that come from this
    file itself (not from nested ``-r`` includes), rewrites the file sorted
    with reverse-dependency comments, then runs ``run_single_package`` on
    every requirement to refresh its sha256 hashes.

    :param requirements_path: path to the requirements file to rewrite
    """
    this_requirements_file = os.path.basename(requirements_path)

    parsed = parse_requirements(
        requirements_path, session=pip.download.PipSession())
    requirements = [
        req for req in parsed
        # Skip packages from other requirements files
        if this_requirements_file in req.comes_from
    ]

    reverse_requirements = {}
    nested_requirements = set()

    # Fetch nested requirements lines, this is mostly copied from
    # pip so that we support stuff "correctly". Unfortunately there
    # isn't any good API in pip for it :-/
    parser = build_parser()
    defaults = parser.get_default_values()
    with open(requirements_path) as fobj:
        for line in fobj:
            args_str, options_str = break_args_options(line)
            opts, _ = parser.parse_args(shlex.split(options_str), defaults)
            if opts.requirements:
                nested_requirements.update(opts.requirements)

    # Build reverse requirements to be able to add a note on who is depending
    # on what
    for req in requirements:
        reverse_requirements[safe_name(req.name)] = rdeps(req.name)

    output = []
    output.extend('-r %s' % req for req in nested_requirements)
    output.append('')

    # Let's output the updated, fixed and more correct requirements version
    for req in sorted(requirements, key=lambda x: safe_name(x.name)):
        if reverse_requirements.get(safe_name(req.name)):
            msg = '# %s is required by %s' % (
                safe_name(req.name),
                ', '.join(reverse_requirements[safe_name(req.name)]))
            output.append(msg)
        output.append('%s%s' % (safe_name(req.name), str(req.specifier)))

    # BUG FIX: the file was previously opened in 'wb' (binary) mode while
    # being written a str, which raises TypeError on Python 3; it was then
    # re-opened in 'a' mode just to add a trailing newline.  Open once in
    # text mode and write everything in a single pass.
    with open(requirements_path, 'w') as fobj:
        fobj.write('\n'.join(output))
        fobj.write('\n')

    for req in requirements:
        run_single_package(
            '%s%s' % (safe_name(req.name), str(req.specifier)),
            requirements_path, 'sha256',
            # Workaround a bug or feature in hashin which would avoid
            # fetching wheels e.g for some packages.
            python_versions=['py27', '2.7'],
            verbose=True)
def main(requirements_path):
    """Normalize a requirements file in place and re-hash each package.

    Reads ``requirements_path``, keeps only requirements that come from this
    file itself (not from nested ``-r`` includes), rewrites the file sorted
    with reverse-dependency comments, then runs ``run_single_package`` on
    every requirement to refresh its sha256 hashes.

    :param requirements_path: path to the requirements file to rewrite
    """
    this_requirements_file = os.path.basename(requirements_path)

    parsed = parse_requirements(
        requirements_path, session=pip.download.PipSession())
    requirements = [
        req for req in parsed
        # Skip packages from other requirements files
        if this_requirements_file in req.comes_from]

    reverse_requirements = {}
    nested_requirements = set()

    # Fetch nested requirements lines, this is mostly copied from
    # pip so that we support stuff "correctly". Unfortunately there
    # isn't any good API in pip for it :-/
    parser = build_parser()
    defaults = parser.get_default_values()
    with open(requirements_path) as fobj:
        for line in fobj:
            args_str, options_str = break_args_options(line)
            opts, _ = parser.parse_args(shlex.split(options_str), defaults)
            if opts.requirements:
                nested_requirements.update(opts.requirements)

    # Build reverse requirements to be able to add a note on who is depending
    # on what
    for req in requirements:
        reverse_requirements[safe_name(req.name)] = rdeps(req.name)

    output = []
    output.extend('-r %s' % req for req in nested_requirements)
    output.append('')

    # Let's output the updated, fixed and more correct requirements version
    for req in sorted(requirements, key=lambda x: safe_name(x.name)):
        if reverse_requirements.get(safe_name(req.name)):
            msg = '# %s is required by %s' % (
                safe_name(req.name),
                ', '.join(reverse_requirements[safe_name(req.name)]))
            output.append(msg)
        output.append('%s%s' % (safe_name(req.name), str(req.specifier)))

    # BUG FIX: 'wb' (binary) mode + str payload raises TypeError on
    # Python 3, and the file was re-opened in 'a' mode just for the
    # trailing newline.  Use a single text-mode open instead.
    with open(requirements_path, 'w') as fobj:
        fobj.write('\n'.join(output))
        fobj.write('\n')

    for req in requirements:
        run_single_package(
            '%s%s' % (safe_name(req.name), str(req.specifier)),
            requirements_path, 'sha256',
            # Workaround a bug or feature in hashin which would avoid
            # fetching wheels e.g for some packages.
            python_versions=['py27', '2.7'],
            verbose=True)
def test_install_requirements_with_options(self, tmpdir, finder, session,
                                           options):
    """Per-requirement --global-option/--install-option must reach pip's
    install command line in the right positions."""
    global_option = '--dry-run'
    install_option = '--prefix=/opt'

    content = '''
--only-binary :all:
INITools==2.0 --global-option="{global_option}" \
--install-option "{install_option}"
'''.format(global_option=global_option, install_option=install_option)

    with requirements_file(content, tmpdir) as reqs_file:
        req = next(parse_requirements(reqs_file.abspath,
                                      finder=finder,
                                      options=options,
                                      session=session))
        req.source_dir = os.curdir
        with patch.object(subprocess, 'Popen') as popen:
            popen.return_value.stdout.readline.return_value = ""
            try:
                req.install([])
            # BUG FIX: was a bare `except:` which also swallows
            # SystemExit/KeyboardInterrupt; the mocked Popen is expected
            # to make install fail, we only care about the argv used.
            except Exception:
                pass

        call = popen.call_args_list[0][0][0]
        assert call.index(install_option) > \
            call.index('install') > \
            call.index(global_option) > 0
        assert options.format_control.no_binary == set([':all:'])
        assert options.format_control.only_binary == set([])
def _parse_python(self, spec):
    """
    Parse PyPI specification of a single dependency

    :param spec: str, for example "Django>=1.5,<1.8"
    :return:
    """

    def _get_pip_spec(requirements):
        "In Pip 8+ there's no `specs` field and we have to dig the information from the `specifier` field"
        if hasattr(requirements, 'specs'):
            return requirements.specs
        elif hasattr(requirements, 'specifier'):
            return [(s.operator, s.version) for s in requirements.specifier]

    # create a temporary file and store the spec there since
    # `parse_requirements` requires a file
    with NamedTemporaryFile(mode='w+', suffix='pysolve') as f:
        f.write(spec)
        f.flush()
        parsed = parse_requirements(f.name, session=f.name)
        dependency = [
            Dependency(item.name, _get_pip_spec(item.req) or [('>=', '0.0.0')])
            for item in parsed
        ].pop()

    return dependency
def test_req_file_parse_no_only_binary(self, data, finder):
    # Fully consume the generator so the finder's format control is updated.
    reqs = parse_requirements(data.reqfiles.join("supported_options2.txt"),
                              finder,
                              session=PipSession())
    list(reqs)
    expected = pip.index.FormatControl(set(['fred']), set(['wilma']))
    assert finder.format_control == expected
def test_install_requirements_with_options(self, tmpdir, finder, session,
                                           options):
    """Per-requirement --global-option/--install-option must reach pip's
    install command line in the right positions."""
    global_option = '--dry-run'
    install_option = '--prefix=/opt'

    content = '''
--only-binary :all:
INITools==2.0 --global-option="{global_option}" \
--install-option "{install_option}"
'''.format(global_option=global_option, install_option=install_option)

    with requirements_file(content, tmpdir) as reqs_file:
        req = next(
            parse_requirements(reqs_file.abspath,
                               finder=finder,
                               options=options,
                               session=session))
        req.source_dir = os.curdir
        with patch.object(subprocess, 'Popen') as popen:
            popen.return_value.stdout.readline.return_value = ""
            try:
                req.install([])
            # BUG FIX: was a bare `except:` which also swallows
            # SystemExit/KeyboardInterrupt; the mocked Popen is expected
            # to make install fail, we only care about the argv used.
            except Exception:
                pass

        call = popen.call_args_list[0][0][0]
        assert call.index(install_option) > \
            call.index('install') > \
            call.index(global_option) > 0
        assert options.format_control.no_binary == set([':all:'])
        assert options.format_control.only_binary == set([])
def read_requirements(req_file):
    """Reads a requirements file."""
    parsed_items = list(parse_requirements(req_file, session={}))
    # Copy environment markers onto the underlying requirement objects;
    # option-only lines have no `.req` and are skipped.
    for parsed_item in parsed_items:
        if parsed_item.req:
            parsed_item.req.marker = parsed_item.markers
    result = []
    for parsed_item in parsed_items:
        result.append(parsed_item.req if parsed_item.req else parsed_item)
    return result
def test_join_lines(self, tmpdir, finder):
    # A trailing backslash must join the two option lines into one.
    req_path = tmpdir.join("req1.txt")
    with open(req_path, "w") as fp:
        fp.write("--extra-index-url url1 \\\n--extra-index-url url2")
    list(parse_requirements(req_path, finder=finder, session=PipSession()))
    assert finder.index_urls == ['url1', 'url2']
def test_allow_all_external(self, tmpdir):
    # The --allow-all-external flag must be accepted without error.
    req_path = tmpdir.join("requirements.txt")
    with open(req_path, "w") as fh:
        fh.write("""
--allow-all-external
foo
""")
    list(parse_requirements(req_path, session=PipSession()))
def test_multiple_appending_options(self, tmpdir, finder, options):
    # Two separate --extra-index-url lines must both be recorded, in order.
    req_path = tmpdir.join("req1.txt")
    with open(req_path, "w") as fp:
        fp.write("--extra-index-url url1 \n")
        fp.write("--extra-index-url url2 ")
    list(parse_requirements(req_path,
                            finder=finder,
                            session=PipSession(),
                            options=options))
    assert finder.index_urls == ['url1', 'url2']
def _parse_python(spec):
    """Parse PyPI specification of a single dependency.

    :param spec: str, for example "Django>=1.5,<1.8"
    :return: [Django [[('>=', '1.5'), ('<', '1.8')]]]
    """

    def _extract_op_version(specifier):
        # https://www.python.org/dev/peps/pep-0440/#compatible-release
        if specifier.operator == '~=':
            parts = specifier.version.split('.')
            if len(parts) in {2, 3, 4}:
                if len(parts) in {3, 4}:
                    # will increase the last but one in next line
                    del parts[-1]
                parts[-1] = str(int(parts[-1]) + 1)
            else:
                raise ValueError('%r must not be used with %r'
                                 % (specifier.operator, specifier.version))
            return [('>=', specifier.version), ('<', '.'.join(parts))]
        # Trailing .* is permitted per
        # https://www.python.org/dev/peps/pep-0440/#version-matching
        if specifier.operator == '==' and specifier.version.endswith('.*'):
            try:
                result = check_output(
                    ['/usr/bin/semver-ranger', specifier.version],
                    universal_newlines=True).strip()
                gte, lt = result.split()
                return [('>=', gte.lstrip('>=')), ('<', lt.lstrip('<'))]
            except ValueError:
                logger.info("couldn't resolve ==%s", specifier.version)
                return specifier.operator, specifier.version
        # https://www.python.org/dev/peps/pep-0440/#arbitrary-equality
        # Use of this operator is heavily discouraged, so just convert it
        # to 'Version matching'
        if specifier.operator == '===':
            return '==', specifier.version
        return specifier.operator, specifier.version

    def _get_pip_spec(requirements):
        """There's no `specs` field In Pip 8+, take info from `specifier` field."""
        if hasattr(requirements, 'specs'):
            return requirements.specs
        if hasattr(requirements, 'specifier'):
            specs = [_extract_op_version(s) for s in requirements.specifier]
            if len(specs) == 0:
                specs = [('>=', '0.0.0')]
            elif len(specs) > 1:
                specs = [specs]
            return specs

    # create a temporary file and store the spec there since
    # `parse_requirements` requires a file
    with NamedTemporaryFile(mode='w+', suffix='pysolve') as f:
        f.write(spec)
        f.flush()
        parsed = parse_requirements(f.name, session=f.name)
        dependency = [Dependency(item.name, _get_pip_spec(item.req))
                      for item in parsed].pop()

    return dependency
def test_skip_regex(self, tmpdir, finder, options):
    # Lines matching skip_requirements_regex must be ignored entirely.
    options.skip_requirements_regex = '.*Bad.*'
    req_path = tmpdir.join("req1.txt")
    with open(req_path, "w") as fp:
        fp.write("--extra-index-url Bad \n")
        fp.write("--extra-index-url Good ")
    list(parse_requirements(req_path,
                            finder=finder,
                            options=options,
                            session=PipSession()))
    assert finder.index_urls == ['Good']
def test_remote_reqs_parse(self):
    """
    Test parsing a simple remote requirements file
    """
    # this requirements file just contains a comment previously this has
    # failed in py3: https://github.com/pypa/pip/issues/760
    url = ('https://raw.githubusercontent.com/pypa/'
           'pip-test-package/master/'
           'tests/req_just_comment.txt')
    for req in parse_requirements(url, session=PipSession()):
        pass
def test_req_file_parse_comment_start_of_line(self, tmpdir, finder):
    """
    Test parsing comments in a requirements file
    """
    req_path = tmpdir.join("req1.txt")
    with open(req_path, "w") as fp:
        fp.write("# Comment ")
    parsed = list(parse_requirements(req_path, finder,
                                     session=PipSession()))
    # A comment-only file yields no requirements.
    assert not parsed
def test_req_file_parse_egginfo_end_of_line_with_url(self, tmpdir, finder):
    """
    Test parsing comments in a requirements file
    """
    req_path = tmpdir.join("req1.txt")
    with open(req_path, "w") as fp:
        fp.write("https://example.com/foo.tar.gz#egg=wat")
    parsed = list(parse_requirements(req_path, finder,
                                     session=PipSession()))
    # The #egg= fragment supplies the project name.
    assert len(parsed) == 1
    assert parsed[0].name == "wat"
def ensure_pipfile(validate=True):
    """Creates a Pipfile for the project, if it doesn't exist."""
    # Assert Pipfile exists.
    if not project.pipfile_exists:
        # If there's a requirements file, but no Pipfile...
        if project.requirements_exists:
            click.echo(crayons.yellow(
                'Requirements file found, instead of Pipfile! Converting...'))
            # Create a Pipfile...
            project.create_pipfile()

            # Parse requirements.txt file with Pip's parser.
            # Pip requires a `PipSession` which is a subclass of
            # requests.Session.  Since we're not making any network calls,
            # it's initialized to nothing.
            from pip.req.req_file import parse_requirements
            found_reqs = list(
                parse_requirements(project.requirements_location, session=''))

            for package in found_reqs:
                if package.name in BAD_PACKAGES:
                    continue
                if package.link is not None:
                    package_string = (
                        '-e {0}'.format(package.link)
                        if package.editable else str(package.link))
                    project.add_package_to_pipfile(package_string)
                else:
                    project.add_package_to_pipfile(str(package.req))
        else:
            click.echo(
                crayons.yellow('Creating a Pipfile for this project...'),
                err=True)
            # Create the pipfile if it doesn't exist.
            project.create_pipfile()

    # Validate the Pipfile's contents.
    if validate and project.virtualenv_exists:
        # Ensure that Pipfile is using proper casing.
        p = project.parsed_pipfile
        changed = ensure_proper_casing(pfile=p)
        # Write changes out to disk.
        if changed:
            click.echo(
                crayons.yellow('Fixing package names in Pipfile...'),
                err=True)
            project.write_toml(p)
def add_to_requirements_file(req, filename):
    """Add *req* to the requirements file *filename*, replacing any existing
    entry with the same (case-insensitive) name.

    The file is rewritten atomically via a ``.tmp`` sibling; packages listed
    in ``IGNORED_PACKAGES`` are dropped from the output.

    :param req: object with `.editable` and `.line` describing the requirement
    :param filename: path of the requirements file to update
    """
    old_reqs = [r for r in parse_requirements(filename, session='')]

    if req.editable:
        install_req = InstallRequirement.from_editable(
            req.line.replace("-e ", ""))
    else:
        install_req = InstallRequirement.from_line(req.line)

    reqs = []
    replaced = False
    for old_req in old_reqs:
        if old_req.name.lower() == install_req.name.lower():
            replaced = True
            reqs.append(install_req)
        else:
            reqs.append(old_req)
    # BUG FIX: the original contained this `if not replaced` block TWICE,
    # so a brand-new requirement was appended (and written) two times.
    if not replaced:
        reqs.append(install_req)

    tmp_path = filename + ".tmp"
    with open(tmp_path, "w") as fobj:  # renamed from `file` (builtin shadow)
        for package in reqs:
            if package.name not in IGNORED_PACKAGES:
                if package.link is not None:
                    package_string = ('-e {0}'.format(package.link)
                                      if package.editable
                                      else str(package.link))
                    fobj.write(package_string + "\n")
                else:
                    fobj.write(str(package.req) + "\n")

    os.remove(filename)
    os.rename(tmp_path, filename)
def get_requirements(pfile):
    """
    Ask pip to parse its own requirements file to handle comments,
    line continuations, nested files etc.

    :param pfile: an open file handle
    :return a tuple of requirements
    """
    session = PipSession()
    parsed = parse_requirements(pfile.name, session=session)
    return tuple(str(item.req) for item in parsed)
def pip_freeze():
    """Run `pip freeze` and return its output parsed into requirements."""
    pip_command = '{0} freeze'.format(which_pip())
    c = delegator.run(pip_command)
    frozen_reqs = c.out
    # NOTE(review): parse_requirements() normally expects a file path plus a
    # `session` kwarg, yet here it receives the raw freeze output — confirm
    # this call works with the pip version in use.
    return [req for req in parse_requirements(frozen_reqs)]
def _validate_requirements(self, spec):
    """Check that the syntax of the provided specification is valid.

    :param spec: str requirement specification, e.g. "Django>=1.5,<1.8"
    :return: True when pip parsed at least one requirement from the spec
    :raises: whatever `parse_requirements` raises for malformed input
    """
    from pip.req.req_file import parse_requirements

    # create a temporary file and store the spec there since
    # `parse_requirements` requires a file
    with NamedTemporaryFile(mode='w+', suffix='pysolve') as f:
        f.write(spec)
        f.flush()
        # BUG FIX: parse_requirements() returns a lazy generator, so the
        # old `if reqs: return True` was always taken without ever parsing
        # the spec.  Materialize the generator (inside the `with`, while
        # the temp file still exists) so the syntax is actually checked.
        reqs = list(parse_requirements(f.name, session=f.name))

    return bool(reqs)
def read_requirements(req_file):
    """Reads a requirements file."""
    parsed_items = list(parse_requirements(req_file, session={}))
    result = []
    line_marker = req_file + ' (line '
    for item in parsed_items:
        # Get line number from item
        line_number = item.comes_from.split(line_marker)[1][:-1]
        if item.req:
            item.req.marker = item.markers
            result.append((item.req, line_number))
        else:
            result.append((item, line_number))
    return result
session = PipSession()
options = {}

print('# AUTOGENERATED by {}\n# DO NOT EDIT\n#\n'.format(sys.argv[0]))

if os.path.exists('setup.cfg'):
    options = read_configuration('setup.cfg').get('options', {})

install_requires = options.get('install_requires', [])
extras_require = options.get('extras_require', {})

if install_requires:
    fname = 'setup.cfg:options.install_requires'
    print_file(
        fname,
        (InstallRequirement.from_line(line, fname)
         for line in sorted(install_requires)),
    )

for extra, requires in sorted(extras_require.items()):
    if extra in args.extras:
        fname = 'setup.cfg:options.extras_require:' + extra
        print_file(
            fname,
            (InstallRequirement.from_line(line, fname)
             for line in sorted(requires)),
        )

for filename in args.requirements:
    print_file(filename, parse_requirements(filename, session=session))
def read_requirements(req_file):
    """Return the parsed requirement objects from *req_file*."""
    parsed = parse_requirements(req_file, session={})
    return [item.req for item in parsed]
name='halutz', packages=['halutz'], version=halutz.__version__, description=("Halutz is a python library for Swagger, " "inspired by working with network engineers getting started with Python."), # TODO long_description=read('README.rst'), author='Jeremy Schulman', author_email='*****@*****.**', url='https://github.com/jeremyschulman/halutz', include_package_data=True, license='MIT', zip_safe=False, install_requires=[ item.name for item in parse_requirements( 'requirements.txt', session=PipSession())], keywords=('serialization', 'rest', 'json', 'api', 'marshal', 'marshalling', 'deserialization', 'validation', 'schema', 'jsonschema', 'swagger', 'openapi', 'networking', 'automation'), classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', # 'Programming Language :: Python :: 3', # 'Programming Language :: Python :: 3.3', # 'Programming Language :: Python :: 3.4', # 'Programming Language :: Python :: 3.5',
def read_requirements(req_file):
    """Reads a requirements file."""
    return [item.req for item in parse_requirements(req_file, session={})]
def get_requirements(file_name):
    """Return the list of med dependencies."""
    parsed = parse_requirements(file_name, session=PipSession())
    return [str(item.req) for item in parsed]
def _get_requirements(file_name):
    """Parse *file_name* with pip and return a tuple of requirement strings."""
    session = PipSession()
    return tuple(str(item.req)
                 for item in parse_requirements(file_name, session=session))
def test_req_file_parse_no_only_binary(self, data, finder):
    # Drain the generator; parsing mutates the finder's format control.
    list(parse_requirements(data.reqfiles.join("supported_options2.txt"),
                            finder,
                            session=PipSession()))
    assert finder.format_control == pip.index.FormatControl(
        set(['fred']), set(['wilma']))
def parse_python(spec):  # Ignore PyDocStyleBear
    """Parse PyPI specification of a single dependency.

    :param spec: str, for example "Django>=1.5,<1.8"
    :return: [Django [[('>=', '1.5'), ('<', '1.8')]]]
    """

    def _extract_op_version(specifier):
        # https://www.python.org/dev/peps/pep-0440/#compatible-release
        if specifier.operator == "~=":
            parts = specifier.version.split(".")
            if len(parts) in {2, 3, 4}:
                if len(parts) in {3, 4}:
                    # will increase the last but one in next line
                    del parts[-1]
                parts[-1] = str(int(parts[-1]) + 1)
            else:
                raise ValueError("%r must not be used with %r"
                                 % (specifier.operator, specifier.version))
            return [(">=", specifier.version), ("<", ".".join(parts))]
        # Trailing .* is permitted per
        # https://www.python.org/dev/peps/pep-0440/#version-matching
        if specifier.operator == "==" and specifier.version.endswith(".*"):
            try:
                result = check_output(
                    ["/usr/bin/semver-ranger", specifier.version],
                    universal_newlines=True).strip()
                gte, lt = result.split()
                return [(">=", gte.lstrip(">=")), ("<", lt.lstrip("<"))]
            except ValueError:
                _LOGGER.warning("couldn't resolve ==%s", specifier.version)
                return specifier.operator, specifier.version
        # https://www.python.org/dev/peps/pep-0440/#arbitrary-equality
        # Use of this operator is heavily discouraged, so just convert it
        # to 'Version matching'
        if specifier.operator == "===":
            return "==", specifier.version
        return specifier.operator, specifier.version

    def _get_pip_spec(requirements):
        """There is no `specs` field In Pip 8+, take info from `specifier` field."""
        if hasattr(requirements, "specs"):
            return requirements.specs
        if hasattr(requirements, "specifier"):
            specs = [_extract_op_version(s) for s in requirements.specifier]
            if len(specs) == 0:
                # TODO: I'm not sure with this one
                # we should probably return None instead and let pip deal
                # with this
                specs = [(">=", "0.0.0")]
            return specs

    _LOGGER.info("Parsing dependency %r", spec)
    # create a temporary file and store the spec there since
    # `parse_requirements` requires a file
    with NamedTemporaryFile(mode="w+", suffix="pysolve") as f:
        f.write(spec)
        f.flush()
        parsed = parse_requirements(f.name, session=f.name)
        dependency = [Dependency(item.name, _get_pip_spec(item.req))
                      for item in parsed].pop()

    return dependency
def read_requirements(req_file):
    """Reads a requirements file."""
    items = list(parse_requirements(req_file, session={}))
    for item in items:
        # ROBUSTNESS FIX: option-only or unnamed lines yield items whose
        # `.req` is None; unconditionally setting `.marker` on them raised
        # AttributeError.  Guard like the sibling read_requirements helper.
        if item.req:
            item.req.marker = item.markers
    return [item.req for item in items]