def get_requirement(dep):
    """Pre-clean requirement strings passed to the requirements parser.

    Ensures that we can accept both local and relative paths, file and VCS
    URIs, remote URIs, and package names, and that we pass only valid
    requirement strings to the requirements parser. Performs necessary
    modifications to requirements object if the user input was a local
    relative path.

    :param str dep: A requirement line
    :returns: :class:`requirements.Requirement` object
    """
    path = None
    # Split out markers if they are present - similar to how pip does it
    # See pip.req.req_install.InstallRequirement.from_line
    if not any(dep.startswith(uri_prefix) for uri_prefix in SCHEME_LIST):
        marker_sep = ';'
    else:
        # URIs may legitimately contain bare ';' characters, so for them only
        # '; ' (with a trailing space) is treated as the marker separator.
        marker_sep = '; '
    if marker_sep in dep:
        dep, markers = dep.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
    else:
        markers = None
    # Strip extras from the requirement so we can make a properly parseable req
    dep, extras = pip.req.req_install._strip_extras(dep)
    # Only operate on local, existing, non-URI formatted paths
    if (is_file(dep) and isinstance(dep, six.string_types) and
            not any(dep.startswith(uri_prefix) for uri_prefix in SCHEME_LIST)):
        dep_path = Path(dep)
        # Only parse if it is a file or an installable dir
        if dep_path.is_file() or (dep_path.is_dir() and
                                  pip.utils.is_installable_dir(dep)):
            if dep_path.is_absolute() or dep_path.as_posix() == '.':
                path = dep_path.as_posix()
            else:
                path = get_converted_relative_path(dep)
            # Hand the parser a file:// URI so it parses as a local file.
            dep = dep_path.resolve().as_uri()
    req = [r for r in requirements.parse(dep)][0]
    # If the result is a local file with a URI and we have a local path, unset
    # the URI and set the path instead
    if req.local_file and req.uri and not req.path and path:
        req.path = path
        req.uri = None
    if markers:
        req.markers = markers
    if extras:
        # Bizarrely this is also what pip does...
        req.extras = [
            r for r in requirements.parse('fakepkg{0}'.format(extras))
        ][0].extras
    return req
def process(request):
    """Parse each requirements file in *request* and describe its contents.

    :param request: mapping with a 'files' list; each file is a mapping with
        'name' and (optionally) 'content' keys.
    :returns: dict with a 'files' list where each entry carries the parsed
        dependency data (name, specifiers, extras) for that file.
    """
    extra_keys = ('line', 'editable', 'vcs', 'revision', 'uri', 'path',
                  'extras')
    files = []
    for file_ in request['files']:
        dependencies = []
        for req in requirements.parse(file_.get('content', '')):
            specifiers = [
                {'operator': operator, 'version': version}
                for operator, version in req.specs
            ]
            extras = {
                key: getattr(req, key)
                for key in extra_keys
                if getattr(req, key)
            }
            dependencies.append({
                'name': req.name,
                'specifiers': specifiers,
                'extras': extras,
            })
        files.append({
            'name': file_.get('name'),
            'dependencies': dependencies,
        })
    return {'files': files}
def get_lowest_versions(requirements_file):
    """Yield '<name>==<version>' pins for every '>=' lower bound.

    Only requirements carrying a specifier are considered; each '>=' spec
    becomes an exact pin at that lowest allowed version.
    """
    with open(requirements_file) as handle:
        for req in requirements.parse(handle):
            if not req.specifier:
                continue
            for operator, version in req.specs:
                if operator == ">=":
                    yield f"{req.name}=={version}"
def get_requirement(self):
    """Build a parsed requirement for this (possibly editable) VCS link.

    Re-parses ``self.link.url`` (prefixed with ``-e `` when editable), then
    copies local-path, editable, VCS, ref and extras metadata from this
    object onto the parsed requirement.

    :returns: the parsed requirement object
    :raises ValueError: if no package name was parsed (missing #egg fragment)
    """
    prefix = "-e " if self.editable else ""
    line = "{0}{1}".format(prefix, self.link.url)
    req = first(requirements.parse(line))
    if self.path and self.link and self.link.scheme.startswith("file"):
        req.local_file = True
        req.path = self.path
    if self.editable:
        req.editable = True
    req.link = self.link
    # Preserve the user's original URI spelling when the link normalized a
    # git+git@ URL into git+ssh:// form.
    if (
        self.uri != unquote(self.link.url_without_fragment)
        and "git+ssh://" in self.link.url
        and "git+git@" in self.uri
    ):
        req.line = self.uri
        req.uri = self.uri
    if not req.name:
        raise ValueError(
            "pipenv requires an #egg fragment for version controlled "
            "dependencies. Please install remote dependency "
            "in the form {0}#egg=<package-name>.".format(req.uri)
        )
    # Fill in metadata the parser did not derive from the line itself.
    if self.vcs and not req.vcs:
        req.vcs = self.vcs
    if self.ref and not req.revision:
        req.revision = self.ref
    if self.extras and not req.extras:
        req.extras = self.extras
    return req
def parse_extras(extras_str):
    """Turn a string of extras into a parsed extras list"""
    import requirements

    # Parse via a fake package name so only the extras matter.
    fake_line = "fakepkg{0}".format(extras_to_string(extras_str))
    parsed = first(requirements.parse(fake_line))
    return parsed.extras
def convert_deps_from_pip(dep):
    """Converts a pip-formatted dependency to a Pipfile-formatted one.

    :param str dep: a single pip requirement line
    :returns: dict mapping the package name to its Pipfile-style value
        (a specifier string, an extras/VCS dict, or '*' for bare names)
    """
    dependency = {}
    req = [r for r in requirements.parse(dep)][0]
    # Comparison operators: e.g. Django>1.10
    if req.specs:
        # Split at the first operator character so the specifier text is
        # preserved exactly as the user wrote it.
        r = multi_split(dep, '=<>')
        dependency[req.name] = dep[len(r[0]):]
    # Extras: e.g. requests[socks]
    elif req.extras:
        r = multi_split(dep, '[]')
        dependency[req.name] = {'extras': req.extras}
    # VCS Installs.
    elif req.vcs:
        # Crop off the git+, etc part.
        dependency[req.name] = {req.vcs: req.uri[len(req.vcs) + 1:]}
        # Add --editable, if it's there.
        if req.editable:
            dependency[req.name].update({'editable': True})
        # Add the specifier, if it was provided.
        if req.revision:
            dependency[req.name].update({'ref': req.revision})
    # Bare dependencies: e.g. requests
    else:
        dependency[dep] = '*'
    return dependency
def local():
    """Load local requirements file."""
    logger.info("Loading requirements from local file.")
    with open(REQUIREMENTS_FILE, "r") as handle:
        for requirement in parse(handle):
            logger.debug("Creating new package: %r", requirement)
            create_package_version(requirement)
def get_requirement(self):
    """Parse '<name><version>' into a requirement, wrapping parse errors.

    :raises RequirementError: when the combined line cannot be parsed.
    """
    from pkg_resources import RequirementParseError

    line = "{0}{1}".format(self.name, self.version)
    try:
        return first(requirements.parse(line))
    except RequirementParseError:
        raise RequirementError(
            "Error parsing requirement: %s%s" % (self.name, self.version)
        )
def from_pipfile(cls, name, pipfile):
    """Build an instance from a Pipfile entry (name plus data mapping)."""
    creation_args = {}
    if hasattr(pipfile, "keys"):
        allowed = cls.attr_fields()
        creation_args = {
            key: value for key, value in pipfile.items() if key in allowed
        }
    version = get_version(pipfile)
    creation_args["name"] = name
    creation_args["version"] = version
    creation_args["req"] = first(
        requirements.parse("{0}{1}".format(name, version))
    )
    return cls(**creation_args)
def from_line(cls, line):
    """Create a requirement wrapper from a single requirements.txt line.

    Handles --hash fragments, editable (-e) prefixes, environment markers,
    extras, local files/dirs, plain URLs, VCS URLs and named requirements.

    :param str line: one requirement line
    :returns: a new instance wrapping the parsed requirement
    :raises RequirementError: for '.' when it is not an installable dir
    """
    hashes = None
    if "--hash=" in line:
        # Everything after the first ' --hash=' is one or more hash values.
        hashes = line.split(" --hash=")
        line, hashes = hashes[0], hashes[1:]
    editable = line.startswith("-e ")
    line = line.split(" ", 1)[1] if editable else line
    line, markers = split_markers_from_line(line)
    line, extras = _strip_extras(line)
    line = line.strip('"').strip("'").strip()
    line_with_prefix = "-e {0}".format(line) if editable else line
    vcs = None
    # Installable local files and installable non-vcs urls are handled
    # as files, generally speaking
    if is_installable_file(line) or (is_valid_url(line) and not is_vcs(line)):
        r = FileRequirement.from_line(line_with_prefix)
    elif is_vcs(line):
        r = VCSRequirement.from_line(line_with_prefix)
        vcs = r.vcs
    elif line == "." and not is_installable_file(line):
        raise RequirementError(
            "Error parsing requirement %s -- are you sure it is installable?"
            % line
        )
    else:
        # Named requirement, possibly with a version specifier and extras.
        specs = "!=<>~"
        spec_matches = set(specs) & set(line)
        version = None
        name = line
        if spec_matches:
            # Split at the earliest operator character into name + specifier.
            spec_idx = min((line.index(match) for match in spec_matches))
            name = line[:spec_idx]
            version = line[spec_idx:]
        if not extras:
            name, extras = _strip_extras(name)
        if version:
            name = "{0}{1}".format(name, version)
        r = NamedRequirement.from_line(line)
    if extras:
        # Parse extras via a fake package name, mirroring pip's behavior.
        extras = first(
            requirements.parse("fakepkg{0}".format(extras_to_string(extras)))
        ).extras
        r.req.extras = extras
    if markers:
        r.req.markers = markers
    args = {
        "name": r.name,
        "vcs": vcs,
        "req": r,
        "markers": markers,
        "editable": editable,
    }
    if extras:
        args["extras"] = extras
    if hashes:
        args["hashes"] = hashes
    return cls(**args)
def _get_installed_packages(self):
    """
    Get a set of installed packages

    Runs ``pip freeze`` for the *currently running* interpreter and parses
    its output.

    :return: Set of installed packages
    """
    import sys

    # Invoke pip via the running interpreter: a bare "pip" on PATH may
    # belong to a different Python installation than the one running us.
    args = [
        sys.executable,
        "-m",
        "pip",
        "freeze",
    ]
    installed = subprocess.check_output(args, universal_newlines=True)
    return set(requirements.parse(installed))
def parse_requirements(req_files):
    """Parse a list of requirement file and return a set of packages"""
    parsed = []
    for filename in req_files:
        with open(filename) as handle:
            parsed.extend(requirements.parse(handle))
    return set(parsed)
def __init__(self, url):
    """Download a requirements file from *url* and index its entries.

    NOTE(review): Python 2 style code -- ``urllib.urlretrieve`` and the
    ``req.values()`` indexing below do not work like this on Python 3;
    confirm the target interpreter before touching this.
    """
    filename = url.split('/')[-1]
    urllib.urlretrieve(url, filename)
    self.reqs = {}
    with open(filename, 'r') as f:
        for req in requirements.parse(f):
            # presumably values()[1] is the package name and values()[2]
            # its specs -- TODO confirm against the parser version in use;
            # this relies on a stable value ordering.
            self.reqs[str(req.values()[1]).lower()] = req.values()[2]
    # Normalize each stored spec collection into a plain dict.
    for key in self.reqs.keys():
        self.reqs[key] = dict(self.reqs[key])
def __init__(self, url):
    """Fetch the requirements file at *url* and build a name->specs map.

    NOTE(review): duplicate of the single-quoted variant above; also
    Python 2 only (``urllib.urlretrieve``, ``req.values()`` indexing) --
    confirm the target interpreter.
    """
    filename = url.split("/")[-1]
    urllib.urlretrieve(url, filename)
    self.reqs = {}
    with open(filename, "r") as f:
        for req in requirements.parse(f):
            # presumably values()[1]=name, values()[2]=specs -- TODO confirm.
            self.reqs[str(req.values()[1]).lower()] = req.values()[2]
    # Convert each spec collection into a plain dict.
    for key in self.reqs.keys():
        self.reqs[key] = dict(self.reqs[key])
def test_modules():
    """tests presence of required modules"""
    from askbot import REQUIREMENTS
    import requirements

    # Flatten the requirements mapping into one file-like blob of text.
    req_text = '\n'.join(REQUIREMENTS.values())
    for req in requirements.parse(req_text):
        pip_path = unparse_requirement(req)
        mod_name = find_mod_name(req.name)
        try_import(mod_name, pip_path)
        test_specs(req)
def test_modules():
    """tests presence of required modules"""
    from askbot import REQUIREMENTS
    # flatten requirements into file-like string
    blob = '\n'.join(REQUIREMENTS.values())
    import requirements
    for requirement in requirements.parse(blob):
        pip_path = unparse_requirement(requirement)
        mod_name = find_mod_name(requirement.name)
        try_import(mod_name, pip_path)
        test_specs(requirement)
def test_build_requirements_file_valid_requirement(self, mock_gh_helper):
    """The generated requirements file mentions every added requirement."""
    wanted = [
        self.requirement_one,
        self.requirement_two,
        self.requirement_three,
    ]
    for requirement in wanted:
        self.experiment.requirements.add(requirement)
    wanted_names = [requirement.package_name for requirement in wanted]
    helper = self.experiment.language_helper()
    requirements_txt = helper.build_requirements_file()
    for parsed in requirements.parse(requirements_txt):
        self.assertTrue(parsed.name in wanted_names)
def requirements_file_to_dependency_list(
        requirements_filename='requirements.txt'):
    """Convert a requirements file into a list of dependency dicts.

    :param str requirements_filename: path to the requirements file
    :returns: list of ``{"pypi": {"package": <name>}}`` dicts
    :raises NameError: if the file contains an editable (-e) requirement;
        kept as NameError for compatibility with existing callers
    """
    dependency_libraries = []
    # 'with' guarantees the handle is closed even when we raise below; the
    # original leaked the handle on the editable-package error path.
    with open(requirements_filename) as fh:
        for req in requirements.parse(fh):
            if req.editable:
                raise NameError(
                    'Editable packages not supported yet "%s"' % req.line)
            dependency_libraries.append({"pypi": {"package": req.name}})
    return dependency_libraries
def get_missing_packages(requirements_file: str = None,
                         special_reqs: List[str] = None) -> List[Requirement]:
    """Return the requirements (with specifiers) that are not yet installed.

    :param requirements_file: optional requirements.txt path to read
    :param special_reqs: optional extra requirement lines to consider.
        Default changed from a mutable ``[]`` to ``None`` -- a shared
        mutable default is a classic Python pitfall; behavior for callers
        is unchanged.
    :returns: list of needed-but-missing Requirement objects
    """
    if special_reqs is None:
        special_reqs = []
    needed = [Requirement.parse_line(sr) for sr in special_reqs]
    if requirements_file is not None:
        with open(requirements_file, 'r') as fd:
            needed += [r for r in requirements.parse(fd) if r.specifier]
    installed = get_installed_packages()
    return [
        r for r in needed
        if pkg_resources.safe_name(r.name) not in installed
    ]
def load_requirements(self, requirements):
    """Install pip-style requirements via conda, falling back to pip.

    Repeatedly tries ``conda env update``; any packages conda reports as
    bad are moved out of the conda set and queued for a second, pip-based
    installation pass.

    :param requirements: mapping with a 'pip' list of requirement lines
    :returns: True on success (exceptions propagate on failure)
    """
    # create new environment file
    conda_env = dict()
    conda_env['channels'] = self.extra_channels
    reqs = [MarkerRequirement(next(parse(r))) for r in requirements['pip']]
    pip_requirements = []
    while reqs:
        # conda uses a single '=' where pip uses '=='.
        conda_env['dependencies'] = [
            r.tostr().replace('==', '=') for r in reqs
        ]
        with self.temp_file("conda_env", yaml.dump(conda_env),
                            suffix=".yml") as name:
            print('Conda: Trying to install requirements:\n{}'.format(
                conda_env['dependencies']))
            result = self._run_command(
                ("env", "update", "-p", self.path, "--file", name))
        # check if we need to remove specific packages
        bad_req = self._parse_conda_result_bad_packges(result)
        if not bad_req:
            break
        solved = False
        for bad_r in bad_req:
            # NOTE(review): 'name' is reused here, shadowing the temp-file
            # name above -- safe only because the temp file is already used.
            name = bad_r.split('[')[0].split('=')[0]
            # look for name in requirements
            for r in reqs:
                if r.name.lower() == name.lower():
                    pip_requirements.append(r)
                    reqs.remove(r)
                    solved = True
                    break
        # we couldn't remove even one package,
        # nothing we can do but try pip
        if not solved:
            pip_requirements.extend(reqs)
            break
    if pip_requirements:
        try:
            pip_req_str = [r.tostr() for r in pip_requirements]
            print(
                'Conda: Installing requirements: step 2 - using pip:\n{}'.
                format(pip_req_str))
            self.pip.load_requirements('\n'.join(pip_req_str))
        except Exception as e:
            print(e)
            raise e
    self.requirements_manager.post_install()
    return True
def get_requirements_list(requirements_file_path, dev_deps=False):
    """Parse a dependency file (Pipfile, setup.py, or requirements.txt).

    :param str requirements_file_path: path to the dependency file
    :param bool dev_deps: for Pipfiles, also include dev-packages
    :returns: list of parsed requirements that match the current
        environment and python version and are testable, with names
        normalized to lowercase dash form
    """
    # TODO: refactor recognizing the dependency manager to a single place
    if os.path.basename(requirements_file_path) == 'Pipfile':
        with io.open(requirements_file_path, 'r', encoding='utf-8') as f:
            requirements_data = f.read()
        parsed_reqs = pipfile.parse(requirements_data)
        req_list = list(parsed_reqs.get('packages', []))
        if dev_deps:
            req_list.extend(parsed_reqs.get('dev-packages', []))
        if not req_list:
            return []
        else:
            # Rewrite provenance so it points at the Pipfile path.
            for r in req_list:
                r.provenance = (requirements_file_path,
                                r.provenance[1],
                                r.provenance[2])
    elif os.path.basename(requirements_file_path) == 'setup.py':
        with open(requirements_file_path, 'r') as f:
            setup_py_file_content = f.read()
        requirements_data = setup_file.parse_requirements(
            setup_py_file_content)
        req_list = list(requirements.parse(requirements_data))
        provenance = setup_file.get_provenance(setup_py_file_content)
        for req in req_list:
            req.provenance = (
                os.path.basename(requirements_file_path),
                provenance,
                provenance
            )
    else:
        # assume this is a requirements.txt formatted file
        # Note: requirements.txt files are unicode and can be in any encoding.
        with open(requirements_file_path, 'r') as f:
            req_list = list(requirements.parse(f))
    req_list = filter(matches_environment, req_list)
    req_list = filter(is_testable, req_list)
    req_list = filter(matches_python_version, req_list)
    # Drop nameless entries; normalize names for later comparisons.
    req_list = [r for r in req_list if r.name]
    for req in req_list:
        req.name = req.name.lower().replace('_', '-')
    return req_list
def get_requirement(self):
    """Parse self.link into a requirement, marking local files as such."""
    req = first(requirements.parse("{0}".format(self.link)))
    if self.editable:
        req.editable = True
    if self.link and self.link.scheme.startswith("file"):
        if self.path:
            req.path = self.path
            req.local_file = True
            self._uri_scheme = "file"
            req.uri = None
    req.link = self.link
    return req
def from_pipfile(cls, name, pipfile):
    """Construct an instance from a named Pipfile entry."""
    if hasattr(pipfile, "keys"):
        allowed = cls.attr_fields()
        creation_args = {
            key: pipfile[key] for key in pipfile if key in allowed
        }
    else:
        creation_args = {}
    version = get_version(pipfile)
    creation_args.update(
        name=name,
        version=version,
        req=first(requirements.parse("{0}{1}".format(name, version))),
    )
    return cls(**creation_args)
def filter_requirements(self):
    """ Filter requirements from mentioned no_deploy paramter

    Reads the requirements file and drops every package named in
    ``self.no_deploy``.

    :returns: dict mapping package name -> original requirement line
    """
    with open(self.requirements_path, "r") as f:
        requirements = {r.name: r.line for r in req.parse(f)}
    for n in self.no_deploy:
        # pop with a default instead of the original bare 'except: pass',
        # which also swallowed KeyboardInterrupt/SystemExit.
        requirements.pop(n, None)
    return requirements
def get_req():
    """Read requirements.txt and return entries as '<name><specs>' strings.

    A missing/unreadable file yields an empty list.
    """
    try:
        with open('requirements.txt', 'r') as handle:
            parsed = list(requirements.parse(handle))
    except OSError:
        parsed = []
    # e.g. Django [('>=', '1.11'), ('<', '1.12')] -> 'Django>=1.11,<1.12'
    return [
        entry.name + ','.join(op + ver for op, ver in entry.specs)
        for entry in parsed
    ]
def parse_upper_constraints(sha):
    """
    Parses openstack upstream upper-constraints file into name, constraints
    specs, and extra data.

    :param sha: The SHA of the openstack requirements used to fetch the
        upper constraints file
    :returns: A detailed requirement, each requirement being a tuple
        containing:
        - package 'name' (string)
        - package 'specs' (list of tuples)
        - package 'extras' (list)
    """
    url = (
        "https://raw.githubusercontent.com/openstack/requirements/"
        "{}/upper-constraints.txt"
    ).format(sha)
    response = requests.get(url)
    yield from pyrequirements.parse(response.text)
def from_text(cls, name: str, specs: str) -> "Package":
    """Create package from text.

    Args:
        name: name of package
        specs: package constraints

    Returns:
        Package instance
    """
    if "http" in specs:
        # URL requirement: hand all parsed attributes straight through.
        parsed = next(requirements.parse(specs))
        return cls(**parsed.__dict__)
    if "-e" in specs:
        # Editable/local requirement: keep the caller-supplied name.
        parsed = next(requirements.parse(specs))
        return cls(name, parsed.specs, path=parsed.path)
    line = name if specs == "*" else f"{name}{specs}"
    parsed = next(requirements.parse(line))
    return cls(parsed.name, parsed.specs)
def get_requirement(self):
    """Parse the link URL into a requirement, copying local-file metadata."""
    line = self.link.url
    if self.editable:
        line = "-e " + line
    req = first(requirements.parse(line))
    if self.path and self.link and self.link.scheme.startswith("file"):
        req.local_file = True
        req.path = self.path
        req.uri = None
        self._uri_scheme = "file"
    if self.editable:
        req.editable = True
    req.link = self.link
    return req
def test_deps_consistency(): IGNORE = [ "flake8", "isort", "black", "mypy", "pydocstyle", "importlib_metadata", "tensorflow-cpu" ] # Collect the deps from all requirements.txt REQ_FILES = [ "requirements.txt", "requirements-pt.txt", "tests/requirements.txt", "docs/requirements.txt" ] folder = Path(__file__).parent.parent.parent.absolute() req_deps = {} for file in REQ_FILES: with open(folder.joinpath(file), "r") as f: _deps = [(req.name, req.specs) for req in requirements.parse(f)] for _dep in _deps: lib, specs = _dep assert req_deps.get(lib, specs) == specs, f"conflicting deps for {lib}" req_deps[lib] = specs # Collect the one from setup.py setup_deps = {} with open(folder.joinpath("setup.py"), "r") as f: setup = f.readlines() lines = setup[setup.index("_deps = [\n") + 1:] lines = [_dep.strip() for _dep in lines[:lines.index("]\n")]] lines = [_dep.split('"')[1] for _dep in lines if _dep.startswith('"')] _reqs = [Requirement.parse(_line) for _line in lines] _deps = [(req.name, req.specs) for req in _reqs] for _dep in _deps: lib, specs = _dep assert setup_deps.get(lib) is None, f"conflicting deps for {lib}" setup_deps[lib] = specs # Remove ignores for k in IGNORE: if isinstance(req_deps.get(k), list): del req_deps[k] if isinstance(setup_deps.get(k), list): del setup_deps[k] # Compare them assert len(req_deps) == len(setup_deps) for k, v in setup_deps.items(): assert isinstance(req_deps.get(k), list) assert req_deps[ k] == v, f"Mismatch on dependency {k}: {v} from setup.py, {req_deps[k]} from requirements.txt"
def get_version(name, reqfile=REQFILE):
    """
    Given the name of the package and requirements file, determine the
    package version.

    :param name: package name to look up
    :param reqfile: requirements source handed to ``parse`` -- NOTE(review):
        if REQFILE is a file *path* string (not a handle or file contents),
        ``parse`` would parse the path text itself; confirm what REQFILE is.
    :returns: Version for the matching requirement, or None if not found
    """
    requirements = parse(reqfile)
    for requirement in requirements:
        if name == requirement.name:
            # NOTE(review): reads 'revision' (a VCS ref), not the version
            # specs -- this only yields a value for VCS-pinned entries;
            # confirm this is intended rather than requirement.specs.
            version = Version(requirement.revision)
            return version
    return None
def parse_requirements(file_name):
    """
    pip install requirements-parser
    fname='requirements.txt'
    """
    import requirements
    from os.path import dirname, join, exists

    require_path = join(dirname(__file__), file_name)
    if not exists(require_path):
        return []
    with open(require_path, 'r') as handle:
        return [entry.name for entry in requirements.parse(handle)]
def extract_tags_from_requirements_txt(repository, branch):
    """
    Extracts tags from a requirements.txt file inside the repository on the
    given branch.

    :param repository: repository to look in
    :param branch: specific branch inside the repository
    :return: set of tags
    """
    logger.debug("Extracting tags from requirements file on {}".format(branch))
    tags = set()
    try:
        requirements_str = repository.get_file_content(branch,
                                                       "requirements.txt")
    except IOError:
        # Missing file is not fatal: just return no tags.
        logger.info(
            "Error: Can't get requirements file from {}".format(branch))
        return tags
    try:
        parsed_requirements = requirements.parse(requirements_str)
    except Exception as err:
        logger.info(
            "Error {0}: Can't parse requirements file {1} in {2}".format(
                err.__class__.__name__, requirements_str, branch))
        return tags
    for req in parsed_requirements:
        # check if this requirement is a normal specifier, i.e. no git url etc.
        try:
            if not req.specifier:
                continue
            req_name = req.name.lower()
            tags.add(req_name)
            for version_spec in req.specs:
                # Keep only the 'major.minor' portion of the version.
                truncated_version = re.match(r"^(\d+\.\d+)", version_spec[1])
                if truncated_version is None:
                    continue
                truncated_version = truncated_version.group(0)
                # Only exact-style pins produce a versioned tag.
                if version_spec[0] in ["==", "===", "~="]:
                    tags.add(req_name + "-" + truncated_version)
        except Exception as err:
            # Malformed entries are logged and skipped, not fatal.
            logger.info(
                "Error {}: Can't parse requirements file {} in {}: {}, {}, {}".
                format(err.__class__.__name__, requirements_str, branch,
                       req.name, req.specs, req.extras))
    return tags
def replace(self, requirements):
    # type: (Text) -> Text
    """Rewrite a requirements blob, swapping entries for wheel replacements.

    Accepts either one text blob or an iterable of lines; entries for which
    no replacement can be resolved fall back to their original line.

    :param requirements: requirements text (or iterable of lines)
    :returns: the rewritten requirements text
    :raises FatalSpecsResolutionError: propagated from resolution
    """
    parsed_requirements = tuple(
        map(
            MarkerRequirement,
            filter(
                None,
                parse(requirements)
                if isinstance(requirements, six.text_type)
                else (next(parse(line), None) for line in requirements))))
    if not parsed_requirements:
        # return the original requirements just in case
        return requirements

    def replace_one(i, req):
        # type: (int, MarkerRequirement) -> Optional[Text]
        # Resolve one requirement; None means "keep the original line".
        try:
            return self._replace_one(req)
        except FatalSpecsResolutionError:
            warning(
                'could not resolve python wheel replacement for {}'.format(
                    req))
            raise
        except Exception:
            warning('could not resolve python wheel replacement for {}, '
                    'using original requirements line: {}'.format(req, i))
            return None

    new_requirements = tuple(
        replace_one(i, req) for i, req in enumerate(parsed_requirements))
    conda = is_conda(self.config)
    # Fall back to the original requirement where replacement failed (None).
    result = map(
        lambda x, y: (x if x is not None else y.tostr(markers=not conda)),
        new_requirements, parsed_requirements)
    if not conda:
        result = map(self.translator.translate, result)
    return join_lines(result)
def read(self):
    """Lazily read the requirements file backing this entity.

    Populates ``self.lines`` (prefixed with a layer-URL comment) and records
    each package's owning layer in ``self._layer_refs``. A missing file
    yields an empty lines list; subsequent calls are no-ops.

    :raises BuildError: when an entry has no derivable package name
        (e.g. a VCS URL without an #egg fragment)
    """
    if self.lines is None:
        src = path(self.entity)
        if src.exists():
            for req in requirements.parse(src.text()):
                if req.name is None:
                    raise BuildError(
                        'Unable to determine package name for "{}"; '
                        'did you forget "#egg=..."?'.format(
                            req.line.strip()))
                self._layer_refs[safe_name(req.name)] = self.layer.url
            # Keep the raw lines, bracketed by a provenance comment and a
            # trailing blank line.
            self.lines = (['# ' + self.layer.url] +
                          src.lines(retain=False) +
                          [''])
        else:
            self.lines = []
def _get_requirements_from_files(self):
    """
    Get a dictionary, keyed by filename, of requirements per file

    :return: Dictionary, keyed by filename, of requirements per file
    """
    req_files = {}
    for req_filename in os.listdir(self.requirements_dir):
        with open(os.path.join(self.requirements_dir,
                               req_filename)) as requirements_file:
            req_file = RequirementsFile()
            # First pass: collect '-r other-file' include lines verbatim.
            for line in requirements_file.readlines():
                if line.strip().startswith('-r'):
                    req_file.included_files.append(line)
            # Rewind so the parser sees the whole file again (second pass).
            requirements_file.seek(0)
            req_file.packages = set(requirements.parse(requirements_file))
        req_files[req_filename] = req_file
    return req_files
def load_requirements_file(filename):
    """Loads the specified requirements file and checks that the specs are OK
    by themselves
    """
    print("Loading {0}".format(filename))
    with open(filename, 'r') as fd:
        reqs = sorted(requirements.parse(fd), key=lambda req: req.name)
    # all() short-circuits on the first failure, exactly like the original
    # running accumulator 'good and check(...)' did.
    good_requirements = all(check_requirement_specs(req) for req in reqs)
    return good_requirements, reqs
def get_packages_needing_upgrade(requirements_file: str) -> List[Requirement]:
    """Return requirements whose installed version no longer satisfies them."""
    with open(requirements_file, 'r') as fd:
        wanted = [req for req in requirements.parse(fd) if req.specifier]
    installed_by_name = {p.project_name: p for p in pkg_resources.working_set}
    needs_upgrade = []
    for req in wanted:
        installed = installed_by_name.get(pkg_resources.safe_name(req.name))
        if installed is None:
            # Not installed at all -- nothing to "upgrade".
            continue
        parsed = PkgRequirement.parse(req.line)
        if installed.version not in parsed.specifier:
            needs_upgrade.append(req)
    return needs_upgrade
def parse_requirements_alt(fname='requirements.txt'):
    """
    pip install requirements-parser
    fname='requirements.txt'
    """
    import requirements
    from os.path import dirname, join, exists

    require_fpath = join(dirname(__file__), fname)
    if exists(require_fpath):
        # Dont use until this handles platform specific dependencies
        with open(require_fpath, 'r') as file:
            parsed = list(requirements.parse(file))
        return [entry.name for entry in parsed]
    return []
def parse_requirements(fname):
    """Read a requirements file and return a dict of modules name / specification

    Exits the process (status 1) when the file cannot be opened or parsed.
    """
    try:
        with open(fname, "r") as fd:
            try:
                # pylint: disable=no-member
                rdict = {req.name: req.specs for req in requirements.parse(fd)}
            except pkg_resources.RequirementParseError:
                # NOTE(review): assumes the parser raises pkg_resources'
                # RequirementParseError -- confirm for the requirements
                # library version in use.
                print(f"Unable to parse the requirements file: {fname}")
                sys.exit(1)
    except Exception as e:
        # sys.exit raises SystemExit (a BaseException), so the inner exit
        # is not swallowed by this broad handler.
        print(f"Unable to open file {fname}: {str(e)}")
        sys.exit(1)
    return rdict
def convert_deps_from_pip(dep):
    """Converts a pip-formatted dependency to a Pipfile-formatted one.

    :param str dep: a single pip requirement line
    :returns: dict mapping the package name to its Pipfile-style value
    :raises ValueError: for VCS requirements without an #egg fragment
    """
    dependency = {}
    import requirements
    req = [r for r in requirements.parse(dep)][0]
    # VCS Installs.
    if req.vcs:
        if req.name is None:
            raise ValueError(
                'pipenv requires an #egg fragment for version controlled '
                'dependencies. Please install remote dependency '
                'in the form {0}#egg=<package-name>.'.format(req.uri))
        # Crop off the git+, etc part.
        dependency[req.name] = {req.vcs: req.uri[len(req.vcs) + 1:]}
        # Add --editable, if it's there.
        if req.editable:
            dependency[req.name].update({'editable': True})
        # Add the specifier, if it was provided.
        if req.revision:
            dependency[req.name].update({'ref': req.revision})
    elif req.specs or req.extras:
        specs = None
        # Comparison operators: e.g. Django>1.10
        if req.specs:
            # Split at the first operator char to keep the raw specifier text.
            r = multi_split(dep, '=<>')
            specs = dep[len(r[0]):]
            dependency[req.name] = specs
        # Extras: e.g. requests[socks]
        if req.extras:
            r = multi_split(dep, '[]')
            dependency[req.name] = {'extras': req.extras}
            # Extras plus a pin combine into {'extras': ..., 'version': ...}.
            if specs:
                dependency[req.name].update({'version': specs})
    # Bare dependencies: e.g. requests
    else:
        dependency[dep] = '*'
    return dependency
def _test_req_file(self, req_file: str) -> None:
    """Parse one fixture file and compare against its '.expected' JSON.

    Fixtures whose name contains 'fail' are instead expected to raise
    ValueError while their entries are materialized.
    """
    fp = join(TestParser._requirements_files_dir, req_file)
    with open(fp) as req_fh:
        with warnings.catch_warnings():
            # Parser warnings are irrelevant to these fixtures.
            warnings.simplefilter("ignore")
            parsed = parse(req_fh)
            if 'fail' in req_file:
                with self.assertRaises(ValueError):
                    list([dict(r) for r in parsed])
            else:
                # Expected output lives next to the fixture; fp[:-4] drops
                # the 4-character file extension (e.g. '.txt').
                with open(fp[:-4] + '.expected', 'r') as f2:
                    self.assertIsInstance(parsed, GeneratorType)
                    self.assertEqual(json.loads(f2.read()),
                                     listify(dict(r) for r in parsed),
                                     msg=f'Failed on {fp}')
def split_dependency(dep: str) -> tuple:
    """Split a dependency in package and specs.

    A dependency is a string (a line in requirements.txt, for example).

    :param dep: dependency string
    :return: pair ``(package, spec)`` where ``spec`` is a
        ``semantic_version.Spec`` built from the coerced constraints.
        (The original annotated the return as ``(str, List)`` -- a tuple
        literal, not a valid type hint -- and it also mismatched the Spec
        actually returned.)
    """
    req = next(requirements.parse(dep))
    package = req.name
    # Coerce loose versions (e.g. '1.2') into full semver before building
    # the combined Spec.
    specs = [
        operator + str(semantic_version.Version.coerce(version))
        for operator, version in req.specs
    ]
    return (package, semantic_version.Spec(*specs))
def _clean_requirements(raw): clean = [] for item in raw: try: item = requirements.parse(item).next().name except: pass bad_chars = [' ', '!', '<', ')', '='] if item is not None: if len(item) > 0: if not (any((c in bad_chars) for c in item)): clean.append(item) clean = [x for x in clean if not x.startswith('#')] clean = [x.strip(' ') for x in clean] return clean
def parse_pip_requirements(self, path):
    """Translate a pip requirements file into a soup dependency config.

    :param str path: path to the requirements file
    :returns: dict of the form {'dependencies': {name: version-or-'*'}}
    """
    target_config = {'dependencies': {}}
    # NOTE(review): opened in binary mode, so the parser receives bytes --
    # confirm the requirements library in scope accepts bytes input.
    with open(path, 'rb') as f:
        requirements_list = f.read()
    for requirement in requirements.parse(requirements_list):
        soup_requirement_name = requirement.name
        if requirement.specs:
            # Only the first (operator, version) pair is honored.
            soup_version = pysoup.utils.version_notation_pip_to_soup(
                ''.join(requirement.specs[0]))
        else:
            soup_version = '*'
        target_config['dependencies'][soup_requirement_name] = soup_version
    return target_config
def dump_requirement_versions():
    """Check the latest version of package and set to requirements.txt.

    This will be apply to packages without version only.
    """
    req_files = ["base.txt", "development.txt", "production.txt"]
    try:
        import requirements
    except ImportError as ex:
        # The parser is optional; degrade gracefully with a hint.
        print("Please install 'requirements-parser' to make "
              "dump requirement versions feature run properly.")
        return
    # Parse requirements
    print("Dumping requirement versions, it might take a few minutes...")
    for req_file in req_files:
        replacements = {}
        file_path = os.path.join(PROJECT_DIRECTORY, "requirements", req_file)
        with open(file_path, "r") as file_handler:
            reqs = [req for req in requirements.parse(file_handler)]
        for req in reqs:
            # Check if requirement will be fetched from Pypi or not.
            # A bare name (line == name) means no version pin or URL.
            if req.line == req.name:
                # Check latest version of this package in Pypi
                version = get_latest_version(req.line)
                if version:
                    replacements[req.line] = "{0}=={1}\n".format(
                        req.line, version
                    )
        if replacements:
            # Replace file content: re-read, substitute pinned lines, rewrite.
            with open(file_path, "r") as file_handler:
                lines = file_handler.readlines()
            for idx, line in enumerate(lines):
                if line.strip() in replacements:
                    lines[idx] = replacements[line.strip()]
            with open(file_path, "w") as file_handler:
                file_handler.write("".join(lines))
def check_fail(s):
    """Fully materialize the parser output for *s* with warnings silenced."""
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        list([dict(parsed) for parsed in parse(s)])
return entry else: return cves.getcpe(cpeid=entry) def is_number(s): try: ret = float(s) return ret except ValueError: return False if pyReq: with open(pyReq, 'r') as f: for req in requirements.parse(f): lib = req.name specs = req.specs # get vulnerable versions vulns = {} for item in db.cvesForCPE(lib): if 'vulnerable_configuration' in item: for entry in item['vulnerable_configuration']: vulns[vuln_config(entry)] = ["CVE: " + item['id'], "DATE: " + str(item['Published']), "CVSS: " + str(item['cvss']), item['summary']] #check if any of those is allowed according to specs found = False for vuln in vulns.keys(): sp = vuln.split(':') ind = -1 num = sp[ind]
def scan(self):
    """Parse self.file and map each requirement name to its specs."""
    with open(self.file, 'r') as handle:
        return {req.name: req.specs for req in requirements.parse(handle)}
def get_requirements_list(requirements_file):
    """Parse requirements and return normalized names matching this env."""
    parsed = list(requirements.parse(requirements_file))
    parsed = filter(matches_environment, parsed)
    parsed = filter(is_testable, parsed)
    return [entry.name.replace('_', '-') for entry in parsed]
def check(s, expected):
    """Parse *s* (warnings silenced) and compare its dicts to *expected*."""
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        assert_equal(listify(dict(item) for item in parse(s)), expected)
def get_pypi_packages(fileobj):
    """Return all PyPI-hosted packages from file-like object"""
    # Entries with a 'uri' (VCS/URL installs) are excluded.
    # NOTE(review): treats parsed entries as mappings (p['name'], p.get) --
    # confirm the parse() in scope yields dict-like objects; the
    # requirements-parser library yields Requirement objects instead.
    return [p['name'] for p in parse(fileobj) if not p.get('uri')]
def from_line(cls, line):
    """Parse a requirement line into an instance (version may be None)."""
    req = first(requirements.parse(line))
    if req.specifier:
        version = specs_to_string(req.specs)
    else:
        version = None
    return cls(name=req.name, version=version, req=req)