def test_parser_class():
    """Verify that parse() picks the correct parser class.

    Selection is exercised three ways: by explicit ``file_type``, by
    file ``path``, and by passing a parser class directly. An input
    that matches none of these raises UnknownDependencyFileError.
    """
    cases = (
        (filetypes.requirements_txt, "req.txt", parser.RequirementsTXTParser),
        (filetypes.tox_ini, "tox.ini", parser.ToxINIParser),
        (filetypes.conda_yml, "conda.yml", parser.CondaYMLParser),
    )
    for file_type, path, expected_cls in cases:
        assert isinstance(parse("", file_type=file_type).parser, expected_cls)
        assert isinstance(parse("", path=path).parser, expected_cls)

    # a parser class may also be supplied explicitly
    dep_file = parse("", parser=parser.CondaYMLParser)
    assert isinstance(dep_file.parser, parser.CondaYMLParser)

    # no hint at all -> the file type cannot be determined
    with pytest.raises(errors.UnknownDependencyFileError) as e:
        parse("")
def parse_dependencies(self, file_type):
    """Parse ``self.content`` as *file_type* and collect requirements.

    Each dependency found by :func:`parse` is converted into an
    instance of this file's requirement class and appended to
    ``self._requirements``; any files the parser resolved (e.g. via
    ``-r other.txt``) are stored on ``self._other_files``.
    """
    requirement_cls = self.get_requirement_class()
    parsed = parse(
        self.content,
        path=self.path,
        sha=self.sha,
        file_type=file_type,
        marker=(
            ("pyup: ignore file", "pyup:ignore file"),  # file marker
            ("pyup: ignore", "pyup:ignore"),  # line marker
        ),
    )
    for dependency in parsed.dependencies:
        # fall back to line 0 when the parser gave no line numbers
        first_lineno = dependency.line_numbers[0] if dependency.line_numbers else 0
        requirement = requirement_cls(
            name=dependency.name,
            specs=dependency.specs,
            line=dependency.line,
            lineno=first_lineno,
            extras=dependency.extras,
            file_type=file_type,
        )
        requirement.index_server = dependency.index_server
        if self.is_pipfile:
            requirement.pipfile = self.path
        if requirement.package:
            requirement.hashes = dependency.hashes
        self._requirements.append(requirement)
    self._other_files = parsed.resolved_files
def parse_with_dparse(location):
    """Return a list of DependentPackage built from the dparse-supported
    manifest at *location*, or None for directories, unrecognized
    dependency types, and manifests with no dependencies.
    """
    if filetype.is_dir(location):
        return
    file_name = fileutils.file_name(location)
    dependency_type = get_dependency_type(file_name)
    supported_types = (
        filetypes.requirements_txt,
        filetypes.conda_yml,
        filetypes.tox_ini,
        filetypes.pipfile,
        filetypes.pipfile_lock,
    )
    if dependency_type not in supported_types:
        return

    # Python 2 lacks text-mode newline/encoding handling used here
    mode = 'rb' if py2 else 'r'
    with open(location, mode) as f:
        content = f.read()

    dep_file = dparse.parse(content, file_type=dependency_type)
    if not dep_file.dependencies:
        return

    package_dependencies = []
    for dependency in dep_file.dependencies:
        # NOTE(review): reaches into SpecifierSet._specs, a private
        # attribute of packaging.specifiers.SpecifierSet
        specs = list(dependency.specs._specs)
        is_resolved = False
        requirement = None
        purl = PackageURL(
            type='pypi',
            name=dependency.name
        ).to_string()
        if specs:
            requirement = str(dependency.specs)
            for spec in specs:
                # an '==' or '===' pin means the version is resolved;
                # if several pins exist, the last one seen wins
                if spec.operator in ('==', '==='):
                    is_resolved = True
                    purl = PackageURL(
                        type='pypi',
                        name=dependency.name,
                        version=spec.version
                    ).to_string()
        package_dependencies.append(
            models.DependentPackage(
                purl=purl,
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=is_resolved,
                requirement=requirement
            )
        )
    return package_dependencies
def parse_with_dparse(location):
    """Return a list of DependentPackage built from the dependency
    manifest at *location*, or None for directories, unrecognized
    file names, and manifests with no dependencies.
    """
    if filetype.is_dir(location):
        return
    file_name = fileutils.file_name(location)
    recognized_names = (
        filetypes.requirements_txt,
        filetypes.conda_yml,
        filetypes.tox_ini,
        filetypes.pipfile,
        filetypes.pipfile_lock,
    )
    if file_name not in recognized_names:
        return

    # Python 2 needs binary mode here
    mode = 'rb' if py2 else 'r'
    with open(location, mode) as f:
        content = f.read()

    dep_file = dparse.parse(content, file_type=file_name)
    if not dep_file.dependencies:
        return

    package_dependencies = []
    for dependency in dep_file.dependencies:
        specs = dependency.specs
        # the full specifier set stringified, or None when unconstrained
        requirement = str(specs) if specs else None
        package_dependencies.append(
            models.DependentPackage(
                purl=PackageURL(type='pypi', name=dependency.name).to_string(),
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                requirement=requirement,
            ))
    return package_dependencies
def parse_with_dparse(location, dependency_type=None):
    """
    Return a list of DependentPackage built from a dparse-supported
    dependency manifest such as requirements.txt, Conda manifest or
    Pipfile.lock files, or return an empty list.
    """
    with open(location) as f:
        content = f.read()

    dep_file = dparse.parse(content, file_type=dependency_type)
    if not dep_file:
        return []

    dependent_packages = []
    for dependency in dep_file.dependencies:
        # default requirement string is just the name; refined below
        # when version specifiers are present
        requirement = dependency.name
        is_resolved = False
        purl = PackageURL(type='pypi', name=dependency.name)

        # dparse.dependencies.Dependency.specs comes from
        # packaging.requirements.Requirement.specifier, a
        # packaging.specifiers.SpecifierSet; its private ._specs is a
        # set of Specifier/LegacySpecifier objects, each exposing an
        # .operator and a .version property
        specifier_set = dependency.specs
        specifiers = specifier_set._specs
        if specifiers:
            # a SpecifierSet stringifies to comma-separated sorted specifiers
            requirement = str(specifier_set)
            # a single '==' / '===' specifier pins an exact version
            if len(specifiers) == 1:
                only_specifier = list(specifiers)[0]
                if only_specifier.operator in ('==', '==='):
                    is_resolved = True
                    purl = purl._replace(version=only_specifier.version)

        dependent_packages.append(
            models.DependentPackage(
                purl=purl.to_string(),
                # are we always this scope? what if we have requirements-dev.txt?
                scope='install',
                is_runtime=True,
                is_optional=False,
                is_resolved=is_resolved,
                requirement=requirement
            )
        )

    return dependent_packages
def get_from_files(fhs: List[IO[str]]) -> List[Package]:
    """
    Read packages from a list of file handles.

    The version specifier(s) for a package can be written in a number
    of ways:

        pkg
        pkg == 0.1
        pkg > 1, <= 3
        pkg == 1.0pre
        pkg == 1.0pre1.dev2
        pkg > 1!2rc3.dev6
        pkg ===abcd
        pkg != 3
        pkg == 3.2.*
        pkg ~= 1.2

    The `packaging` module also accepts bogus specifiers such as
    ``pkg >5, <4, ==9, ==1``.

    For now, the lowest value among the ==, ~=, > and >= operators is
    used as the coordinate version for OSS Index; upper bounds and
    negations are ignored. Only the `final release` part of the
    version is used (see PEP 440), because OSS Index does not handle
    the other parts well — e.g. `pkg:pypi/[email protected]` reports
    vulnerabilities that `pkg:pypi/[email protected]` misses.
    """
    pkgs: List[Package] = []
    for fh in fhs:
        with warnings.catch_warnings():
            # dparse may warn about deprecated file formats; silence it
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            dependencies = dparse.parse(fh.read(), path=fh.name).dependencies
            for dep in dependencies:
                candidates = []
                for spec in dep.specs:
                    # lower-bound operators only; upper bounds and != ignored
                    if spec.operator not in ["==", "~=", ">=", ">"]:
                        continue
                    # wildcards like 3.2.* become 3.2.0
                    candidate = _Version(spec.version.replace("*", "0"))
                    # pre-releases round down to the previous release ...
                    if candidate.is_prerelease:
                        candidate -= 1
                    # ... and a strict '>' bound bumps up to the next one
                    if spec.operator == ">":
                        candidate += 1
                    candidates.append(candidate)
                lowest = min(candidates or [_Version("0")])
                pkg = Package(dep.name, lowest.base_version)
                if pkg not in pkgs:
                    pkgs.append(pkg)
    return pkgs
def test_dependency_file_serialize():
    """serialize() round-trips file metadata and parsed dependency names."""
    content = "django==1.2\nrequests==1.2.3"
    dep_file = parse(
        content=content,
        file_type=filetypes.requirements_txt,
        path="req.txt",
        sha="sha",
    )
    serialized = dep_file.serialize()
    # metadata fields mirror the attributes on the parsed file
    for attr in ("file_type", "content", "path", "sha"):
        assert serialized[attr] == getattr(dep_file, attr)
    # dependencies keep their parse order
    names = [dep["name"] for dep in serialized["dependencies"]]
    assert names[0] == "django"
    assert names[1] == "requests"
import sys
import dparse
import dlex
import dinterp

# ###Interactive Mode### ### in development
#
# Simple REPL for the Dino language: read a line, parse it, compile
# and run it while threading the interpreter's variables between
# iterations. Typing "<end>" (or EOF) exits.

interp = dinterp.DinoInterp(prog=None)
# renamed from `vars` to avoid shadowing the builtin
variables = {'this': 'Dino Language'}
while True:
    try:
        line = input("[Dino] ")
        if line == "<end>":
            break
    except EOFError:
        raise SystemExit
    if not line:
        continue
    line += "\n"
    prog = dparse.parse(line)
    interp.setVariables(variables)
    interp.comp(prog)
    # BUG FIX: the original assigned the bound method itself
    # (interp.getVariables, no parentheses) instead of calling it, so
    # the next iteration fed a method object back into setVariables().
    variables = interp.getVariables()
    interp.run()
    if not prog:
        continue
# #########################
import dlex
import dinterp

# Read a Dino program from test.txt, parse it, and run it.
# NOTE(review): `dparse` is used below but not imported in this view —
# presumably imported earlier in the file; verify.

# FIX: open the source file with a context manager so the handle is
# closed even if reading or parsing fails (the original leaked it).
with open("test.txt") as fhand:
    data = fhand.read()

prog = dparse.parse(data)
interp = dinterp.DinoInterp(prog)
# interp.readData()

print()
interp.run()