def build_packages_from_gemfile_lock(gemfile_lock):
    """
    Yield RubyGem Packages from a given GemfileLockParser `gemfile_lock`:
    first a nameless package listing every locked gem as a resolved
    dependency, then one package per locked gem with its own dependencies.
    """
    locked_gems = list(gemfile_lock.all_gems.values())

    top_level_deps = [
        models.DependentPackage(
            purl=PackageURL(type='gem', name=gem.name, version=gem.version).to_string(),
            requirement=', '.join(gem.requirements),
            scope='dependencies',
            is_runtime=True,
            is_optional=False,
            is_resolved=True,
        )
        for gem in locked_gems
    ]
    yield RubyGem(dependencies=top_level_deps)

    for gem in locked_gems:
        gem_deps = [
            models.DependentPackage(
                purl=PackageURL(type='gem', name=dep.name, version=dep.version).to_string(),
                requirement=', '.join(dep.requirements),
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=True,
            )
            for dep in gem.dependencies.values()
        ]
        yield RubyGem(name=gem.name, version=gem.version, dependencies=gem_deps)
def recognize(cls, location):
    """
    Yield one or more Package manifest objects given a file ``location``
    pointing to a package archive, manifest or similar.
    """
    data = cls.read_podfile_lock(location)
    # 'PODS' entries are either plain pod strings, or single-key mappings
    # of a main pod to its own dependent pods.
    pods = data['PODS']

    # first flatten both shapes into a list of pod name strings; this
    # removes the copy-pasted dict/str branches that built the same
    # DependentPackage twice
    pod_names = []
    for pod in pods:
        if isinstance(pod, dict):
            # only the mapping keys (the main pods) are direct dependencies
            pod_names.extend(pod.keys())
        elif isinstance(pod, str):
            pod_names.append(pod)

    pod_deps = []
    for pod_name in pod_names:
        podname, namespace, version = get_data_from_pods(pod_name)
        purl = PackageURL(
            type='pods',
            namespace=namespace,
            name=podname,
            version=version,
        ).to_string()
        pod_deps.append(
            models.DependentPackage(
                purl=purl,
                scope='requires-dev',
                requirement=version,
                is_runtime=False,
                is_optional=True,
                is_resolved=True,
            )
        )

    yield cls(
        dependencies=pod_deps,
        declared_license=None,
    )
def build_xcode_package_from_lockfile(podfile_lock_data):
    """
    Yield a CocoapodsPackage from a data mapping obtained from a
    podfile.lock.
    """
    # 'PODS' entries are either plain pod strings, or single-key mappings
    # of a main pod to its own dependent pods.
    pods = podfile_lock_data['PODS']

    # flatten both shapes into pod name strings; this removes the
    # copy-pasted dict/str branches that built identical DependentPackages
    pod_names = []
    for pod in pods:
        if isinstance(pod, dict):
            # only the mapping keys (the main pods) are direct dependencies
            pod_names.extend(pod.keys())
        elif isinstance(pod, str):
            pod_names.append(pod)

    pod_deps = []
    for pod_name in pod_names:
        podname, namespace, version = get_data_from_pods(pod_name)
        purl = PackageURL(
            type='pods',
            namespace=namespace,
            name=podname,
            version=version,
        ).to_string()
        pod_deps.append(
            models.DependentPackage(
                purl=purl,
                scope='requires-dev',
                requirement=version,
                is_runtime=False,
                is_optional=True,
                is_resolved=True,
            ))

    yield CocoapodsPackage(
        dependencies=pod_deps,
        declared_license=None,
    )
def parse(cls, location):
    """
    Yield a PackageData built from the go.mod file at ``location``.
    """
    gomods = go_mod.parse_gomod(location)

    dependencies = []
    # require and exclude entries are built identically except for scope
    for scope, gomod_deps in (
        ('require', gomods.require or []),
        ('exclude', gomods.exclude or []),
    ):
        for gomod in gomod_deps:
            dependencies.append(
                models.DependentPackage(
                    purl=gomod.purl(include_version=True),
                    extracted_requirement=gomod.version,
                    scope=scope,
                    is_runtime=True,
                    is_optional=False,
                    is_resolved=False,
                ))

    name = gomods.name
    namespace = gomods.namespace

    # only build URLs when both namespace and name are present: otherwise
    # we would emit bogus URLs such as 'https://None/None.git'
    homepage_url = None
    vcs_url = None
    repository_homepage_url = None
    if namespace and name:
        homepage_url = f'https://pkg.go.dev/{namespace}/{name}'
        vcs_url = f'https://{namespace}/{name}.git'
        repository_homepage_url = f'https://pkg.go.dev/{namespace}/{name}'

    yield models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        name=name,
        namespace=namespace,
        vcs_url=vcs_url,
        homepage_url=homepage_url,
        repository_homepage_url=repository_homepage_url,
        dependencies=dependencies,
        primary_language=cls.default_primary_language,
    )
def parse(cls, location):
    """
    Yield PackageData from a YAML Podfile.lock at ``location``.
    """
    with open(location) as pfl:
        data = saneyaml.load(pfl)

    # be robust to a lockfile without a PODS section
    pods = data.get('PODS') or []

    # 'PODS' entries are either plain pod strings, or single-key mappings
    # of a main pod to its own dependent pods; flatten both shapes into
    # pod name strings to avoid duplicated build branches
    pod_names = []
    for pod in pods:
        if isinstance(pod, dict):
            # only the mapping keys (the main pods) are direct dependencies
            pod_names.extend(pod.keys())
        elif isinstance(pod, str):
            pod_names.append(pod)

    dependencies = []
    for pod_name in pod_names:
        purl, xreq = parse_dep_requirements(pod_name)
        dependencies.append(
            models.DependentPackage(
                purl=str(purl),
                # FIXME: why dev?
                scope='requires',
                extracted_requirement=xreq,
                is_runtime=False,
                is_optional=True,
                is_resolved=True,
            ))

    yield models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        primary_language=cls.default_primary_language,
        dependencies=dependencies,
    )
def parse(cls, location):
    """
    Yield PackageData for a Gemfile.lock at ``location``: first one for the
    lockfile itself listing every locked gem as a dependency, then one per
    locked gem carrying that gem's own dependencies and repository URLs.
    """
    gemfile_lock = GemfileLockParser(location)
    locked_gems = list(gemfile_lock.all_gems.values())

    top_level_deps = [
        models.DependentPackage(
            purl=PackageURL(type='gem', name=gem.name, version=gem.version).to_string(),
            extracted_requirement=', '.join(gem.requirements),
            # FIXME: get proper scope... This does not seem right
            scope='dependencies',
            is_runtime=True,
            is_optional=False,
            is_resolved=True,
        )
        for gem in locked_gems
    ]

    yield models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        dependencies=top_level_deps,
        primary_language=cls.default_primary_language,
    )

    for gem in locked_gems:
        gem_deps = [
            models.DependentPackage(
                purl=PackageURL(type='gem', name=dep.name, version=dep.version).to_string(),
                extracted_requirement=', '.join(dep.requirements),
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=True,
            )
            for dep in gem.dependencies.values()
        ]
        urls = get_urls(gem.name, gem.version)
        yield models.PackageData(
            datasource_id=cls.datasource_id,
            primary_language=cls.default_primary_language,
            type=cls.default_package_type,
            name=gem.name,
            version=gem.version,
            dependencies=gem_deps,
            **urls,
        )
def get_dependent_packages(lines, location, package_type):
    """
    Yield DependentPackage parsed from the gemfile ``lines`` of the file at
    ``location`` for the given ``package_type``.
    """
    # map a Gemfile group/scope name to runtime/optional flags; anything
    # not listed is treated as a plain runtime dependency
    flags_by_scope = {
        'runtime': dict(is_runtime=True, is_optional=False),
        'dependency': dict(is_runtime=True, is_optional=False),
        'production': dict(is_runtime=True, is_optional=False),
        'development': dict(is_runtime=False, is_optional=True),
        'test': dict(is_runtime=False, is_optional=True),
        'metrics': dict(is_runtime=False, is_optional=True),
    }
    # bug fix: the previous default was the *string* 'runtime', which would
    # crash when expanded with **flags below; use a proper flags mapping
    default_flags = dict(is_runtime=True, is_optional=False)

    dependencies = LinesBasedGemfileParser(lines=lines, filepath=location).parse()
    for scope, depends in dependencies.items():
        for dep in depends or []:
            flags = flags_by_scope.get(scope, default_flags)
            yield models.DependentPackage(
                purl=PackageURL(type=package_type, name=dep.name).to_string(),
                extracted_requirement=', '.join(dep.requirement),
                scope=scope,
                is_resolved=False,
                **flags,
            )
def parse_with_dparse(location):
    """
    Return a list of DependentPackage built from the dparse-supported
    dependency file at ``location`` (requirements.txt, conda.yml, tox.ini,
    Pipfile or Pipfile.lock) or None if the location is a directory, an
    unsupported file, or has no dependencies.
    """
    if filetype.is_dir(location):
        return

    file_name = fileutils.file_name(location)
    supported_file_names = (
        filetypes.requirements_txt,
        filetypes.conda_yml,
        filetypes.tox_ini,
        filetypes.pipfile,
        filetypes.pipfile_lock,
    )
    if file_name not in supported_file_names:
        return

    # dparse wants bytes on Python 2 and text on Python 3
    mode = 'rb' if py2 else 'r'
    with open(location, mode) as f:
        content = f.read()

    df = dparse.parse(content, file_type=file_name)
    if not df.dependencies:
        return

    package_dependencies = []
    for dep in df.dependencies:
        # stringified version specifiers, if any, become the requirement
        requirement = str(dep.specs) if dep.specs else None
        package_dependencies.append(
            models.DependentPackage(
                purl=PackageURL(type='pypi', name=dep.name).to_string(),
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                requirement=requirement,
            ))
    return package_dependencies
def dependency_mapper(dependencies, scope='dependencies'):
    """
    Yield DependentPackage collected from a mapping of cargo
    ``dependencies`` of name to requirement, using ``scope`` as the
    dependency scope.
    """
    # dev/build dependency tables are not needed at runtime
    is_runtime = not scope.endswith(('dev-dependencies', 'build-dependencies'))
    for name, requirement in dependencies.items():
        # bug fix: is_optional was only assigned inside the str/dict
        # branches, so any other requirement shape raised NameError;
        # default to non-optional instead
        is_optional = False
        if isinstance(requirement, dict):
            # complex requirement, with more than version are harder to handle
            # so we just dump
            is_optional = requirement.pop('optional', False)
            requirement = saneyaml.dump(requirement)

        yield models.DependentPackage(
            purl=PackageURL(
                type='cargo',
                name=name,
            ).to_string(),
            extracted_requirement=requirement,
            scope=scope,
            is_runtime=is_runtime,
            is_optional=is_optional,
            is_resolved=False,
        )
def parse_with_dparse(location):
    """
    Return a list of DependentPackage built from the dparse-supported
    dependency file at ``location``, or None if the location is a
    directory, an unsupported file type, or has no dependencies.
    """
    is_dir = filetype.is_dir(location)
    if is_dir:
        return
    file_name = fileutils.file_name(location)
    dependency_type = get_dependency_type(file_name)
    if dependency_type not in (
        filetypes.requirements_txt,
        filetypes.conda_yml,
        filetypes.tox_ini,
        filetypes.pipfile,
        filetypes.pipfile_lock,
    ):
        return
    # dparse wants bytes on Python 2 and text on Python 3
    if py2:
        mode = 'rb'
    else:
        mode = 'r'
    with open(location, mode) as f:
        content = f.read()
    df = dparse.parse(content, file_type=dependency_type)
    df_dependencies = df.dependencies
    if not df_dependencies:
        return
    package_dependencies = []
    for df_dependency in df_dependencies:
        # dparse specs wrap a packaging SpecifierSet; its private _specs is
        # the set of individual Specifier objects, each with an .operator
        # and a .version
        specs = list(df_dependency.specs._specs)
        is_resolved = False
        requirement = None
        # default purl has no version; it is replaced below if a pin is found
        purl = PackageURL(
            type='pypi',
            name=df_dependency.name
        ).to_string()
        if specs:
            requirement = str(df_dependency.specs)
            for spec in specs:
                operator = spec.operator
                version = spec.version
                # an equality operator means the dependency is pinned to an
                # exact version, i.e. resolved; the purl then carries the
                # pinned version
                if any(operator == element for element in ('==', '===')):
                    is_resolved = True
                    purl = PackageURL(
                        type='pypi',
                        name=df_dependency.name,
                        version=version
                    ).to_string()
        package_dependencies.append(
            models.DependentPackage(
                purl=purl,
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=is_resolved,
                requirement=requirement
            )
        )
    return package_dependencies
def parse(cls, location):
    """
    Yield a PackageData built from the Godeps file at ``location``.
    """
    godeps = Godep(location)

    namespace = None
    name = None
    if godeps.import_path:
        # route the import path through a purl to split namespace/name nicely
        purl = PackageURL.from_string(f'pkg:golang/{godeps.import_path}')
        namespace = purl.namespace
        name = purl.name

    dependencies = []
    for dep in godeps.dependencies or []:
        dep_purl = PackageURL.from_string(f'pkg:golang/{dep.import_path}')
        dependencies.append(
            models.DependentPackage(
                purl=str(dep_purl),
                extracted_requirement=dep.revision,
                scope='Deps',
                is_runtime=True,
                is_optional=False,
                is_resolved=False,
            ))

    yield models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        namespace=namespace,
        name=name,
        primary_language=cls.default_primary_language,
        dependencies=dependencies,
    )
def build_package(cls, dependencies):
    """
    Yield a PackageData built from a list of gradle ``dependencies``
    mappings with name/namespace/version/scope keys.
    """
    package_dependencies = []
    for dependency in dependencies:
        name = dependency.get('name', '')
        if not name:
            # skip collected dependencies that do not have a name
            continue

        namespace = dependency.get('namespace', '')
        version = dependency.get('version', '')
        scope = dependency.get('scope', '')
        # test-scoped dependencies are optional and not needed at runtime
        is_test = 'test' in scope.lower()

        package_dependencies.append(
            models.DependentPackage(
                purl=PackageURL(
                    type=cls.default_package_type,
                    namespace=namespace,
                    name=name,
                    version=version,
                ).to_string(),
                scope=scope,
                extracted_requirement=version,
                is_runtime=not is_test,
                is_optional=is_test,
            ))

    yield models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        primary_language=BuildGradleHandler.default_primary_language,
        dependencies=package_dependencies,
    )
def build_dep(name, version, scope, is_runtime=True, is_optional=False):
    """
    Return a DependentPackage built from the provided data.
    """
    # TODO: these can be more complex for SDKs
    # https://dart.dev/tools/pub/dependencies#dependency-sources
    if isinstance(version, dict) and 'sdk' in version:
        # {'sdk': 'flutter'} style dep: keep the mapping serialized as a
        # requirement string (a wart, but preserved on purpose)
        version = ', '.join(': '.join([k, str(v)]) for k, v in version.items())

    # a version made only of digits and dots is an exact pin: resolved,
    # and the purl carries the version
    if version.replace('.', '').isdigit():
        purl = PackageURL(type='pubspec', name=name, version=version)
        is_resolved = True
    else:
        purl = PackageURL(type='pubspec', name=name)
        is_resolved = False

    return models.DependentPackage(
        purl=purl.to_string(),
        requirement=version,
        scope=scope,
        is_runtime=is_runtime,
        is_optional=is_optional,
        is_resolved=is_resolved,
    )
def build_dep_package(package, scope, is_runtime, is_optional):
    """
    Return a resolved DependentPackage pointing at ``package`` with the
    given ``scope`` and runtime/optional flags.
    """
    dependent = models.DependentPackage(
        purl=package.purl,
        scope=scope,
        is_runtime=is_runtime,
        is_optional=is_optional,
        is_resolved=True,
    )
    return dependent
def deps_mapper(deps, package, field_name):
    """
    Handle deps such as dependencies, devDependencies, peerDependencies, optionalDependencies
    return a tuple of (dep type, list of deps)
    https://docs.npmjs.com/files/package.json#dependencies
    https://docs.npmjs.com/files/package.json#peerdependencies
    https://docs.npmjs.com/files/package.json#devdependencies
    https://docs.npmjs.com/files/package.json#optionaldependencies
    """
    # runtime/optional flags for each npm dependency field
    npm_dependency_scopes_attributes = {
        'dependencies': dict(is_runtime=True, is_optional=False),
        'devDependencies': dict(is_runtime=False, is_optional=True),
        'peerDependencies': dict(is_runtime=True, is_optional=False),
        'optionalDependencies': dict(is_runtime=True, is_optional=True),
    }
    # NOTE: package.dependencies is mutated in place below
    dependencies = package.dependencies

    deps_by_name = {}
    if field_name == 'optionalDependencies':
        # optionalDependencies override the dependencies with the same name
        # so we build a map of name->dep object for use later
        for d in dependencies:
            if d.scope != 'dependencies':
                continue
            purl = PackageURL.from_string(d.purl)
            npm_name = purl.name
            if purl.namespace:
                # scoped npm packages are named '@scope/name'
                npm_name = '/'.join([purl.namespace, purl.name])
            deps_by_name[npm_name] = d

    for fqname, requirement in deps.items():
        ns, name = split_scoped_package_name(fqname)
        if not name:
            # skip malformed entries with no usable name
            continue
        purl = PackageURL(type='npm', namespace=ns, name=name).to_string()

        # optionalDependencies override the dependencies with the same name
        # https://docs.npmjs.com/files/package.json#optionaldependencies
        # therefore we update/override the dependency of the same name
        overridable = deps_by_name.get(fqname)

        if overridable and field_name == 'optionalDependencies':
            # mutate the existing DependentPackage instead of adding a new one
            overridable.purl = purl
            overridable.is_optional = True
            overridable.scope = field_name
        else:
            dependency_attributes = npm_dependency_scopes_attributes.get(field_name, dict())
            dep = models.DependentPackage(
                purl=purl,
                scope=field_name,
                extracted_requirement=requirement,
                **dependency_attributes
            )
            dependencies.append(dep)

    return package
def parse_with_dparse(location, dependency_type=None):
    """
    Return a list of DependentPackage built from a dparse-supported dependency
    manifest such as requirements.txt, Conda manifest or Pipfile.lock files, or
    return an empty list.
    """
    with open(location) as f:
        content = f.read()

    dep_file = dparse.parse(content, file_type=dependency_type)
    if not dep_file:
        return []

    dependent_packages = []

    for dependency in dep_file.dependencies:
        # fall back to the bare name as the requirement when no specifiers exist
        requirement = dependency.name
        is_resolved = False
        purl = PackageURL(type='pypi', name=dependency.name)

        # note: dparse.dependencies.Dependency.specs comes from
        # packaging.requirements.Requirement.specifier
        # which in turn is a packaging.specifiers.SpecifierSet objects
        # and a SpecifierSet._specs is a set of either:
        # packaging.specifiers.Specifier or packaging.specifiers.LegacySpecifier
        # and each of these have a .operator and .version property

        # a packaging.specifiers.SpecifierSet
        specifiers_set = dependency.specs
        # a list of packaging.specifiers.Specifier
        specifiers = specifiers_set._specs

        if specifiers:
            # SpecifierSet stringifies to comma-separated sorted Specifiers
            requirement = str(specifiers_set)
            # are we pinned e.g. resolved? only a single equality specifier
            # counts as a pin
            if len(specifiers) == 1:
                specifier = list(specifiers)[0]
                if specifier.operator in ('==', '==='):
                    is_resolved = True
                    purl = purl._replace(version=specifier.version)

        dependent_packages.append(
            models.DependentPackage(
                purl=purl.to_string(),
                # are we always this scope? what if we have requirements-dev.txt?
                scope='install',
                is_runtime=True,
                is_optional=False,
                is_resolved=is_resolved,
                requirement=requirement
            )
        )

    return dependent_packages
def build_package(package_data):
    """
    Return a ChefPackage from a ``package_data`` mapping (from a
    metadata.json or similar) or None.
    """
    name = package_data.get('name')
    version = package_data.get('version')
    if not name or not version:
        # a metadata.json without name and version is not a usable chef package
        # FIXME: raise error?
        return

    # use `or ''` fallbacks throughout: a .get() default does not apply
    # when the key is present with an explicit None value, and None would
    # crash the .strip() calls below
    maintainer_name = package_data.get('maintainer') or ''
    maintainer_email = package_data.get('maintainer_email') or ''
    parties = []
    if maintainer_name or maintainer_email:
        parties.append(
            models.Party(
                name=maintainer_name or None,
                role='maintainer',
                email=maintainer_email or None,
            ))

    description = (
        package_data.get('description')
        or package_data.get('long_description')
        or ''
    )
    lic = package_data.get('license') or ''
    code_view_url = package_data.get('source_url') or ''
    bug_tracking_url = package_data.get('issues_url') or ''

    dependencies = (
        package_data.get('dependencies')
        or package_data.get('depends')
        or {}
    )
    package_dependencies = []
    for dependency_name, requirement in dependencies.items():
        package_dependencies.append(
            models.DependentPackage(
                purl=PackageURL(type='chef', name=dependency_name).to_string(),
                scope='dependencies',
                requirement=requirement,
                is_runtime=True,
                is_optional=False,
            ))

    return ChefPackage(
        name=name,
        version=version,
        parties=parties,
        description=description.strip() or None,
        declared_license=lic.strip() or None,
        code_view_url=code_view_url.strip() or None,
        bug_tracking_url=bug_tracking_url.strip() or None,
        download_url=chef_download_url(name, version).strip(),
        dependencies=package_dependencies,
    )
def recognize(cls, location):
    """
    Yield one or more Package manifest objects given a file ``location``
    pointing to a haxelib.json manifest such as:

    {
        "name": "haxelib",
        "url" : "https://lib.haxe.org/documentation/",
        "license": "GPL",
        "tags": ["haxelib", "core"],
        "description": "The haxelib client",
        "classPath": "src",
        "version": "3.4.0",
        "releasenote": " * Fix password input issue in Windows (#421).\n * ....",
        "contributors": ["back2dos", "ncannasse", "jason", "Simn", "nadako", "andyli"]
    }
    """
    with io.open(location, encoding='utf-8') as loc:
        package_data = json.load(loc)

    package = cls(
        name=package_data.get('name'),
        version=package_data.get('version'),
        homepage_url=package_data.get('url'),
        declared_license=package_data.get('license'),
        keywords=package_data.get('tags'),
        description=package_data.get('description'),
    )
    package.download_url = package.repository_download_url()

    for contrib in package_data.get('contributors', []):
        package.parties.append(
            models.Party(
                type=models.party_person,
                name=contrib,
                role='contributor',
                url='https://lib.haxe.org/u/{}'.format(contrib)))

    for dep_name, dep_version in package_data.get('dependencies', {}).items():
        dep_version = dep_version and dep_version.strip()
        package.dependencies.append(
            models.DependentPackage(
                purl=PackageURL(
                    type='haxe',
                    name=dep_name,
                    version=dep_version,
                ).to_string(),
                # a non-empty pinned version means the dependency is resolved
                is_resolved=bool(dep_version),
            ))

    yield package
def build_gomod_package(gomods):
    """
    Return a GolangPackage object built from a parsed go.mod ``gomods``
    object or None.
    """
    package_dependencies = []
    # require and exclude entries are built identically except for scope
    for scope, gomod_deps in (
        ('require', gomods.require or []),
        ('exclude', gomods.exclude or []),
    ):
        for gomod in gomod_deps:
            package_dependencies.append(
                models.DependentPackage(
                    purl=gomod.purl(include_version=False),
                    requirement=gomod.version,
                    scope=scope,
                    is_runtime=True,
                    is_optional=False,
                    is_resolved=False,
                ))

    name = gomods.name
    namespace = gomods.namespace

    # only build URLs when both namespace and name are present: otherwise
    # we would emit bogus URLs such as 'https://None/None.git'
    homepage_url = None
    vcs_url = None
    if namespace and name:
        homepage_url = 'https://pkg.go.dev/{}/{}'.format(namespace, name)
        vcs_url = 'https://{}/{}.git'.format(namespace, name)

    return GolangPackage(
        name=name,
        namespace=namespace,
        vcs_url=vcs_url,
        homepage_url=homepage_url,
        dependencies=package_dependencies,
    )
def build_packages_from_gemspec(location):
    """
    Return a RubyGem Package built from the gemspec file at ``location``.
    """
    gemspec_object = Spec()
    gemspec_data = gemspec_object.parse_spec(location)

    name = gemspec_data.get('name')
    version = gemspec_data.get('version')
    homepage_url = gemspec_data.get('homepage_url')

    # default missing summary/description to '' so that the length
    # comparison below cannot crash on None
    summary = gemspec_data.get('summary') or ''
    description = gemspec_data.get('description') or ''
    if len(summary) > len(description):
        # keep the longest of the two texts as the description
        description = summary

    declared_license = gemspec_data.get('license')
    if declared_license:
        declared_license = declared_license.split(',')

    author = gemspec_data.get('author') or []
    email = gemspec_data.get('email') or []
    parties = list(party_mapper(author, email))

    package = RubyGem(
        name=name,
        version=version,
        parties=parties,
        homepage_url=homepage_url,
        description=description,
        declared_license=declared_license
    )

    dependencies = gemspec_data.get('dependencies', {}) or {}
    package_dependencies = []
    # use distinct loop variable names: the original shadowed the package
    # `name` and `version` above
    for dep_name, dep_requirements in dependencies.items():
        package_dependencies.append(
            models.DependentPackage(
                purl=PackageURL(
                    type='gem',
                    name=dep_name
                ).to_string(),
                requirement=', '.join(dep_requirements),
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=False,
            )
        )
    package.dependencies = package_dependencies
    return package
def recognize(cls, location):
    """
    Yield one or more Package manifest objects given a file ``location``
    pointing to a package archive, manifest or similar.
    """
    gemfile_lock = GemfileLockParser(location)
    locked_gems = list(gemfile_lock.all_gems.values())

    top_level_deps = [
        models.DependentPackage(
            purl=PackageURL(type='gem', name=gem.name, version=gem.version).to_string(),
            requirement=', '.join(gem.requirements),
            scope='dependencies',
            is_runtime=True,
            is_optional=False,
            is_resolved=True,
        )
        for gem in locked_gems
    ]
    yield cls(dependencies=top_level_deps)

    for gem in locked_gems:
        gem_deps = [
            models.DependentPackage(
                purl=PackageURL(type='gem', name=dep.name, version=dep.version).to_string(),
                requirement=', '.join(dep.requirements),
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=True,
            )
            for dep in gem.dependencies.values()
        ]
        yield cls(name=gem.name, version=gem.version, dependencies=gem_deps)
def get_dependencies(pom):
    """
    Return a list of Dependent package objects found in a MavenPom `pom` object.
    """
    dependencies = []
    for scope, deps in pom.dependencies.items():
        if TRACE:
            logger.debug('parse: dependencies.deps: {}'.format(deps))

        if scope:
            scope = scope.strip().lower()
        if not scope:
            # maven default scope
            scope = 'compile'

        for (dgroup_id, dartifact_id, dversion), drequired in deps:
            if TRACE:
                logger.debug('parse: dependencies.deps: {}, {}, {}, {}'.
                             format(dgroup_id, dartifact_id, dversion, drequired))

            # pymaven wart: 'latest.release' is a placeholder, not a version
            if dversion == 'latest.release':
                dversion = None

            dqualifiers = {}
            # FIXME: this is missing from the original Pom parser
            # classifier = dep.get('classifier')
            # if classifier:
            #     qualifiers['classifier'] = classifier
            #
            # packaging = dep.get('type')
            # if packaging and packaging != 'jar':
            #     qualifiers['packaging'] = packaging

            dep_id = models.PackageURL(
                type='maven',
                namespace=dgroup_id,
                name=dartifact_id,
                qualifiers=dqualifiers or None)

            # TODO: handle dependency management and pom type
            is_runtime = scope in ('runtime', 'compile', 'system', 'provided')
            # a 'test' scope or a non-required dependency is optional
            is_optional = bool(scope in ('test',) or not drequired)

            # any scope not in the known maven scopes is treated as runtime,
            # overriding the membership check above
            if scope not in (('runtime', 'compile', 'system', 'provided', 'test')):
                is_runtime = True

            dep_pack = models.DependentPackage(
                purl=str(dep_id),
                requirement=dversion,
                scope=scope,
                is_runtime=is_runtime,
                is_optional=is_optional,
                is_resolved=False)

            dependencies.append(dep_pack)
    return dependencies
def build_package(package_data):
    """
    Return a Package object from a package_data mapping (from a haxelib.json
    or similar) or None.

    {
        "name": "haxelib",
        "url" : "https://lib.haxe.org/documentation/",
        "license": "GPL",
        "tags": ["haxelib", "core"],
        "description": "The haxelib client",
        "classPath": "src",
        "version": "3.4.0",
        "releasenote": " * Fix password input issue in Windows (#421).\n * ....",
        "contributors": ["back2dos", "ncannasse", "jason", "Simn", "nadako", "andyli"]
    }
    """
    package = HaxePackage(
        name=package_data.get('name'),
        version=package_data.get('version'),
        homepage_url=package_data.get('url'),
        declared_license=package_data.get('license'),
        keywords=package_data.get('tags'),
        description=package_data.get('description'),
    )
    package.download_url = package.repository_download_url()

    for contrib in package_data.get('contributors', []):
        package.parties.append(
            models.Party(
                type=models.party_person,
                name=contrib,
                role='contributor',
                url='https://lib.haxe.org/u/{}'.format(contrib)))

    for dep_name, dep_version in package_data.get('dependencies', {}).items():
        dep_version = dep_version and dep_version.strip()
        package.dependencies.append(
            models.DependentPackage(
                purl=PackageURL(
                    type='haxe',
                    name=dep_name,
                    version=dep_version
                ).to_string(),
                # a non-empty pinned version means the dependency is resolved
                is_resolved=bool(dep_version),
            ))

    return package
def parse(cls, location):
    """
    Yield a PackageData built from the conda meta.yaml file at ``location``.
    Yields nothing when the file has no package name.
    """
    metayaml = get_meta_yaml_data(location)
    package_element = metayaml.get('package') or {}
    name = package_element.get('name')
    if not name:
        # a meta.yaml without a package name is not usable
        return
    version = package_element.get('version')
    package = models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        name=name,
        version=version,
    )

    # FIXME: source is source, not download
    source = metayaml.get('source') or {}
    package.download_url = source.get('url')
    package.sha256 = source.get('sha256')

    about = metayaml.get('about') or {}
    package.homepage_url = about.get('home')
    package.declared_license = about.get('license')
    if package.declared_license:
        # the license must be set on the package before normalizing
        package.license_expression = cls.compute_normalized_license(package)
    package.description = about.get('summary')
    package.vcs_url = about.get('dev_url')

    requirements = metayaml.get('requirements') or {}
    for scope, reqs in requirements.items():
        # requirements format is like:
        # (u'run', [u'mccortex ==1.0', u'nextflow ==19.01.0', u'cortexpy
        # ==0.45.7', u'kallisto ==0.44.0', u'bwa', u'pandas',
        # u'progressbar2', u'python >=3.6'])])
        for req in reqs:
            # split 'name requirement' on the first space
            name, _, requirement = req.partition(" ")
            purl = PackageURL(type=cls.default_package_type, name=name)
            package.dependencies.append(
                models.DependentPackage(
                    purl=purl.to_string(),
                    extracted_requirement=requirement,
                    scope=scope,
                    is_runtime=True,
                    is_optional=False,
                ))

    yield package
def get_requires_dependencies(requires, default_scope='install'):
    """
    Return a list of DependentPackage found in a ``requires`` list of
    requirement strings or an empty list.
    """
    if not is_simple_requires(requires):
        return []

    dependent_packages = []
    for req in (requires or []):
        req = Requirement(req)
        name = canonicalize_name(req.name)
        is_resolved = False
        purl = PackageURL(type='pypi', name=name)

        # note: packaging.requirements.Requirement.specifier is a
        # packaging.specifiers.SpecifierSet object and a SpecifierSet._specs is
        # a set of either: packaging.specifiers.Specifier or
        # packaging.specifiers.LegacySpecifier and each of these have a
        # .operator and .version property

        # a packaging.specifiers.SpecifierSet
        specifiers_set = req.specifier
        # a list of packaging.specifiers.Specifier
        specifiers = specifiers_set._specs

        requirement = None
        if specifiers:
            # SpecifierSet stringifies to comma-separated sorted Specifiers
            requirement = str(specifiers_set)
            # are we pinned e.g. resolved? this is true if we have a single
            # equality specifier
            if len(specifiers) == 1:
                specifier = list(specifiers)[0]
                if specifier.operator in ('==', '==='):
                    is_resolved = True
                    purl = purl._replace(version=specifier.version)

        # we use the extra as scope if available
        scope = get_extra(req.marker) or default_scope

        dependent_packages.append(
            models.DependentPackage(
                purl=purl.to_string(),
                scope=scope,
                is_runtime=True,
                is_optional=False,
                is_resolved=is_resolved,
                requirement=requirement,
            ))

    return dependent_packages
def recognize(cls, location):
    """
    Yield one or more Package manifest objects given a file ``location``
    pointing to a package archive, manifest or similar.
    """
    gemspec_object = Spec()
    gemspec_data = gemspec_object.parse_spec(location)

    name = gemspec_data.get('name')
    version = gemspec_data.get('version')
    homepage_url = gemspec_data.get('homepage_url')

    # default missing summary/description to '' so that the length
    # comparison below cannot crash on None
    summary = gemspec_data.get('summary') or ''
    description = gemspec_data.get('description') or ''
    if len(summary) > len(description):
        # keep the longest of the two texts as the description
        description = summary

    declared_license = gemspec_data.get('license')
    if declared_license:
        declared_license = declared_license.split(',')

    author = gemspec_data.get('author') or []
    email = gemspec_data.get('email') or []
    parties = list(party_mapper(author, email))

    package_manifest = cls(
        name=name,
        version=version,
        parties=parties,
        homepage_url=homepage_url,
        description=description,
        declared_license=declared_license)

    dependencies = gemspec_data.get('dependencies', {}) or {}
    package_dependencies = []
    # use distinct loop variable names: the original shadowed the package
    # `name` and `version` above
    for dep_name, dep_requirements in dependencies.items():
        package_dependencies.append(
            models.DependentPackage(
                purl=PackageURL(type='gem', name=dep_name).to_string(),
                requirement=', '.join(dep_requirements),
                scope='dependencies',
                is_runtime=True,
                is_optional=False,
                is_resolved=False,
            ))
    package_manifest.dependencies = package_dependencies

    yield package_manifest
def _deps_mapper(deps, package, scope, is_runtime=False, is_optional=False):
    """
    Update and return ``package`` after appending one DependentPackage per
    composer package link in the ``deps`` mapping of 'vendor/name' to
    requirement, using the given ``scope`` and flags.
    https://getcomposer.org/doc/04-schema.md#package-links
    """
    for vendor_name, requirement in deps.items():
        # composer names are 'vendor/name'; the vendor may be empty
        vendor, _, pkg_name = vendor_name.rpartition('/')
        dep_purl = models.PackageURL(
            type='composer',
            namespace=vendor,
            name=pkg_name,
        ).to_string()
        package.dependencies.append(
            models.DependentPackage(
                purl=dep_purl,
                extracted_requirement=requirement,
                scope=scope,
                is_runtime=is_runtime,
                is_optional=is_optional))
    return package
def build_gosum_package(gosums):
    """
    Return a GolangPackage listing each entry of ``gosums`` (from a go.sum
    file) as a resolved runtime dependency.
    """
    package_dependencies = [
        models.DependentPackage(
            purl=entry.purl(),
            requirement=entry.version,
            scope='dependency',
            is_runtime=True,
            is_optional=False,
            is_resolved=True,
        )
        for entry in gosums
    ]
    return GolangPackage(dependencies=package_dependencies)
def parse(cls, location):
    """
    Yield a PackageData listing each entry of the go.sum file at
    ``location`` as a resolved runtime dependency.
    """
    dependencies = [
        models.DependentPackage(
            purl=entry.purl(),
            extracted_requirement=entry.version,
            scope='dependency',
            is_runtime=True,
            is_optional=False,
            is_resolved=True,
        )
        for entry in go_mod.parse_gosum(location)
    ]

    yield models.PackageData(
        datasource_id=cls.datasource_id,
        type=cls.default_package_type,
        dependencies=dependencies,
        primary_language=cls.default_primary_language,
    )
def bundle_deps_mapper(bundle_deps, package):
    """
    Update and return ``package`` after appending one runtime
    DependentPackage per name in the ``bundle_deps`` list.

    https://docs.npmjs.com/files/package.json#bundleddependencies
    "This defines an array of package names that will be bundled when
    publishing the package."
    """
    for entry in bundle_deps or []:
        entry = entry and entry.strip()
        if not entry:
            # skip empty or whitespace-only names
            continue
        ns, name = split_scoped_package_name(entry)
        dep_purl = models.PackageURL(type='npm', namespace=ns, name=name)
        package.dependencies.append(
            models.DependentPackage(
                purl=dep_purl.to_string(),
                scope='bundledDependencies',
                is_runtime=True,
            ))
    return package