Example no. 1
 def test_parse_depends_multiarch(self):
     # strip multiarch
     deps = apt_pkg.parse_depends("po4a:native", True)
     self.assertEqual(deps[0][0][0], "po4a")
     # do not strip multiarch
     deps = apt_pkg.parse_depends("po4a:native", False)
     self.assertEqual(deps[0][0][0], "po4a:native")
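For reference, apt_pkg.parse_depends returns a list of OR-groups, each a list of (name, version, relation) triples; the test above only inspects the package name. A minimal sketch of the full structure (the dependency string is made up):

import apt_pkg

# One inner list per comma-separated clause; "|" alternatives share a clause.
# The second argument controls Multi-Arch stripping, as the test above shows.
deps = apt_pkg.parse_depends("po4a (>= 0.45), debhelper | debhelper-compat", False)
# -> [[('po4a', '0.45', '>=')],
#     [('debhelper', '', ''), ('debhelper-compat', '', '')]]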
Example no. 3
def check_override_compliance(package, priority, archive_path, suite_name, cnf, session):
    print "Checking compliance with related overrides..."

    depends = set()
    rdepends = set()
    components = get_component_names(session)
    arches = set([x.arch_string for x in get_suite_architectures(suite_name)])
    arches -= set(["source", "all"])
    for arch in arches:
        for component in components:
            Packages = utils.get_packages_from_ftp(archive_path, suite_name, component, arch)
            while Packages.step():
                package_name = Packages.section.find("Package")
                dep_list = Packages.section.find("Depends")
                if dep_list:
                    if package_name == package:
                        for d in apt_pkg.parse_depends(dep_list):
                            for i in d:
                                depends.add(i[0])
                    else:
                        for d in apt_pkg.parse_depends(dep_list):
                            for i in d:
                                if i[0] == package:
                                    rdepends.add(package_name)

    query = """SELECT o.package, p.level, p.priority
               FROM override o
               JOIN suite s ON s.id = o.suite
               JOIN priority p ON p.id = o.priority
               WHERE s.suite_name = '%s'
               AND o.package in ('%s')""" % (
        suite_name,
        "', '".join(depends.union(rdepends)),
    )
    packages = session.execute(query)

    excuses = []
    for p in packages:
        if p[0] == package or not p[1]:
            continue
        if p[0] in depends:
            if priority.level < p[1]:
                excuses.append(
                    "%s would have priority %s, its dependency %s has priority %s"
                    % (package, priority.priority, p[0], p[2])
                )
        if p[0] in rdepends:
            if priority.level > p[1]:
                excuses.append(
                    "%s would have priority %s, its reverse dependency %s has priority %s"
                    % (package, priority.priority, p[0], p[2])
                )

    if excuses:
        for ex in excuses:
            print ex
    else:
        print "Proposed override change complies with Debian Policy"
Example no. 4
def check_override_compliance(package, priority, archive_path, suite_name, cnf,
                              session):
    print "Checking compliance with related overrides..."

    depends = set()
    rdepends = set()
    components = get_component_names(session)
    arches = set([x.arch_string for x in get_suite_architectures(suite_name)])
    arches -= set(["source", "all"])
    for arch in arches:
        for component in components:
            Packages = utils.get_packages_from_ftp(archive_path, suite_name,
                                                   component, arch)
            while Packages.step():
                package_name = Packages.section.find("Package")
                dep_list = Packages.section.find("Depends")
                if dep_list:
                    if package_name == package:
                        for d in apt_pkg.parse_depends(dep_list):
                            for i in d:
                                depends.add(i[0])
                    else:
                        for d in apt_pkg.parse_depends(dep_list):
                            for i in d:
                                if i[0] == package:
                                    rdepends.add(package_name)

    query = """SELECT o.package, p.level, p.priority
               FROM override o
               JOIN suite s ON s.id = o.suite
               JOIN priority p ON p.id = o.priority
               WHERE s.suite_name = '%s'
               AND o.package in ('%s')""" \
               % (suite_name, "', '".join(depends.union(rdepends)))
    packages = session.execute(query)

    excuses = []
    for p in packages:
        if p[0] == package or not p[1]:
            continue
        if p[0] in depends:
            if priority.level < p[1]:
                excuses.append("%s would have priority %s, its dependency %s has priority %s" \
                      % (package, priority.priority, p[0], p[2]))
        if p[0] in rdepends:
            if priority.level > p[1]:
                excuses.append("%s would have priority %s, its reverse dependency %s has priority %s" \
                      % (package, priority.priority, p[0], p[2]))

    if excuses:
        for ex in excuses:
            print ex
    else:
        print "Proposed override change complies with Debian Policy"
Example no. 5
    def _parsePackage(self, section, pkgtype):
        """Parse a section from a Packages file."""
        pkg = section["Package"]
        ver = section["Version"]

        # If we have already seen an equal or newer version of this package,
        # then skip this section.
        if pkg in self.packages:
            last_ver = self.packages[pkg]["Version"]
            if apt_pkg.version_compare(last_ver, ver) >= 0:
                return

        self.packages[pkg] = {}
        self.packagetype[pkg] = pkgtype
        self.pruned[pkg] = set()

        self.packages[pkg]["Section"] = \
            section.get("Section", "").split('/')[-1]

        self.packages[pkg]["Version"] = ver

        self.packages[pkg]["Maintainer"] = \
            unicode(section.get("Maintainer", ""), "utf8", "replace")

        self.packages[pkg]["Essential"] = section.get("Essential", "")

        for field in "Pre-Depends", "Depends", "Recommends", "Suggests":
            value = section.get(field, "")
            self.packages[pkg][field] = apt_pkg.parse_depends(value)

        for field in "Size", "Installed-Size":
            value = section.get(field, "0")
            self.packages[pkg][field] = int(value)

        src = section.get("Source", pkg)
        idx = src.find("(")
        if idx != -1:
            src = src[:idx].strip()
        self.packages[pkg]["Source"] = src

        provides = apt_pkg.parse_depends(section.get("Provides", ""))
        for prov in provides:
            if prov[0][0] not in self.provides:
                self.provides[prov[0][0]] = []
                if prov[0][0] in self.packages:
                    self.provides[prov[0][0]].append(prov[0][0])
            self.provides[prov[0][0]].append(pkg)
        self.packages[pkg]["Provides"] = provides

        if pkg in self.provides:
            self.provides[pkg].append(pkg)

        self.packages[pkg]["Kernel-Version"] = section.get("Kernel-Version", "")
Example no. 6
def read_binaries(mirror_dist, packages=None, intern=intern):
    if packages is None:
        packages = {}

    for filename in mirror_dist.packages_files:
        tag_file = apt_pkg.TagFile(filename)
        get_field = tag_file.section.get
        step = tag_file.step

        while step():
            pkg = intern(get_field('Package'))
            version = intern(get_field('Version'))
            source = get_field('Source', pkg)
            source_version = version

            # There may be multiple versions of any arch:all packages
            # (in unstable) if some architectures have out-of-date
            # binaries.
            if pkg in packages and apt_pkg.version_compare(packages[pkg].version, version) > 0:
                continue

            if "(" in source:
                source, v = (x.strip() for x in source.split("("))
                v = v.rstrip(" )")
                source = intern(source)
                source_version = intern(v)

            section = intern(get_field('Section', 'N/A'))

            depends_field = get_field('Depends')
            predepends_field = get_field('Pre-Depends')
            depends = []
            if depends_field:
                depends.extend(apt_pkg.parse_depends(depends_field))
            if predepends_field:
                depends.extend(apt_pkg.parse_depends(predepends_field))

            bin_pkg = BinaryPackage(
                package=pkg,
                version=version,
                architecture=get_field('Architecture'),
                source=source,
                source_version=source_version,
                section=section,
                depends=depends,
                reverse_depends=set(),
            )

            packages[pkg] = bin_pkg


    return packages
Example no. 7
def read_binaries(mirror_dist, packages=None, intern=intern):
    if packages is None:
        packages = {}

    for filename in mirror_dist.packages_files:
        tag_file = apt_pkg.TagFile(filename)
        get_field = tag_file.section.get
        step = tag_file.step

        while step():
            pkg = intern(get_field('Package'))
            version = intern(get_field('Version'))
            source = get_field('Source', pkg)
            source_version = version

            # There may be multiple versions of any arch:all packages
            # (in unstable) if some architectures have out-of-date
            # binaries.
            if pkg in packages and apt_pkg.version_compare(
                    packages[pkg].version, version) > 0:
                continue

            if "(" in source:
                source, v = (x.strip() for x in source.split("("))
                v = v.rstrip(" )")
                source = intern(source)
                source_version = intern(v)

            section = intern(get_field('Section', 'N/A'))

            depends_field = get_field('Depends')
            predepends_field = get_field('Pre-Depends')
            depends = []
            if depends_field:
                depends.extend(apt_pkg.parse_depends(depends_field))
            if predepends_field:
                depends.extend(apt_pkg.parse_depends(predepends_field))

            bin_pkg = BinaryPackage(
                package=pkg,
                version=version,
                architecture=get_field('Architecture'),
                source=source,
                source_version=source_version,
                section=section,
                depends=depends,
                reverse_depends=set(),
            )

            packages[pkg] = bin_pkg

    return packages
Example no. 8
def _test():
    """Test function"""
    from apt.cache import Cache
    from apt.progress import DpkgInstallProgress

    cache = Cache()

    vp = "www-browser"
    #print "%s virtual: %s" % (vp, cache.isVirtualPackage(vp))
    providers = cache.get_providing_packages(vp)
    print "Providers for %s :" % vp
    for pkg in providers:
        print " %s" % pkg.name

    d = DebPackage(sys.argv[1], cache)
    print "Deb: %s" % d.pkgname
    if not d.check():
        print "can't be satified"
        print d._failure_string
    print "missing deps: %s" % d.missing_deps
    print d.required_changes

    print "Installing ..."
    ret = d.install(DpkgInstallProgress())
    print ret

    #s = DscSrcPackage(cache, "../tests/3ddesktop_0.2.9-6.dsc")
    #s.check_dep()
    #print "Missing deps: ",s.missingDeps
    #print "Print required changes: ", s.requiredChanges

    s = DscSrcPackage(cache=cache)
    d = "libc6 (>= 2.3.2), libaio (>= 0.3.96) | libaio1 (>= 0.3.96)"
    print s._satisfy_depends(apt_pkg.parse_depends(d))
Example no. 9
 def conflicts(self):
     """List of package names conflicting with this package."""
     key = "Conflicts"
     try:
         return apt_pkg.parse_depends(self._sections[key], False)
     except KeyError:
         return []
Example no. 10
 def replaces(self):
     """List of packages which are replaced by this package."""
     key = "Replaces"
     try:
         return apt_pkg.parse_depends(self._sections[key], False)
     except KeyError:
         return []
Example no. 11
 def provides(self):
     """List of virtual packages which are provided by this package."""
     key = "Provides"
     try:
         return apt_pkg.parse_depends(self._sections[key], False)
     except KeyError:
         return []
Example no. 16
def _build_inst_tester_on_suite_arch(builder, suite_info, suite, arch):
    packages_s_a = suite.binaries[arch]
    is_target = suite.suite_class.is_target
    bin_prov = [(s.binaries[arch], s.provides_table[arch]) for s in suite_info]
    solvers = get_dependency_solvers
    for pkgdata in packages_s_a.values():
        pkg_id = pkgdata.pkg_id
        if not builder.add_binary(pkg_id,
                                  essential=pkgdata.is_essential,
                                  in_testing=is_target):
            continue

        if pkgdata.conflicts:
            conflicts = []
            conflicts_parsed = apt_pkg.parse_depends(pkgdata.conflicts, False)
            # Breaks/Conflicts are so simple that we do not need to keep the relation aligned
            # with the suite.  This enables us to do a few optimizations.
            for dep_binaries_s_a, dep_provides_s_a in bin_prov:
                for block in (relation for relation in conflicts_parsed):
                    # if a package satisfies its own conflicts relation, then it is using §7.6.2
                    conflicts.extend(s.pkg_id for s in solvers(block, dep_binaries_s_a, dep_provides_s_a)
                                     if s.pkg_id != pkg_id)
        else:
            conflicts = None

        if pkgdata.depends:
            depends = _compute_depends(pkgdata, bin_prov, solvers)
        else:
            depends = None

        builder.set_relations(pkg_id, depends, conflicts)
Example no. 19
 def replaces(self):
     # type: () -> List[List[Tuple[str, str, str]]]
     """List of packages which are replaced by this package."""
     key = "Replaces"
     try:
         return apt_pkg.parse_depends(self._sections[key], False)
     except KeyError:
         return []
Example no. 20
 def conflicts(self):
     # type: () -> List[List[Tuple[str, str, str]]]
     """List of packages conflicting with this package."""
     key = "Conflicts"
     try:
         return apt_pkg.parse_depends(self._sections[key], False)
     except KeyError:
         return []
Example no. 21
 def depends(self):
     """List of package names on which this package depends on."""
     depends = []
     # find depends
     for key in "Depends", "Pre-Depends":
         try:
             depends.extend(apt_pkg.parse_depends(self._sections[key]))
         except KeyError:
             pass
     return depends
Example no. 24
 def depends(self):
     # type: () -> List[List[Tuple[str, str, str]]]
     """List of packages on which this package depends on."""
     depends = []
     # find depends
     for key in "Depends", "Pre-Depends":
         try:
             depends.extend(
                 apt_pkg.parse_depends(self._sections[key], False))
         except KeyError:
             pass
     return depends
Example no. 25
def ma_parse_depends(dep_str):
    """Parse a dependency string into a list of triples

    This is like apt_pkg.parse_depends but filters out :any and :native
    Multi-Arch prefixes. We don't use apt_pkg.parse_depends(dep_str, True)
    as that would also filter out arch specific dependencies like :amd64.
    """
    res = apt_pkg.parse_depends(dep_str, False)
    filtered = []
    for or_clause in res:
        filtered.append([(p.replace(':any', '').replace(':native', ''), v, r)
                         for (p, v, r) in or_clause])
    return filtered
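A quick illustration of the filtering described in the docstring (the dependency string is made up):

deps = ma_parse_depends("libfoo:any (>= 1.0), gcc:native, libbar-dev:amd64")
# ":any" and ":native" are dropped, the explicit ":amd64" qualifier is kept:
# -> [[('libfoo', '1.0', '>=')], [('gcc', '', '')], [('libbar-dev:amd64', '', '')]]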
Example no. 26
    def mark_install(self, pstr):
        deps = apt_pkg.parse_depends(pstr)
        have_version = False
        for ord in deps:
            if have_version:
                break

            print pstr, ord
            for d in ord:
                name = d[0]
                version_num = d[1]
                version_op = d[2]

                p = self.cache[name]
                if not p:
                    logging.error("Could not find package %s in cache", name)
                    continue

                if len(version_num) > 0:
                    highest_v = None
                    highest_vnum = 0

                    for version in p.versions:
                        if apt_pkg.check_dep(version.version, version_op, version_num):
                            have_version = True
                            logging.info("package: %s, version: %s, priority: %s/%d",
                                    name, version.version, version.priority, version.policy_priority)

                            if (version.policy_priority > highest_vnum):
                                highest_vnum = version.policy_priority
                                highest_v = version

                    if not have_version:
                        logging.error("Could not required version of the package %s, must be %s %s",
                                name, version_op, version_num)
                        # going for the next ORed version if any
                        continue

                    p.candidate = highest_v
                    logging.info("package %s, selected version: %s, priority: %s/%d",
                            name, p.candidate.version, p.candidate.priority, p.candidate.policy_priority)

                logging.info("Going to install package %s", name)
                p.mark_install(auto_fix=True, auto_inst=True)
                have_version = True

                # do not run for the subsequent ORed packages
                break

        if not have_version:
            logging.fatal("Could not find suitable package %s", pstr)
Example no. 27
	def __init__(self,lines) :
		self.version = ""
		self.name = ""
		self.recommends = []
		self.suggests = []
		self.pre_depends = []
		self.depends = []
		self.conflicts = []
		self.provides = []
		self.arch = ""
		self.priority = ""
		for line in lines:
			if line.startswith((" ", "\t")) or line.strip() == "":
					None
			elif line.startswith(_package):
				self.name = line[len(_package):].strip()
			elif line.startswith(_version):
				self.version = line[len(_version):].strip()
			elif line.startswith(_depends):
				self.depends = apt_pkg.parse_depends(line[len(_depends):].strip())
			elif line.startswith(_pre_depends):
				self.pre_depends = apt_pkg.parse_depends(line[len(_pre_depends):].strip())
			elif line.startswith(_provides):
				self.provides = apt_pkg.parse_depends(line[len(_provides):].strip())
			elif line.startswith(_recommends):
				self.recommends = apt_pkg.parse_depends(line[len(_recommends):].strip())
			elif line.startswith(_suggests):
				self.suggests = apt_pkg.parse_depends(line[len(_suggests):].strip())
			elif line.startswith(_enhances):
				None
			elif line.startswith(_breaks):
				None
			elif line.startswith(_conflicts):
				self.conflicts = apt_pkg.parse_depends(line[len(_conflicts):].strip())
			elif line.startswith(_priority):
				self.priority = line[len(_priority):].strip()
			elif line.startswith(_description):
				None
			elif line.startswith(_section):
				None
			elif line.startswith(_installed_size):
				None
			elif line.startswith(_maintainer):
				None
			elif line.startswith(_source):
				None
			elif line.startswith(_architecture):
				self.arch = line[len(_architecture):].strip()
			elif line.startswith(_original_maintainer):
				None
			elif line.startswith(_replaces):
				None
			elif line.startswith(_homepage):
				None
		else : None
Example no. 28
    def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
        """Add Built-Using sources to C{db_binary.extra_sources}
        """
        session = self.session
        built_using = control.get('Built-Using', None)

        if built_using is not None:
            for dep in apt_pkg.parse_depends(built_using):
                assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
                bu_source_name, bu_source_version, comp = dep[0]
                assert comp == '=', 'Built-Using must contain strict dependencies'

                bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
                if bu_source is None:
                    raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

                self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

                db_binary.extra_sources.append(bu_source)
Example no. 29
def parse_builtusing(builtusing_raw, pkg_id=None, logger=None):
    parts = apt_pkg.parse_depends(builtusing_raw, False)
    nbu = []
    for or_clause in parts:
        if len(or_clause) != 1:  # pragma: no cover
            if logger is not None:
                msg = "Ignoring invalid builtusing in %s: Alternatives [%s]"
                logger.warning(msg, str(pkg_id), str(or_clause))
            continue
        for part in or_clause:
            bu, bu_version, op = part
            if op != '=':  # pragma: no cover
                if logger is not None:
                    msg = "Ignoring invalid builtusing in %s: %s (%s %s)"
                    logger.warning(msg, str(pkg_id), bu, op, bu_version)
                continue
            bu = sys.intern(bu)
            bu_version = sys.intern(bu_version)
            part = (bu, bu_version)
            nbu.append(part)
    return nbu
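For example, a well-formed Built-Using value is reduced to (source, version) pairs, while alternatives or non-strict relations would be skipped with a warning (the field value below is illustrative):

built_using = "gcc-10 (= 10.2.1-6), binutils (= 2.35.2-2)"
parse_builtusing(built_using)
# -> [('gcc-10', '10.2.1-6'), ('binutils', '2.35.2-2')]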
Example no. 30
def parse_record_field(record, record_field, relation_type,
                       or_relation_type=None):
    """Parse an apt C{Record} field and return skeleton relations

    @param record: An C{apt.package.Record} instance with package information.
    @param record_field: The name of the record field to parse.
    @param relation_type: The deb relation that can be passed to
        C{skeleton.add_relation()}
    @param or_relation_type: The deb relation that should be used if
        there is more than one value in a relation.
    """
    relations = set()
    values = apt_pkg.parse_depends(record.get(record_field, ""))
    for value in values:
        value_strings = [relation_to_string(relation) for relation in value]
        value_relation_type = relation_type
        if len(value_strings) > 1:
            value_relation_type = or_relation_type
        relation_string = " | ".join(value_strings)
        relations.add((value_relation_type, relation_string))
    return relations
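A rough sketch of a call, assuming a plain dict in place of an apt Record, string tags in place of the real relation-type constants, and that relation_to_string renders a (name, version, op) triple in the usual Debian syntax, i.e. "name (op version)":

record = {"Depends": "python3 (>= 3.9), zlib1g | libz1"}   # stand-in for an apt Record
parse_record_field(record, "Depends", "depends", or_relation_type="or-depends")
# Under the assumptions above, this would yield:
# {('depends', 'python3 (>= 3.9)'), ('or-depends', 'zlib1g | libz1')}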
Example no. 31
def parse_provides(provides_raw, pkg_id=None, logger=None):
    parts = apt_pkg.parse_depends(provides_raw, False)
    nprov = []
    for or_clause in parts:
        if len(or_clause) != 1:  # pragma: no cover
            if logger is not None:
                msg = "Ignoring invalid provides in %s: Alternatives [%s]"
                logger.warning(msg, str(pkg_id), str(or_clause))
            continue
        for part in or_clause:
            provided, provided_version, op = part
            if op != '' and op != '=':  # pragma: no cover
                if logger is not None:
                    msg = "Ignoring invalid provides in %s: %s (%s %s)"
                    logger.warning(msg, str(pkg_id), provided, op, provided_version)
                continue
            provided = sys.intern(provided)
            provided_version = sys.intern(provided_version)
            part = (provided, provided_version, sys.intern(op))
            nprov.append(part)
    return nprov
Example no. 33
def parse_built_using(control):
    """source packages referenced via Built-Using

    @type  control: dict-like
    @param control: control file to take Built-Using field from

    @rtype:  list of (str, str)
    @return: list of (source_name, source_version) pairs
    """
    built_using = control.get('Built-Using', None)
    if built_using is None:
        return []

    bu = []
    for dep in apt_pkg.parse_depends(built_using):
        assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
        source_name, source_version, comp = dep[0]
        assert comp == '=', 'Built-Using must contain strict dependencies'
        bu.append((source_name, source_version))

    return bu
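For instance, with a dict-like control mapping (the field value is illustrative):

parse_built_using({"Built-Using": "golang-1.19 (= 1.19.8-2)"})
# -> [('golang-1.19', '1.19.8-2')]
parse_built_using({})
# -> []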
Example no. 35
def parse_provides(provides_raw, pkg_id=None, logger=None):
    parts = apt_pkg.parse_depends(provides_raw, False)
    nprov = []
    for or_clause in parts:
        if len(or_clause) != 1:  # pragma: no cover
            if logger is not None:
                msg = "Ignoring invalid provides in %s: Alternatives [%s]"
                logger.warning(msg, str(pkg_id), str(or_clause))
            continue
        for part in or_clause:
            provided, provided_version, op = part
            if op != '' and op != '=':  # pragma: no cover
                if logger is not None:
                    msg = "Ignoring invalid provides in %s: %s (%s %s)"
                    logger.warning(msg, str(pkg_id), provided, op,
                                   provided_version)
                continue
            provided = sys.intern(provided)
            provided_version = sys.intern(provided_version)
            part = (provided, provided_version, sys.intern(op))
            nprov.append(part)
    return nprov
Example no. 36
def _compute_depends(pkgdata, bin_prov, solvers):
    depends = []
    possible_dep_ranges = {}
    for block in apt_pkg.parse_depends(pkgdata.depends, False):
        sat = {s.pkg_id for binaries_s_a, provides_s_a in bin_prov
               for s in solvers(block, binaries_s_a, provides_s_a)}

        if len(block) != 1:
            depends.append(sat)
        else:
            # This dependency might be a part
            # of a version-range a la:
            #
            #   Depends: pkg-a (>= 1),
            #            pkg-a (<< 2~)
            #
            # In such a case we want to reduce
            # that to a single clause for
            # efficiency.
            #
            # In theory, it could also happen
            # with "non-minimal" dependencies
            # a la:
            #
            #   Depends: pkg-a, pkg-a (>= 1)
            #
            # But dpkg is known to fix that up
            # at build time, so we will
            # probably only see "ranges" here.
            key = block[0][0]
            if key in possible_dep_ranges:
                possible_dep_ranges[key] &= sat
            else:
                possible_dep_ranges[key] = sat

    if possible_dep_ranges:
        depends.extend(possible_dep_ranges.values())

    return depends
Example no. 37
    def updateDependencies(self):
        """See `IBuild`."""

        # apt_pkg requires init_system to get version_compare working
        # properly.
        apt_pkg.init_system()

        # Check package build dependencies using apt_pkg
        try:
            parsed_deps = apt_pkg.parse_depends(self.dependencies)
        except (ValueError, TypeError):
            raise UnparsableDependencies(
                "Build dependencies for %s (%s) could not be parsed: '%s'\n"
                "It indicates that something is wrong in buildd-slaves."
                % (self.title, self.id, self.dependencies))

        remaining_deps = [
            self._toAptFormat(token) for token in parsed_deps
            if not self._isDependencySatisfied(token)]

        # Update dependencies line
        self.dependencies = u", ".join(remaining_deps)
Example no. 38
    def test_parse_depends(self):
        """dependencies: Test apt_pkg.parse_depends()"""
        deps = apt_pkg.parse_depends("p1a (<< 1a) | p1b (>> 1b)")
        self.assertTrue(isinstance(deps, list))
        self.assertEqual(len(deps), 1)
        self.assertTrue(isinstance(deps[0], list))
        self.assertEqual(len(deps[0]), 2)
        self.assertEqual(len(deps[0][0]), 3)
        self.assertEqual(len(deps[0][1]), 3)
        self.assertEqual(deps[0][0][0], "p1a")
        self.assertEqual(deps[0][0][1], "1a")
        self.assertEqual(deps[0][0][2], "<")
        self.assertEqual(deps[0][1][0], "p1b")
        self.assertEqual(deps[0][1][1], "1b")
        self.assertEqual(deps[0][1][2], ">")

        # Check that the type of comparison is parsed correctly.
        self.assertEqual("<", apt_pkg.parse_depends("p1 (<< 1)")[0][0][2])
        self.assertEqual("<=", apt_pkg.parse_depends("p1 (< 1)")[0][0][2])
        self.assertEqual("<=", apt_pkg.parse_depends("p1 (<= 1)")[0][0][2])
        self.assertEqual("=", apt_pkg.parse_depends("p1 (= 1)")[0][0][2])
        self.assertEqual(">=", apt_pkg.parse_depends("p1 (>= 1)")[0][0][2])
        self.assertEqual(">=", apt_pkg.parse_depends("p1 (> 1)")[0][0][2])
        self.assertEqual(">", apt_pkg.parse_depends("p1 (>> 1)")[0][0][2])
Example no. 40
def _test():
    # type: () -> None
    """Test function"""
    from apt.cache import Cache
    from apt.progress.base import InstallProgress

    cache = Cache()

    vp = "www-browser"
    print("%s virtual: %s" % (vp, cache.is_virtual_package(vp)))
    providers = cache.get_providing_packages(vp)
    print("Providers for %s :" % vp)
    for pkg in providers:
        print(" %s" % pkg.name)

    d = DebPackage(sys.argv[1], cache)
    print("Deb: %s" % d.pkgname)
    if not d.check():
        print("can't be satified")
        print(d._failure_string)
    print("missing deps: %s" % d.missing_deps)
    print(d.required_changes)

    print(d.filelist)

    print("Installing ...")
    ret = d.install(InstallProgress())
    print(ret)

    #s = DscSrcPackage(cache, "../tests/3ddesktop_0.2.9-6.dsc")
    #s.check_dep()
    #print "Missing deps: ",s.missingDeps
    #print "Print required changes: ", s.requiredChanges

    s = DscSrcPackage(cache=cache)
    ds = "libc6 (>= 2.3.2), libaio (>= 0.3.96) | libaio1 (>= 0.3.96)"
    print(s._satisfy_depends(apt_pkg.parse_depends(ds, False)))
Example no. 42
    def _parseSource(self, section):
        """Parse a section from a Sources file."""
        src = section["Package"]
        ver = section["Version"]

        # If we have already seen an equal or newer version of this source,
        # then skip this section.
        if src in self.sources:
            last_ver = self.sources[src]["Version"]
            if apt_pkg.version_compare(last_ver, ver) >= 0:
                return

        self.sources[src] = {}

        self.sources[src]["Maintainer"] = \
            unicode(section.get("Maintainer", ""), "utf8", "replace")
        self.sources[src]["Version"] = ver

        for field in "Build-Depends", "Build-Depends-Indep":
            value = section.get(field, "")
            self.sources[src][field] = apt_pkg.parse_src_depends(value)

        binaries = apt_pkg.parse_depends(section.get("Binary", src))
        self.sources[src]["Binaries"] = [ b[0][0] for b in binaries ]
Example no. 43
    def _load_package_information(session, suite_id, suite_archs2id):
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providers_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
        params = {
            'suite_id':     suite_id,
            'arch_all_id':  suite_archs2id['all'],
            'metakey_d_id': metakey_d.key_id,
            'metakey_p_id': metakey_p.key_id,
        }
        all_arches = set(suite_archs2id)
        all_arches.discard('source')

        package_dependencies['source'] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providers_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params['arch_id'] = suite_archs2id[architecture]

            statement = '''
                    SELECT b.package,
                        (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                        (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                        FROM binaries b
                        JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                        WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id'''
            query = session.query('package', 'depends', 'provides'). \
                from_statement(statement).params(params)
            for package, depends, provides in query:

                if depends is not None:
                    try:
                        parsed_dep = []
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        print "Error for package %s: %s" % (package, e)
                # Maintain a counter for each virtual package.  If a
                # Provides: exists, set the counter to 0 and count all
                # provides by a package not in the list for removal.
                # If the counter stays 0 at the end, we know that only
                # the to-be-removed packages provided this virtual
                # package.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        if virtual_pkg == package:
                            continue
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
        params = {
            'suite_id':    suite_id,
            'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
        }
        statement = '''
            SELECT s.source, string_agg(sm.value, ', ') as build_dep
               FROM source s
               JOIN source_metadata sm ON s.id = sm.src_id
               WHERE s.id in
                   (SELECT source FROM src_associations
                       WHERE suite = :suite_id)
                   AND sm.key_id in :metakey_ids
               GROUP BY s.id, s.source'''
        query = session.query('source', 'build_dep').from_statement(statement). \
            params(params)
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                try:
                    parsed_dep = []
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    print "Error for package %s: %s" % (source, e)

        return package_dependencies, arch_providers_of, arch_provided_by
Example no. 44
    def _load_package_information(session, suite_id, suite_archs2id):
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providers_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
        params = {
            'suite_id': suite_id,
            'arch_all_id': suite_archs2id['all'],
            'metakey_d_id': metakey_d.key_id,
            'metakey_p_id': metakey_p.key_id,
        }
        all_arches = set(suite_archs2id)
        all_arches.discard('source')

        package_dependencies['source'] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providers_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params['arch_id'] = suite_archs2id[architecture]

            statement = '''
                    SELECT b.package,
                        (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                        (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                        FROM binaries b
                        JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                        WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id'''
            query = session.query('package', 'depends', 'provides'). \
                from_statement(statement).params(params)
            for package, depends, provides in query:

                if depends is not None:
                    try:
                        parsed_dep = []
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        print "Error for package %s: %s" % (package, e)
                # Maintain a counter for each virtual package.  If a
                # Provides: exists, set the counter to 0 and count all
                # provides by a package not in the list for removal.
                # If the counter stays 0 at the end, we know that only
                # the to-be-removed packages provided this virtual
                # package.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        if virtual_pkg == package:
                            continue
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
        params = {
            'suite_id': suite_id,
            'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
        }
        statement = '''
            SELECT s.source, string_agg(sm.value, ', ') as build_dep
               FROM source s
               JOIN source_metadata sm ON s.id = sm.src_id
               WHERE s.id in
                   (SELECT source FROM src_associations
                       WHERE suite = :suite_id)
                   AND sm.key_id in :metakey_ids
               GROUP BY s.id, s.source'''
        query = session.query('source', 'build_dep').from_statement(statement). \
            params(params)
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                try:
                    parsed_dep = []
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    print "Error for package %s: %s" % (source, e)

        return package_dependencies, arch_providers_of, arch_provided_by
Example no. 45
        sys.exit(1)
    file = sys.argv[1]

    print "Working on: %s" % file
    print "Displaying data.tar.gz:"
    apt_inst.DebFile(open(file)).data.go(Callback)

    print "Now extracting the control file:"
    control = apt_inst.DebFile(open(file)).control.extractdata("control")
    sections = apt_pkg.TagSection(control)

    print "Maintainer is: "
    print sections["Maintainer"]

    print
    print "DependsOn: "
    depends = sections["Depends"]
    print apt_pkg.parse_depends(depends)

    print "extracting archive"
    dir = "/tmp/deb"
    os.mkdir(dir)
    apt_inst.DebFile(open(file)).data.extractall(dir)

    def visit(arg, dirname, names):
        print "%s/" % dirname
        for file in names:
            print "\t%s" % file

    os.path.walk(dir, visit, None)
Example no. 46
    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        # "Multi-Arch: no" breaks wanna-build, #768353
        multi_arch = control.get("Multi-Arch")
        if multi_arch == 'no':
            raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
Example no. 47
 def satisfy_depends_str(self, dependsstr):
     """Satisfy the dependencies in the given string."""
     return self._satisfy_depends(apt_pkg.parse_depends(dependsstr))
Example no. 48
def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False, include_arch_all=True):
    dbsuite = get_suite(suite, session)
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}
    all_broken = defaultdict(lambda: defaultdict(set))
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set(x.arch_string for x in get_suite_architectures(suite))
    all_arches -= set(["source", "all"])
    removal_set = set(removals)
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id':     dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    if include_arch_all:
        rdep_architectures = all_arches | set(['all'])
    else:
        rdep_architectures = all_arches
    for architecture in rdep_architectures:
        deps = {}
        sources = {}
        virtual_packages = {}
        try:
            params['arch_id'] = get_architecture(architecture, session).arch_id
        except AttributeError:
            continue

        statement = sql.text('''
            SELECT b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id''')
        query = session.query('package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package.  If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package: continue
                    if virtual_pkg not in virtual_packages:
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1

        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])

        # Check binary dependencies (Depends)
        for package in deps:
            if package in removals: continue
            try:
                parsed_dep = apt_pkg.parse_depends(deps[package])
            except ValueError as e:
                print "Error for package %s: %s" % (package, e)
                parsed_dep = []
            for dep in parsed_dep:
                # Check for partial breakage.  If a package has a ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed depends will be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken[source][package].add(architecture)
                    dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Depends:"
        else:
            print "# Broken Depends:"
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print '    %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
            for line in lines[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
        if not cruft:
            print

    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = defaultdict(set)
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    if include_arch_all:
        metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
    else:
        metakey_ids = (metakey_bd.key_id,)

    params = {
        'suite_id':    dbsuite.suite_id,
        'metakey_ids': metakey_ids,
    }
    statement = sql.text('''
        SELECT s.source, string_agg(sm.value, ', ') as build_dep
           FROM source s
           JOIN source_metadata sm ON s.id = sm.src_id
           WHERE s.id in
               (SELECT src FROM newest_src_association
                   WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
           GROUP BY s.id, s.source''')
    query = session.query('source', 'build_dep').from_statement(statement). \
        params(params)
    for source, build_dep in query:
        if source in removals: continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep = apt_pkg.parse_src_depends(build_dep)
            except ValueError as e:
                print "Error for source %s: %s" % (source, e)
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                key = source
                if component != "main":
                    key = "%s/%s" % (source, component)
                all_broken[key].add(pp_deps(dep))
                dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print '    %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
            for bdep in bdeps[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep
        if not cruft:
            print

    return dep_problem
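
The partial-breakage rule above treats an ORed dependency as broken only when every alternative in the group is being removed. A minimal standalone sketch of that check (package names are invented):

import apt_pkg

apt_pkg.init()

removals = set(["libfoo1", "libfoo-data"])
# "libfoo1 | libfoo-data" is broken (both alternatives removed);
# "libbar2" is not in the removal set, so its group stays satisfiable.
for group in apt_pkg.parse_depends("libfoo1 | libfoo-data, libbar2"):
    unsat = sum(1 for name, _, _ in group if name in removals)
    if unsat == len(group):
        print("broken OR-group: %s" % " | ".join(name for name, _, _ in group))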
Esempio n. 49
0
    # expect the path to a .deb file as the only command-line argument
    if len(sys.argv) != 2:
        print("Usage: %s file.deb" % sys.argv[0])
        sys.exit(1)
    file = sys.argv[1]

    print("Working on: %s" % file)
    print("Displaying data.tar.gz:")
    apt_inst.DebFile(open(file)).data.go(Callback)

    print("Now extracting the control file:")
    control = apt_inst.DebFile(open(file)).control.extractdata("control")
    sections = apt_pkg.TagSection(control)

    print("Maintainer is: ")
    print(sections["Maintainer"])

    print()
    print("DependsOn: ")
    depends = sections["Depends"]
    print(apt_pkg.parse_depends(depends))

    print("extracting archive")
    dir = "/tmp/deb"
    os.mkdir(dir)
    apt_inst.DebFile(open(file)).data.extractall(dir)

    def visit(arg, dirname, names):
        print("%s/" % dirname)
        for file in names:
            print("\t%s" % file)

    os.path.walk(dir, visit, None)
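
os.path.walk no longer exists in Python 3; the final directory listing above can be reproduced with os.walk (assuming the same /tmp/deb extraction directory):

import os

for dirname, _subdirs, names in os.walk("/tmp/deb"):
    print("%s/" % dirname)
    for name in names:
        print("\t%s" % name)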
Esempio n. 50
0
def main():
    cnf = Config()
    Arguments = [
        ('h', 'help', 'Override-Disparity::Options::Help'),
        ('f', 'file', 'Override-Disparity::Options::File', 'HasArg'),
        ('s', 'suite', 'Override-Disparity::Options::Suite', 'HasArg'),
        ('p', 'package', 'Override-Disparity::Options::Package', 'HasArg')
    ]

    for i in ['help', 'package']:
        if not cnf.has_key('Override-Disparity::Options::%s' % (i)):
            cnf['Override-Disparity::Options::%s' % (i)] = ''
    if not cnf.has_key('Override-Disparity::Options::Suite'):
        cnf['Override-Disparity::Options::Suite'] = 'unstable'

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree('Override-Disparity::Options')

    if Options['help']:
        usage()

    depends = {}
    session = DBConn().session()
    suite_name = Options['suite']
    suite = get_suite(suite_name, session)
    if suite is None:
        utils.fubar("Unknown suite '{0}'".format(suite_name))
    components = get_component_names(session)
    arches = set([x.arch_string for x in get_suite_architectures(suite_name)])
    arches -= set(['source', 'all'])
    for arch in arches:
        for component in components:
            Packages = utils.get_packages_from_ftp(suite.archive.path,
                                                   suite_name, component, arch)
            while Packages.step():
                package = Packages.section.find('Package')
                dep_list = Packages.section.find('Depends')
                if Options['package'] and package != Options['package']:
                    continue
                if dep_list:
                    for d in apt_pkg.parse_depends(dep_list):
                        for i in d:
                            if not depends.has_key(package):
                                depends[package] = set()
                            depends[package].add(i[0])

    priorities = {}
    query = """SELECT DISTINCT o.package, p.level, p.priority, m.name
               FROM override o
               JOIN suite s ON s.id = o.suite
               JOIN priority p ON p.id = o.priority
               JOIN binaries b ON b.package = o.package
               JOIN maintainer m ON m.id = b.maintainer
               JOIN bin_associations ba ON ba.bin = b.id
               WHERE s.suite_name = '%s'
               AND ba.suite = s.id
               AND p.level <> 0""" % suite_name
    packages = session.execute(query)

    out = {}
    if Options.has_key('file'):
        outfile = file(os.path.expanduser(Options['file']), 'w')
    else:
        outfile = sys.stdout
    for p in packages:
        priorities[p[0]] = [p[1], p[2], p[3], True]
    for d in sorted(depends.keys()):
        for p in depends[d]:
            if priorities.has_key(d) and priorities.has_key(p):
                if priorities[d][0] < priorities[p][0]:
                    if priorities[d][3]:
                        if not out.has_key(d):
                            out[d] = {}
                        out[d]['priority'] = priorities[d][1]
                        out[d]['maintainer'] = unicode(priorities[d][2],
                                                       'utf-8')
                        out[d]['priority'] = priorities[d][1]
                        priorities[d][3] = False
                    if not out[d].has_key('dependency'):
                        out[d]['dependency'] = {}
                    out[d]['dependency'][p] = priorities[p][1]
    yaml.safe_dump(out, outfile, default_flow_style=False)
    if Options.has_key('file'):
        outfile.close()
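
For reference, the mapping handed to yaml.safe_dump has roughly this shape (the package names, priorities and maintainer below are invented for illustration):

out = {
    'somepackage': {
        'priority': 'standard',                   # priority of the package itself
        'maintainer': u'Jane Doe <jane@example.org>',
        'dependency': {
            'libsomething1': 'optional',          # dependency with a lower priority
        },
    },
}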
Esempio n. 51
0
 def satisfy_depends_str(self, dependsstr):
     """Satisfy the dependencies in the given string."""
     return self._satisfy_depends(apt_pkg.parse_depends(dependsstr, False))
Esempio n. 52
0
def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False):
    dbsuite = get_suite(suite, session)
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}
    all_broken = defaultdict(lambda: defaultdict(set))
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set(x.arch_string for x in get_suite_architectures(suite))
    all_arches -= set(["source", "all"])
    removal_set = set(removals)
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id':     dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    for architecture in all_arches | set(['all']):
        deps = {}
        sources = {}
        virtual_packages = {}
        params['arch_id'] = get_architecture(architecture, session).arch_id

        statement = '''
            SELECT b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id'''
        query = session.query('package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package.  If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package: continue
                    if not virtual_packages.has_key(virtual_pkg):
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1

        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])

        # Check binary dependencies (Depends)
        for package in deps:
            if package in removals: continue
            try:
                parsed_dep = apt_pkg.parse_depends(deps[package])
            except ValueError as e:
                print "Error for package %s: %s" % (package, e)
                parsed_dep = []
            for dep in parsed_dep:
                # Check for partial breakage.  If a package has an ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed group are to be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken[source][package].add(architecture)
                    dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Depends:"
        else:
            print "# Broken Depends:"
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print '    %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
            for line in lines[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
        if not cruft:
            print

    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = defaultdict(set)
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    params = {
        'suite_id':    dbsuite.suite_id,
        'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
    }
    statement = '''
        SELECT s.source, string_agg(sm.value, ', ') as build_dep
           FROM source s
           JOIN source_metadata sm ON s.id = sm.src_id
           WHERE s.id in
               (SELECT source FROM src_associations
                   WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
           GROUP BY s.id, s.source'''
    query = session.query('source', 'build_dep').from_statement(statement). \
        params(params)
    for source, build_dep in query:
        if source in removals: continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep = apt_pkg.parse_src_depends(build_dep)
            except ValueError as e:
                print "Error for source %s: %s" % (source, e)
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                key = source
                if component != "main":
                    key = "%s/%s" % (source, component)
                all_broken[key].add(pp_deps(dep))
                dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print '    %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
            for bdep in bdeps[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep
        if not cruft:
            print

    return dep_problem
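
pp_deps() is a dak helper not shown in this listing; a plausible sketch of what it does with one parsed OR-group (an assumption, not the original implementation):

def pp_deps(dep_group):
    # dep_group is one OR-group from apt_pkg.parse_src_depends(),
    # e.g. [('debhelper', '10', '>='), ('dh-autoreconf', '', '')]
    parts = []
    for name, version, relation in dep_group:
        if relation:
            parts.append("%s (%s %s)" % (name, relation, version))
        else:
            parts.append(name)
    return " | ".join(parts)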
Esempio n. 53
0
#!/usr/bin/env python
import apt_pkg

Parse = apt_pkg.TagFile(open("/var/lib/dpkg/status", "r"))

while Parse.step() == 1:
    print Parse.section.get("Package")
    print apt_pkg.parse_depends(Parse.section.get("Depends", ""))
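
For a field such as "libc6 (>= 2.14), libfoo1 | libfoo2", parse_depends returns one inner list per comma-separated group, each holding (name, version, relation) tuples:

import apt_pkg

apt_pkg.init()
print(apt_pkg.parse_depends("libc6 (>= 2.14), libfoo1 | libfoo2"))
# [[('libc6', '2.14', '>=')], [('libfoo1', '', ''), ('libfoo2', '', '')]]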
Esempio n. 54
0
    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(
                    fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject(
                '{0}: Architecture not in Architecture field in changes file'.
                format(fn))
        if architecture == 'source':
            raise Reject(
                '{0}: Architecture "source" invalid for binary packages'.
                format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject(
                '{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject(
                '{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject(
                '{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances',
                      'Pre-Depends', 'Provides', 'Recommends', 'Replaces',
                      'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(
                        fn, field))

        for field in ('Built-Using', ):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(
                        fn, field))
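
The try/except blocks above rely on apt_pkg raising ValueError for syntax it cannot parse; a quick illustration (the Reject class itself is dak-specific):

import apt_pkg

apt_pkg.init()
try:
    # unclosed version restriction: not valid dependency syntax
    apt_pkg.parse_depends("libfoo (>= 1.0")
except ValueError as e:
    print("APT could not parse the field: %s" % e)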
Esempio n. 55
0
    def fetch(self, names):
        """
        Fetch packages

        Fetch specified and all dependent packages.
        """

        # There may be more than one revision specification for a package.
        # We store them in a list for each package, and we store each list
        # in an ordered dict indexed by the package name. An ordered dict is
        # used to ensure the packages are processed in the specified order.

        depends = collections.OrderedDict()
        for package_name in names:
            pkg = apt_pkg.parse_depends(package_name)[0][0]
            if pkg[0] not in depends:
                depends[pkg[0]] = []
            depends[pkg[0]].append(pkg)

        for package_name in depends.keys():
            try:
                pkg = self._cache[package_name]
            except KeyError:
                msg = "Can't find %s in package cache" % package_name
                raise OpxPackagesError, OpxPackagesError(
                    msg), sys.exc_info()[2]

            # find a version that satisfies the revision specification
            found = False
            for v in pkg.versions:
                satisfied = True

                for dep in depends[package_name]:
                    dep_version = dep[1]
                    dep_relation = dep[2]

                    if not apt_pkg.check_dep(v.version, dep_relation,
                                             dep_version):
                        satisfied = False
                        break

                if satisfied:
                    found = True

                    pkg.candidate = v
                    if self._default_solver:
                        # Use default apt_pkg solver
                        try:
                            pkg.mark_install(auto_inst=True,
                                             auto_fix=True,
                                             from_user=False)
                        except SystemError as ex:
                            raise OpxPackagesError, OpxPackagesError(
                                ex), sys.exc_info()[2]

                        if pkg.marked_keep and not pkg.is_installed:
                            self._dump_package(pkg._pkg)
                            msg = "Could not install %s due to version conflicts" % package_name
                            raise OpxPackagesError(msg)
                    else:
                        # Use modified solver for handling semantic versioning
                        self._fetch_package(pkg._pkg)

                    break

            if not found:
                raise OpxPackagesError(
                    "Failed to locate %s that satisfies revision specifications"
                    % package_name)

        if self._depcache.broken_count:
            logger.info("Attempting to fix %s broken packages",
                        self._depcache.broken_count)
            try:
                self._depcache.fix_broken()
            except SystemError:
                raise OpxPackagesError("We have broken dependencies")

        # Fetch packages
        try:
            self._cache.fetch_archives()
        except apt.cache.FetchFailedException as ex:
            # re-raise exception
            msg = "Fetch failed"
            raise OpxPackagesError, OpxPackagesError(msg), sys.exc_info()[2]
        except apt.cache.FetchCancelledException as ex:
            # re-raise exception
            msg = "Fetch cancelled"
            raise OpxPackagesError, OpxPackagesError(msg), sys.exc_info()[2]
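
The version test inside the loop boils down to apt_pkg.check_dep(candidate_version, relation, wanted_version); for example (opx-foo is a made-up package name):

import apt_pkg

apt_pkg.init()
name, version, relation = apt_pkg.parse_depends("opx-foo (>= 1.2.0)")[0][0]
print(apt_pkg.check_dep("1.3.0", relation, version))   # True
print(apt_pkg.check_dep("1.1.0", relation, version))   # False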
Esempio n. 56
0
 def satisfy_depends_str(self, dependsstr):
     # type: (str) -> bool
     """Satisfy the dependencies in the given string."""
     return self._satisfy_depends(apt_pkg.parse_depends(dependsstr, False))
Esempio n. 57
0
def read_packages_dict_from_file(archive_root, suite, component, arch, with_description=False):
    source_path = archive_root + "/dists/%s/%s/binary-%s/Packages.gz" % (suite, component, arch)

    pkgl10n = defaultdict(dict)
    if with_description:
        l10n_glob = os.path.join(archive_root, "dists", suite, component, "i18n", "Translation-*.xz")
        for path in set(glob.glob(l10n_glob)):
            # Translation-de_DE.xz -> ['Translation', 'de_DE', 'xz']
            lang = re.findall(r"[^-\.]+", os.path.basename(path))[1]
            log.info("Retrieving translations for the '%s' language from '%s'" % (lang, path))
            try:
                with tempfile.TemporaryFile(mode="w+b") as tf:
                    with lzma.open(path, "rb") as f:
                        tf.write(f.read())
                    tf.seek(0)
                    for section in TagFile(tf):
                        pkgname = section.get("Package")
                        if not pkgname:
                            continue
                        pkgl10n[pkgname][lang] = "\n".join(section.get("Description-%s" % lang).splitlines()[1:])
                        if lang == "en":  # en supplies C too
                            pkgl10n[pkgname]["C"] = pkgl10n[pkgname][lang]
            except Exception as e:
                log.warning("Could not use i18n file '{}': {}".format(l10n_en_source_path, str(e)))

    f = gzip.open(source_path, "rb")
    tagf = TagFile(f)
    package_dict = dict()
    all_packages = dict()
    # we might see a package for the first time as a dependency, not as the package itself
    # in that case, store this in a (Package, dependency) list to come back to at the end
    pkg_depends_todo = list()
    for section in tagf:
        pkg = Package(section["Package"], section["Version"], section["Architecture"])
        if not section.get("Filename"):
            print("Package %s-%s has no filename specified." % (pkg["name"], pkg["version"]))
            continue
        pkg.filename = os.path.join(archive_root, section["Filename"])
        all_packages[pkg.name] = pkg
        pkg.maintainer = section["Maintainer"]
        try:
            # Depends: a | b, c -> [[a, b], c]
            depends = parse_depends(section["Depends"])
            for depgroup in depends:
                for (dependency, _, _) in depgroup:
                    if dependency in all_packages:
                        # we've seen it already, so put it on the list
                        pkg.depends.append(all_packages[dependency])
                    else:
                        # we haven't, record that we need to come back to this later
                        pkg_depends_todo.append((pkg, dependency))
        except KeyError:
            pass

        if with_description:
            if pkg.name in pkgl10n:
                for lang in pkgl10n[pkg.name]:
                    pkg.set_description(lang, pkgl10n[pkg.name][lang])
            else:
                pkg.set_description("C", section.get("Description"))

        pkg2 = package_dict.get(pkg.name)
        if pkg2:
            compare = version_compare(pkg2.version, pkg.version)
            if compare >= 0:
                continue
        package_dict[pkg.name] = pkg
    # revisit all deferred dependencies and fill the corresponding Package in
    for (pkg, dep) in pkg_depends_todo:
        if dep in all_packages:
            pkg.depends.append(all_packages[dep])
    f.close()

    return package_dict
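
A hypothetical invocation, assuming a local mirror rooted at /srv/mirror/debian (the path, suite and architecture are placeholders):

packages = read_packages_dict_from_file("/srv/mirror/debian", "unstable",
                                        "main", "amd64", with_description=True)
for name in sorted(packages)[:5]:
    pkg = packages[name]
    print("%s %s (%d deps)" % (name, pkg.version, len(pkg.depends)))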