Example #1
    def open(self, file):
        """Open the package."""
        depends_tags = ["Build-Depends", "Build-Depends-Indep"]
        conflicts_tags = ["Build-Conflicts", "Build-Conflicts-Indep"]
        fd = apt_pkg.open_maybe_clear_signed_file(file)
        fobj = os.fdopen(fd)
        tagfile = apt_pkg.TagFile(fobj)
        try:
            for sec in tagfile:
                for tag in depends_tags:
                    if tag not in sec:
                        continue
                    self._depends.extend(apt_pkg.parse_src_depends(sec[tag]))
                for tag in conflicts_tags:
                    if tag not in sec:
                        continue
                    self._conflicts.extend(apt_pkg.parse_src_depends(sec[tag]))
                if 'Source' in sec:
                    self.pkgname = sec['Source']
                if 'Binary' in sec:
                    self.binaries = [
                        b.strip() for b in sec['Binary'].split(',')
                    ]
                for tag in sec.keys():
                    if tag in sec:
                        self._sections[tag] = sec[tag]
        finally:
            del tagfile
            fobj.close()

        s = _("Install Build-Dependencies for "
              "source package '%s' that builds %s\n") % (
                  self.pkgname, " ".join(self.binaries))
        self._sections["Description"] = s
        self._check_was_run = False
Example #2
    def open(self, file):
        # type: (str) -> None
        """Open the package."""
        depends_tags = ["Build-Depends", "Build-Depends-Indep"]
        conflicts_tags = ["Build-Conflicts", "Build-Conflicts-Indep"]
        fd = apt_pkg.open_maybe_clear_signed_file(file)
        fobj = os.fdopen(fd)
        tagfile = apt_pkg.TagFile(fobj)
        try:
            for sec in tagfile:
                for tag in depends_tags:
                    if tag not in sec:
                        continue
                    self._depends.extend(apt_pkg.parse_src_depends(sec[tag]))
                for tag in conflicts_tags:
                    if tag not in sec:
                        continue
                    self._conflicts.extend(apt_pkg.parse_src_depends(sec[tag]))
                if 'Source' in sec:
                    self.pkgname = sec['Source']
                if 'Binary' in sec:
                    self.binaries = [b.strip() for b in
                                     sec['Binary'].split(',')]
                for tag in sec.keys():      # type: ignore
                    if tag in sec:
                        self._sections[tag] = sec[tag]
        finally:
            del tagfile
            fobj.close()

        s = _("Install Build-Dependencies for "
              "source package '%s' that builds %s\n") % (self.pkgname,
              " ".join(self.binaries))
        self._sections["Description"] = s
        self._check_was_run = False
Example #3
    def open(self, file):
        """Open the package."""
        depends_tags = ["Build-Depends", "Build-Depends-Indep"]
        conflicts_tags = ["Build-Conflicts", "Build-Conflicts-Indep"]
        fobj = open(file)
        tagfile = apt_pkg.TagFile(fobj)
        try:
            for sec in tagfile:
                for tag in depends_tags:
                    if tag not in sec:
                        continue
                    self._depends.extend(apt_pkg.parse_src_depends(sec[tag]))
                for tag in conflicts_tags:
                    if tag not in sec:
                        continue
                    self._conflicts.extend(apt_pkg.parse_src_depends(sec[tag]))
                if 'Source' in sec:
                    self.pkgname = sec['Source']
                if 'Binary' in sec:
                    self.binaries = sec['Binary'].split(', ')
                if 'Version' in sec:
                    self._sections['Version'] = sec['Version']
        finally:
            del tagfile
            fobj.close()

        s = _("Install Build-Dependencies for "
              "source package '%s' that builds %s\n") % (self.pkgname,
              " ".join(self.binaries))
        self._sections["Description"] = s
        self._check_was_run = False
Example #4
    def open(self, file):
        """Open the package."""
        depends_tags = ["Build-Depends", "Build-Depends-Indep"]
        conflicts_tags = ["Build-Conflicts", "Build-Conflicts-Indep"]

        fobj = open(file)
        tagfile = apt_pkg.TagFile(fobj)
        try:
            for sec in tagfile:
                for tag in depends_tags:
                    if tag not in sec:
                        continue
                    self._depends.extend(apt_pkg.parse_src_depends(sec[tag]))
                for tag in conflicts_tags:
                    if tag not in sec:
                        continue
                    self._conflicts.extend(apt_pkg.parse_src_depends(sec[tag]))
                if 'Source' in sec:
                    self.pkgname = sec['Source']
                if 'Binary' in sec:
                    self.binaries = sec['Binary'].split(', ')
                if 'Version' in sec:
                    self._sections['Version'] = sec['Version']
        finally:
            del tagfile
            fobj.close()

        s = _("Install Build-Dependencies for "
              "source package '%s' that builds %s\n") % (
                  self.pkgname, " ".join(self.binaries))
        self._sections["Description"] = s
Example #5
    def check(self, upload):
        if upload.changes.source is None:
            if upload.changes.sourceful:
                raise Reject(
                    "{}: Architecture field includes source, but no source package is included in the upload"
                    .format(upload.changes.filename))
            return True

        if not upload.changes.sourceful:
            raise Reject(
                "{}: Architecture field does not include source, but a source package is included in the upload"
                .format(upload.changes.filename))

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject(
                '{0}: Source field does not match Source field in changes'.
                format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject(
                '{0}: Version field does not match Version field in changes'.
                format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in six.itervalues(source.files):
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep',
                      'Build-Depends', 'Build-Depends-Arch',
                      'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject(
                        '{0}: APT could not parse {1} field: {2}'.format(
                            dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control,
                                        list(source.files.keys()))
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
Example #6
def chk_bd_process_dir (unused, dirname, filenames):
    for name in filenames:
        if not name.endswith(".dsc"):
            continue
        filename = os.path.abspath(dirname+'/'+name)
        dsc = utils.parse_changes(filename, dsc_file=1)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = dsc.get(field_name)
            if field:
                try:
                    apt_pkg.parse_src_depends(field)
                except:
                    print "E: [%s] %s: %s" % (filename, field_name, field)
                    pass
Example #7
def chk_bd_process_dir(unused, dirname, filenames):
    for name in filenames:
        if not name.endswith(".dsc"):
            continue
        filename = os.path.abspath(dirname + '/' + name)
        dsc = utils.parse_changes(filename, dsc_file=1)
        for field_name in ["build-depends", "build-depends-indep"]:
            field = dsc.get(field_name)
            if field:
                try:
                    apt_pkg.parse_src_depends(field)
                except:
                    print "E: [%s] %s: %s" % (filename, field_name, field)
                    pass
Example #8
    def test_parse_src_depends(self):
        """dependencies: Test apt_pkg.parse_src_depends()."""
        # Check that architecture exclusion works
        # depends_this: Current architecture is included
        # depends_this_too: Another architecture is excluded
        # depends_other: The current architecture is excluded
        # depends_other_too: Another architecture is requested.
        architecture = apt_pkg.config["APT::Architecture"]
        depends_this = apt_pkg.parse_src_depends("p [%s]" % architecture)
        depends_this_too = apt_pkg.parse_src_depends("p [!not-existing-arch]")
        depends_other = apt_pkg.parse_src_depends("p [!%s]" % architecture)
        depends_other_too = apt_pkg.parse_src_depends("p [not-existing-arch]")

        self.assertEqual(len(depends_this), 1)
        self.assertEqual(len(depends_this_too), 1)
        self.assertEqual(len(depends_other), 0)
        self.assertEqual(len(depends_other_too), 0)
Example #9
File: checks.py Project: Debian/dak
    def check(self, upload):
        if upload.changes.source is None:
            if "source" in upload.changes.architectures:
                raise Reject("{}: Architecture field includes source, but no source package is included in the upload".format(upload.changes.filename))
            return True

        if "source" not in upload.changes.architectures:
            raise Reject("{}: Architecture field does not include source, but a source package is included in the upload".format(upload.changes.filename))

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
Example #10
    def open(self, file):
        """Open the package."""
        depends_tags = ["Build-Depends", "Build-Depends-Indep"]
        conflicts_tags = ["Build-Conflicts", "Build-Conflicts-Indep"]
        fobj = open(file)
        tagfile = apt_pkg.TagFile(fobj)
        try:
            for sec in tagfile:
                # we only care about the stanza with the "Format:" tag, the
                # rest is gpg signature noise. we should probably have
                # bindings for apt's OpenMaybeClearsignedFile()
                if "Format" not in sec:
                    continue
                for tag in depends_tags:
                    if tag not in sec:
                        continue
                    self._depends.extend(apt_pkg.parse_src_depends(sec[tag]))
                for tag in conflicts_tags:
                    if tag not in sec:
                        continue
                    self._conflicts.extend(apt_pkg.parse_src_depends(sec[tag]))
                if 'Source' in sec:
                    self.pkgname = sec['Source']
                if 'Binary' in sec:
                    self.binaries = [b.strip() for b in
                                     sec['Binary'].split(',')]
                for tag in sec.keys():
                    if tag in sec:
                        self._sections[tag] = sec[tag]
        finally:
            del tagfile
            fobj.close()

        s = _("Install Build-Dependencies for "
              "source package '%s' that builds %s\n") % (self.pkgname,
              " ".join(self.binaries))
        self._sections["Description"] = s
        self._check_was_run = False
Example #11
    def _parseSource(self, section):
        """Parse a section from a Sources file."""
        src = section["Package"]
        ver = section["Version"]

        # If we have already seen an equal or newer version of this source,
        # then skip this section.
        if src in self.sources:
            last_ver = self.sources[src]["Version"]
            if apt_pkg.version_compare(last_ver, ver) >= 0:
                return

        self.sources[src] = {}

        self.sources[src]["Maintainer"] = \
            unicode(section.get("Maintainer", ""), "utf8", "replace")
        self.sources[src]["Version"] = ver

        for field in "Build-Depends", "Build-Depends-Indep":
            value = section.get(field, "")
            self.sources[src][field] = apt_pkg.parse_src_depends(value)

        binaries = apt_pkg.parse_depends(section.get("Binary", src))
        self.sources[src]["Binaries"] = [ b[0][0] for b in binaries ]
Example #12
    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(
                    fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject(
                '{0}: Architecture not in Architecture field in changes file'.
                format(fn))
        if architecture == 'source':
            raise Reject(
                '{0}: Architecture "source" invalid for binary packages'.
                format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject(
                '{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject(
                '{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject(
                '{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances',
                      'Pre-Depends', 'Provides', 'Recommends', 'Replaces',
                      'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(
                        fn, field))

        for field in ('Built-Using', ):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(
                        fn, field))
Example #13
def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False):
    dbsuite = get_suite(suite, session)
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}
    all_broken = defaultdict(lambda: defaultdict(set))
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set(x.arch_string for x in get_suite_architectures(suite))
    all_arches -= set(["source", "all"])
    removal_set = set(removals)
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id':     dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    for architecture in all_arches | set(['all']):
        deps = {}
        sources = {}
        virtual_packages = {}
        params['arch_id'] = get_architecture(architecture, session).arch_id

        statement = '''
            SELECT b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id'''
        query = session.query('package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package.  If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package: continue
                    if virtual_pkg not in virtual_packages:
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1

        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])

        # Check binary dependencies (Depends)
        for package in deps:
            if package in removals: continue
            try:
                parsed_dep = apt_pkg.parse_depends(deps[package])
            except ValueError as e:
                print "Error for package %s: %s" % (package, e)
                parsed_dep = []
            for dep in parsed_dep:
                # Check for partial breakage.  If a package has a ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed depends will be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken[source][package].add(architecture)
                    dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Depends:"
        else:
            print "# Broken Depends:"
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print '    %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
            for line in lines[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
        if not cruft:
            print

    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = defaultdict(set)
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    params = {
        'suite_id':    dbsuite.suite_id,
        'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
    }
    statement = '''
        SELECT s.source, string_agg(sm.value, ', ') as build_dep
           FROM source s
           JOIN source_metadata sm ON s.id = sm.src_id
           WHERE s.id in
               (SELECT source FROM src_associations
                   WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
           GROUP BY s.id, s.source'''
    query = session.query('source', 'build_dep').from_statement(statement). \
        params(params)
    for source, build_dep in query:
        if source in removals: continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep = apt_pkg.parse_src_depends(build_dep)
            except ValueError as e:
                print "Error for source %s: %s" % (source, e)
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                key = source
                if component != "main":
                    key = "%s/%s" % (source, component)
                all_broken[key].add(pp_deps(dep))
                dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print '    %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
            for bdep in bdeps[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep
        if not cruft:
            print

    return dep_problem
Example #14
    def verify(self):
        """Verify the uploaded .dsc file.

        This method is an error generator, i.e., it returns an iterator over
        all exceptions that are generated while processing DSC file checks.
        """

        for error in SourceUploadFile.verify(self):
            yield error

        # Check size and checksum of the DSC file itself
        try:
            self.checkSizeAndCheckSum()
        except UploadError as error:
            yield error

        try:
            raw_files = parse_and_merge_file_lists(self._dict, changes=False)
        except UploadError as e:
            yield e
            return

        files = []
        for attr in raw_files:
            filename, hashes, size = attr
            if not re_issource.match(filename):
                # DSC files only really hold references to source
                # files; they are essentially a description of a source
                # package. Anything else is crack.
                yield UploadError("%s: File %s does not look sourceful." %
                                  (self.filename, filename))
                continue
            filepath = os.path.join(self.dirname, filename)
            try:
                file_instance = DSCUploadedFile(filepath, hashes, size,
                                                self.policy, self.logger)
            except UploadError as error:
                yield error
            else:
                files.append(file_instance)
        self.files = files

        if not re_valid_pkg_name.match(self.source):
            yield UploadError("%s: invalid source name %s" %
                              (self.filename, self.source))
        if not re_valid_version.match(self.dsc_version):
            yield UploadError("%s: invalid version %s" %
                              (self.filename, self.dsc_version))

        if not self.policy.distroseries.isSourcePackageFormatPermitted(
                self.format):
            yield UploadError(
                "%s: format '%s' is not permitted in %s." %
                (self.filename, self.format, self.policy.distroseries.name))

        # Validate the build dependencies
        for field_name in ['Build-Depends', 'Build-Depends-Indep']:
            field = self._dict.get(field_name, None)
            if field is not None:
                if field.startswith("ARRAY"):
                    yield UploadError(
                        "%s: invalid %s field produced by a broken version "
                        "of dpkg-dev (1.10.11)" % (self.filename, field_name))
                try:
                    apt_pkg.parse_src_depends(field)
                except (SystemExit, KeyboardInterrupt):
                    raise
                except Exception as error:
                    # Swallow everything apt_pkg throws at us because
                    # it is not desperately pythonic and can raise odd
                    # or confusing exceptions at times and is out of
                    # our control.
                    yield UploadError(
                        "%s: invalid %s field; cannot be parsed by apt: %s" %
                        (self.filename, field_name, error))

        # Verify that the version declared in the changes file is the same
        # as the version in the DSC (including epochs).
        if self.dsc_version != self.version:
            yield UploadError(
                "%s: version ('%s') in .dsc does not match version "
                "('%s') in .changes." %
                (self.filename, self.dsc_version, self.version))

        for error in self.checkFiles():
            yield error
Example #15
    def verify(self):
        """Verify the uploaded .dsc file.

        This method is an error generator, i.e., it returns an iterator over
        all exceptions that are generated while processing DSC file checks.
        """

        for error in SourceUploadFile.verify(self):
            yield error

        # Check size and checksum of the DSC file itself
        try:
            self.checkSizeAndCheckSum()
        except UploadError as error:
            yield error

        try:
            raw_files = parse_and_merge_file_lists(self._dict, changes=False)
        except UploadError as e:
            yield e
            return

        files = []
        for attr in raw_files:
            filename, hashes, size = attr
            if not re_issource.match(filename):
                # DSC files only really hold references to source
                # files; they are essentially a description of a source
                # package. Anything else is crack.
                yield UploadError("%s: File %s does not look sourceful." % (
                                  self.filename, filename))
                continue
            filepath = os.path.join(self.dirname, filename)
            try:
                file_instance = DSCUploadedFile(
                    filepath, hashes, size, self.policy, self.logger)
            except UploadError as error:
                yield error
            else:
                files.append(file_instance)
        self.files = files

        if not re_valid_pkg_name.match(self.source):
            yield UploadError(
                "%s: invalid source name %s" % (self.filename, self.source))
        if not re_valid_version.match(self.dsc_version):
            yield UploadError(
                "%s: invalid version %s" % (self.filename, self.dsc_version))

        if not self.policy.distroseries.isSourcePackageFormatPermitted(
            self.format):
            yield UploadError(
                "%s: format '%s' is not permitted in %s." %
                (self.filename, self.format, self.policy.distroseries.name))

        # Validate the build dependencies
        for field_name in ['Build-Depends', 'Build-Depends-Indep']:
            field = self._dict.get(field_name, None)
            if field is not None:
                if field.startswith("ARRAY"):
                    yield UploadError(
                        "%s: invalid %s field produced by a broken version "
                        "of dpkg-dev (1.10.11)" % (self.filename, field_name))
                try:
                    apt_pkg.parse_src_depends(field)
                except (SystemExit, KeyboardInterrupt):
                    raise
                except Exception as error:
                    # Swallow everything apt_pkg throws at us because
                    # it is not desperately pythonic and can raise odd
                    # or confusing exceptions at times and is out of
                    # our control.
                    yield UploadError(
                        "%s: invalid %s field; cannot be parsed by apt: %s"
                        % (self.filename, field_name, error))

        # Verify that the version declared in the changes file is the same
        # as the version in the DSC (including epochs).
        if self.dsc_version != self.version:
            yield UploadError(
                "%s: version ('%s') in .dsc does not match version "
                "('%s') in .changes."
                % (self.filename, self.dsc_version, self.version))

        for error in self.checkFiles():
            yield error
Example #16
    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        # "Multi-Arch: no" breaks wanna-build, #768353
        multi_arch = control.get("Multi-Arch")
        if multi_arch == 'no':
            raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
Example #17
def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False, include_arch_all=True):
    dbsuite = get_suite(suite, session)
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}
    all_broken = defaultdict(lambda: defaultdict(set))
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set(x.arch_string for x in get_suite_architectures(suite))
    all_arches -= set(["source", "all"])
    removal_set = set(removals)
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id':     dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    if include_arch_all:
        rdep_architectures = all_arches | set(['all'])
    else:
        rdep_architectures = all_arches
    for architecture in rdep_architectures:
        deps = {}
        sources = {}
        virtual_packages = {}
        try:
            params['arch_id'] = get_architecture(architecture, session).arch_id
        except AttributeError:
            continue

        statement = sql.text('''
            SELECT b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id''')
        query = session.query('package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package.  If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package: continue
                    if virtual_pkg not in virtual_packages:
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1

        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])

        # Check binary dependencies (Depends)
        for package in deps:
            if package in removals: continue
            try:
                parsed_dep = apt_pkg.parse_depends(deps[package])
            except ValueError as e:
                print "Error for package %s: %s" % (package, e)
                parsed_dep = []
            for dep in parsed_dep:
                # Check for partial breakage.  If a package has a ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed depends will be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken[source][package].add(architecture)
                    dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Depends:"
        else:
            print "# Broken Depends:"
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print '    %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
            for line in lines[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
        if not cruft:
            print

    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = defaultdict(set)
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    if include_arch_all:
        metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
    else:
        metakey_ids = (metakey_bd.key_id,)

    params = {
        'suite_id':    dbsuite.suite_id,
        'metakey_ids': metakey_ids,
    }
    statement = sql.text('''
        SELECT s.source, string_agg(sm.value, ', ') as build_dep
           FROM source s
           JOIN source_metadata sm ON s.id = sm.src_id
           WHERE s.id in
               (SELECT src FROM newest_src_association
                   WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
           GROUP BY s.id, s.source''')
    query = session.query('source', 'build_dep').from_statement(statement). \
        params(params)
    for source, build_dep in query:
        if source in removals: continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep = apt_pkg.parse_src_depends(build_dep)
            except ValueError as e:
                print "Error for source %s: %s" % (source, e)
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                key = source
                if component != "main":
                    key = "%s/%s" % (source, component)
                all_broken[key].add(pp_deps(dep))
                dep_problem = 1

    if all_broken and not quiet:
        if cruft:
            print "  - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print '    %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
            for bdep in bdeps[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep
        if not cruft:
            print

    return dep_problem
Example #18
    def _load_package_information(session, suite_id, suite_archs2id):
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providers_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
        params = {
            'suite_id':     suite_id,
            'arch_all_id':  suite_archs2id['all'],
            'metakey_d_id': metakey_d.key_id,
            'metakey_p_id': metakey_p.key_id,
        }
        all_arches = set(suite_archs2id)
        all_arches.discard('source')

        package_dependencies['source'] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providers_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params['arch_id'] = suite_archs2id[architecture]

            statement = '''
                    SELECT b.package,
                        (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                        (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                        FROM binaries b
                        JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                        WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id'''
            query = session.query('package', 'depends', 'provides'). \
                from_statement(statement).params(params)
            for package, depends, provides in query:

                if depends is not None:
                    try:
                        parsed_dep = []
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        print "Error for package %s: %s" % (package, e)
                # Record the Provides: relationships in both directions:
                # which packages provide each virtual package, and which
                # virtual packages each package provides.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        if virtual_pkg == package:
                            continue
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
        params = {
            'suite_id':    suite_id,
            'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
        }
        statement = '''
            SELECT s.source, string_agg(sm.value, ', ') as build_dep
               FROM source s
               JOIN source_metadata sm ON s.id = sm.src_id
               WHERE s.id in
                   (SELECT source FROM src_associations
                       WHERE suite = :suite_id)
                   AND sm.key_id in :metakey_ids
               GROUP BY s.id, s.source'''
        query = session.query('source', 'build_dep').from_statement(statement). \
            params(params)
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                try:
                    parsed_dep = []
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    print "Error for package %s: %s" % (source, e)

        return package_dependencies, arch_providers_of, arch_provided_by
Example #19
    def _load_package_information(session, suite_id, suite_archs2id):
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providers_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
        params = {
            'suite_id': suite_id,
            'arch_all_id': suite_archs2id['all'],
            'metakey_d_id': metakey_d.key_id,
            'metakey_p_id': metakey_p.key_id,
        }
        all_arches = set(suite_archs2id)
        all_arches.discard('source')

        package_dependencies['source'] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providers_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params['arch_id'] = suite_archs2id[architecture]

            statement = '''
                    SELECT b.package,
                        (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                        (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                        FROM binaries b
                        JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                        WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id'''
            query = session.query('package', 'depends', 'provides'). \
                from_statement(statement).params(params)
            for package, depends, provides in query:

                if depends is not None:
                    try:
                        parsed_dep = []
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        print "Error for package %s: %s" % (package, e)
                # Record the Provides: relationships in both directions:
                # which packages provide each virtual package, and which
                # virtual packages each package provides.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        if virtual_pkg == package:
                            continue
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
        params = {
            'suite_id': suite_id,
            'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
        }
        statement = '''
            SELECT s.source, string_agg(sm.value, ', ') as build_dep
               FROM source s
               JOIN source_metadata sm ON s.id = sm.src_id
               WHERE s.id in
                   (SELECT source FROM src_associations
                       WHERE suite = :suite_id)
                   AND sm.key_id in :metakey_ids
               GROUP BY s.id, s.source'''
        query = session.query('source', 'build_dep').from_statement(statement). \
            params(params)
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                try:
                    parsed_dep = []
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    print "Error for package %s: %s" % (source, e)

        return package_dependencies, arch_providers_of, arch_provided_by
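
All of the examples above pass a Build-Depends-style field to apt_pkg.parse_src_depends() and iterate over the result. For reference, a minimal sketch of that return structure follows. It is not taken from any of the projects above; the field value and the "hypothetical-arch" name are made up for illustration, and python-apt is assumed to be installed.

# Minimal sketch (illustrative only): parse_src_depends() returns a list of
# OR-groups, each a list of (package, version, relation) tuples, with any
# [arch] restrictions resolved against the configured APT::Architecture.
import apt_pkg

apt_pkg.init()  # load APT configuration, including APT::Architecture

# Hypothetical field value for demonstration purposes.
field = "debhelper (>= 9), libfoo-dev | libbar-dev, baz [!hypothetical-arch]"
for or_group in apt_pkg.parse_src_depends(field):
    # e.g. [('debhelper', '9', '>=')] for the first group, and
    # [('libfoo-dev', '', ''), ('libbar-dev', '', '')] for the alternation.
    print(or_group)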