Code example #1
File: dscfile.py  Project: pombreda/UnnaturalCodeFork
def check_format_1_0_files(filename, file_type_counts, component_counts,
                           bzip2_count, xz_count):
    """Check that the given counts of each file type suit format 1.0.

    A 1.0 source must be native (with only one tar.gz), or have an orig.tar.gz
    and a diff.gz. It cannot use bzip2 or xz compression.
    """
    if bzip2_count > 0:
        yield UploadError("%s: is format 1.0 but uses bzip2 compression." %
                          filename)
    if xz_count > 0:
        yield UploadError("%s: is format 1.0 but uses xz compression." %
                          filename)

    valid_file_type_counts = [
        {
            SourcePackageFileType.NATIVE_TARBALL: 1,
            SourcePackageFileType.ORIG_TARBALL: 0,
            SourcePackageFileType.DEBIAN_TARBALL: 0,
            SourcePackageFileType.DIFF: 0,
        },
        {
            SourcePackageFileType.ORIG_TARBALL: 1,
            SourcePackageFileType.DIFF: 1,
            SourcePackageFileType.NATIVE_TARBALL: 0,
            SourcePackageFileType.DEBIAN_TARBALL: 0,
        },
    ]

    if (file_type_counts not in valid_file_type_counts
            or len(component_counts) > 0):
        yield UploadError(
            "%s: must have exactly one tar.gz, or an orig.tar.gz and diff.gz" %
            filename)
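
The checks above are written as error generators: they yield UploadError instances instead of raising them, so the caller can collect every problem in one pass. A minimal, self-contained sketch of that pattern, using a stand-in UploadError and a simplified check rather than the Launchpad classes:

class UploadError(Exception):
    """Stand-in for the Launchpad UploadError used above."""

def check_compression(filename, bzip2_count, xz_count):
    # Yield (rather than raise) one error per problem found.
    if bzip2_count > 0:
        yield UploadError("%s: is format 1.0 but uses bzip2 compression." %
                          filename)
    if xz_count > 0:
        yield UploadError("%s: is format 1.0 but uses xz compression." %
                          filename)

# The caller drains the generator and decides what to do with the errors.
errors = list(check_compression("hello_1.0.dsc", bzip2_count=1, xz_count=0))
for error in errors:
    print(error)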
Code example #2
File: dscfile.py  Project: pombreda/UnnaturalCodeFork
def check_format_3_0_quilt_files(filename, file_type_counts, component_counts,
                                 bzip2_count, xz_count):
    """Check that the given counts of each file type suit format 3.0 (native).

    A 3.0 (quilt) source must have exactly one orig.tar.*, one debian.tar.*,
    and at most one orig-COMPONENT.tar.* for each COMPONENT. Any of gzip,
    bzip2, and xz compression are permissible.
    """

    valid_file_type_counts = [
        {
            SourcePackageFileType.ORIG_TARBALL: 1,
            SourcePackageFileType.DEBIAN_TARBALL: 1,
            SourcePackageFileType.NATIVE_TARBALL: 0,
            SourcePackageFileType.DIFF: 0,
        },
    ]

    if file_type_counts not in valid_file_type_counts:
        yield UploadError(
            "%s: must have only an orig.tar.*, a debian.tar.*, and "
            "optionally orig-*.tar.*" % filename)

    for component in component_counts:
        if component_counts[component] > 1:
            yield UploadError("%s: has more than one orig-%s.tar.*." %
                              (filename, component))
Code example #3
    def verifyPackage(self):
        """Check if the binary is in changesfile and its name is valid."""
        control_package = self.control.get("Package", '')

        # Since DDEBs are generated after the original DEBs are processed
        # and considered by `dpkg-genchanges`, they are only half-incorporated
        # into the binary upload changes file. DDEBs are only listed in the
        # Files/Checksums-Sha1/Checksums-Sha256 sections and are missing from
        # Binary/Description.
        if not self.filename.endswith('.ddeb'):
            if control_package not in self.changes.binaries:
                yield UploadError(
                    "%s: control file lists name as %r, which isn't in "
                    "changes file." % (self.filename, control_package))

        if not re_valid_pkg_name.match(control_package):
            yield UploadError("%s: invalid package name %r." %
                              (self.filename, control_package))

        # Ensure the filename matches the contents of the .deb
        # First check the file package name matches the deb contents.
        binary_match = re_isadeb.match(self.filename)
        file_package = binary_match.group(1)
        if control_package != file_package:
            yield UploadError("%s: package part of filename %r does not match "
                              "package name in the control fields %r" %
                              (self.filename, file_package, control_package))
Code example #4
    def __init__(self, filepath, md5, size, component_and_section,
                 priority_name, package, version, changes, policy, logger):
        """Check presence of the component and section from an uploaded_file.

        They need to satisfy at least the NEW queue constraints, which
        include SourcePackageRelease creation, so the component and section
        need to exist even if they might be overridden later.
        """
        super(PackageUploadFile,
              self).__init__(filepath, md5, size, component_and_section,
                             priority_name, policy, logger)
        self.package = package
        self.version = version
        self.changes = changes

        valid_components = [
            component.name for component in getUtility(IComponentSet)
        ]
        valid_sections = [section.name for section in getUtility(ISectionSet)]

        if self.section_name not in valid_sections:
            raise UploadError("%s: Unknown section %r" %
                              (self.filename, self.section_name))

        if self.component_name not in valid_components:
            raise UploadError("%s: Unknown component %r" %
                              (self.filename, self.component_name))
Code example #5
    def _verifySignature(self, content, filename):
        """Verify the signature on the file content.

        Raise UploadError if the signing key cannot be found in launchpad
        or if the GPG verification failed for any other reason.

        Returns a tuple of the key (`IGPGKey` object) and the verified
        cleartext data.
        """
        self.logger.debug("Verifying signature on %s" %
                          os.path.basename(filename))

        try:
            sig = getUtility(IGPGHandler).getVerifiedSignatureResilient(
                content)
        except GPGVerificationError as error:
            raise UploadError("GPG verification of %s failed: %s" %
                              (filename, str(error)))

        key = getUtility(IGPGKeySet).getByFingerprint(sig.fingerprint)
        if key is None:
            raise UploadError("Signing key %s not registered in launchpad." %
                              sig.fingerprint)

        return (key, sig.plain_data)
Code example #6
def findFile(source_dir, filename):
    """Find and return any file under source_dir

    :param source_file: The directory where the source was extracted
    :param source_dir: The directory where the source was extracted.
    :return fullpath: The full path of the file, else return None if the
                      file is not found.
    """
    # Instead of trying to predict the unpacked source directory name,
    # we simply use glob to retrieve everything like:
    # 'tempdir/*/debian/filename'
    globpath = os.path.join(source_dir, "*", filename)
    for fullpath in glob.glob(globpath):
        if not os.path.exists(fullpath):
            continue
        if os.path.islink(fullpath):
            raise UploadError("Symbolic link for %s not allowed" % filename)
        # Anything returned by this method should be less than 10MiB since it
        # will be stored in the database assuming the source package isn't
        # rejected before hand
        if os.stat(fullpath).st_size > 10485760:
            raise UploadError("%s file too large, 10MiB max" % filename)
        else:
            return fullpath
    return None
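
A hypothetical usage sketch of the same glob-based lookup (the directory layout and file names below are invented for illustration): after unpacking into a temporary directory whose top-level name is unknown, 'debian/copyright' can be located without predicting that name.

import glob
import os
import tempfile

# Build a fake unpacked tree: <tempdir>/hello-1.0/debian/copyright
tempdir = tempfile.mkdtemp()
os.makedirs(os.path.join(tempdir, "hello-1.0", "debian"))
with open(os.path.join(tempdir, "hello-1.0", "debian", "copyright"), "w") as f:
    f.write("GPL-2+\n")

# The wildcard stands in for the unpredictable top-level directory name.
globpath = os.path.join(tempdir, "*", "debian", "copyright")
print(glob.glob(globpath))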
Code example #7
    def checkSizeAndCheckSum(self):
        """Check the size and checksums of the nascent file.

        Raise UploadError if the size or checksums do not match or if the
        file is not found on the disk.
        """
        if not self.exists_on_disk:
            raise UploadError(
                "File %s mentioned in the changes file was not found." %
                self.filename)

        # Read in the file, compute each of the requested checksums, and
        # remember the size of the file as read in.
        digesters = dict((n, hashlib.new(n)) for n in self.checksums.keys())
        ckfile = open(self.filepath, "r")
        size = 0
        for chunk in filechunks(ckfile):
            for digester in digesters.itervalues():
                digester.update(chunk)
            size += len(chunk)
        ckfile.close()

        # Check the size and checksum match what we were told in __init__
        for n in sorted(self.checksums.keys()):
            if digesters[n].hexdigest() != self.checksums[n]:
                raise UploadError(
                    "File %s mentioned in the changes has a %s mismatch. "
                    "%s != %s" % (self.filename, n, digesters[n].hexdigest(),
                                  self.checksums[n]))
        if size != self.size:
            raise UploadError(
                "File %s mentioned in the changes has a size mismatch. "
                "%s != %s" % (self.filename, size, self.size))
Code example #8
File: dscfile.py  Project: pombreda/UnnaturalCodeFork
    def parse(self, verify_signature=True):
        """Parse the tag file, optionally verifying the signature.

        If verify_signature is True, signingkey will be set to the signing
        `IGPGKey`, and only the verified content will be parsed. Otherwise,
        any signature will be stripped and the contained content parsed.

        Will raise an `UploadError` if the tag file was unparsable,
        or if signature verification was requested but failed.
        """
        try:
            with open(self.filepath, 'rb') as f:
                self.raw_content = f.read()
        except IOError as error:
            raise UploadError("Unable to read %s: %s" % (self.filename, error))

        if verify_signature:
            self.signingkey, self.parsed_content = self.verifySignature(
                self.raw_content, self.filepath)
        else:
            self.logger.debug("%s can be unsigned." % self.filename)
            self.parsed_content = self.raw_content
        try:
            self._dict = parse_tagfile_content(self.parsed_content,
                                               filename=self.filepath)
        except TagFileParseError as error:
            raise UploadError("Unable to parse %s: %s" %
                              (self.filename, error))
Code example #9
File: dscfile.py  Project: pombreda/UnnaturalCodeFork
    def parseAddress(self, addr, fieldname="Maintainer"):
        """Parse an address, using the policy to decide if we should add a
        non-existent person or not.

        Raise an UploadError if the parsing of the maintainer string fails
        for any reason, or if the email address then cannot be found within
        the launchpad database.

        Return a dict containing the rfc822 and rfc2047 formatted forms of
        the address, the person's name, email address and person record within
        the launchpad database.
        """
        try:
            (rfc822, rfc2047, name,
             email) = safe_fix_maintainer(addr, fieldname)
        except ParseMaintError as error:
            raise UploadError(str(error))

        person = getUtility(IPersonSet).getByEmail(email)
        if person and person.private:
            # Private teams can not be maintainers.
            raise UploadError("Invalid Maintainer.")

        if person is None and self.policy.create_people:
            package = self._dict['Source']
            version = self._dict['Version']
            if self.policy.distroseries and self.policy.pocket:
                policy_suite = (
                    '%s/%s' %
                    (self.policy.distroseries.name, self.policy.pocket.name))
            else:
                policy_suite = '(unknown)'
            try:
                person = getUtility(IPersonSet).ensurePerson(
                    email,
                    name,
                    PersonCreationRationale.SOURCEPACKAGEUPLOAD,
                    comment=('when the %s_%s package was uploaded to %s' %
                             (package, version, policy_suite)))
            except InvalidEmailAddress:
                self.logger.info("Invalid email address: '%s'", email)
                person = None

        if person is None:
            raise UploadError("Unable to identify '%s':<%s> in launchpad" %
                              (name, email))

        return {
            "rfc822": rfc822,
            "rfc2047": rfc2047,
            "name": name,
            "email": email,
            "person": person,
        }
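
safe_fix_maintainer is a Launchpad helper; as a rough approximation of the name/address split it performs (without the RFC 2047 encoded form or the strict validation), the standard library's email.utils.parseaddr can be used:

from email.utils import parseaddr

name, email_address = parseaddr("Foo Bar <foo.bar@example.com>")
print(name)           # Foo Bar
print(email_address)  # foo.bar@example.com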
Code example #10
    def unpackAndCheckSource(self):
        """Verify uploaded source using dpkg-source."""
        self.logger.debug("Verifying uploaded source package by unpacking it.")

        try:
            unpacked_dir = unpack_source(self.filepath)
        except DpkgSourceError as e:
            yield UploadError("dpkg-source failed for %s [return: %s]\n"
                              "[dpkg-source output: %s]" %
                              (self.filename, e.result, e.output))
            return

        try:
            # Copy debian/copyright file content. It will be stored in the
            # SourcePackageRelease records.

            # Check if 'dpkg-source' created only one directory.
            temp_directories = [
                entry.name for entry in scandir.scandir(unpacked_dir)
                if entry.is_dir()
            ]
            if len(temp_directories) > 1:
                yield UploadError(
                    'Unpacked source contains more than one directory: %r' %
                    temp_directories)

            # XXX cprov 20070713: We should access only the expected directory
            # name (<sourcename>-<no_epoch(no_revision(version))>).

            # Locate both the copyright and changelog files for later
            # processing.
            try:
                self.copyright = find_copyright(unpacked_dir, self.logger)
            except UploadError as error:
                yield error
                return
            except UploadWarning as warning:
                yield warning

            try:
                self.changelog = find_changelog(unpacked_dir, self.logger)
            except UploadError as error:
                yield error
                return
            except UploadWarning as warning:
                yield warning
        finally:
            self.logger.debug("Cleaning up source tree.")
            cleanup_unpacked_dir(unpacked_dir)
        self.logger.debug("Done")
Code example #11
    def verifyDebTimestamp(self):
        """Check specific DEB format timestamp checks."""
        self.logger.debug("Verifying timestamps in %s" % (self.filename))

        future_cutoff = time.time() + self.policy.future_time_grace
        earliest_year = time.strptime(str(self.policy.earliest_year), "%Y")
        past_cutoff = time.mktime(earliest_year)

        tar_checker = TarFileDateChecker(future_cutoff, past_cutoff)
        tar_checker.reset()
        try:
            deb_file = apt_inst.DebFile(self.filepath)
        except SystemError as error:
            # We get an error from the constructor if the .deb does not
            # contain all the expected top-level members (debian-binary,
            # control.tar.gz, and data.tar.*).
            yield UploadError(str(error))
            return
        try:
            deb_file.control.go(tar_checker.callback)
            deb_file.data.go(tar_checker.callback)
            future_files = tar_checker.future_files.keys()
            if future_files:
                first_file = future_files[0]
                timestamp = time.ctime(tar_checker.future_files[first_file])
                yield UploadError(
                    "%s: has %s file(s) with a time stamp too "
                    "far into the future (e.g. %s [%s])." %
                    (self.filename, len(future_files), first_file, timestamp))

            ancient_files = tar_checker.ancient_files.keys()
            if ancient_files:
                first_file = ancient_files[0]
                timestamp = time.ctime(tar_checker.ancient_files[first_file])
                yield UploadError(
                    "%s: has %s file(s) with a time stamp too "
                    "far in the past (e.g. %s [%s])." %
                    (self.filename, len(ancient_files), first_file, timestamp))
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as error:
            # There are a very large number of places where we might get an
            # exception while checking the timestamps. Many of them come from
            # apt_inst/apt_pkg, which are terrible at raising sane exceptions,
            # so we capture them all and turn them into rejection messages
            # instead.
            yield UploadError("%s: deb contents timestamp check failed: %s" %
                              (self.filename, error))
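
Only the cutoff arithmetic, as a sketch with illustrative policy values (future_time_grace and earliest_year come from the upload policy in the real code):

import time

future_time_grace = 24 * 60 * 60   # e.g. allow one day of clock skew
earliest_year = 1984               # e.g. reject timestamps before this year

future_cutoff = time.time() + future_time_grace
past_cutoff = time.mktime(time.strptime(str(earliest_year), "%Y"))
print(past_cutoff < future_cutoff)  # True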
Code example #12
    def verifySourcePackageRelease(self, sourcepackagerelease):
        """Check if the given ISourcePackageRelease matches the context."""
        assert 'source' in self.changes.architectures, (
            "It should be a mixed upload, but no source part was found.")

        if self.source_version != sourcepackagerelease.version:
            raise UploadError(
                "source version %r for %s does not match version %r "
                "from control file" % (sourcepackagerelease.version,
                                       self.filename, self.source_version))

        if self.source_name != sourcepackagerelease.name:
            raise UploadError(
                "source name %r for %s does not match name %r in "
                "control file" %
                (sourcepackagerelease.name, self.filename, self.source_name))
Code example #13
def cleanup_unpacked_dir(unpacked_dir):
    """Remove the directory with an unpacked source package.

    :param unpacked_dir: Path to the directory.
    """
    try:
        shutil.rmtree(unpacked_dir)
    except OSError as error:
        if errno.errorcode[error.errno] != 'EACCES':
            raise UploadError("couldn't remove tmp dir %s: code %s" %
                              (unpacked_dir, error.errno))
        else:
            result = os.system("chmod -R u+rwx " + unpacked_dir)
            if result != 0:
                raise UploadError("chmod failed with %s" % result)
            shutil.rmtree(unpacked_dir)
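
The errno.errorcode lookup above maps the numeric errno back to its symbolic name before comparing; a more direct spelling of the same test compares against errno.EACCES itself. A sketch showing just that comparison, without the chmod-and-retry fallback:

import errno
import shutil

def rmtree_unless_permission_denied(path):
    # Re-raise anything that is not a permission problem; EACCES would be
    # handled by fixing permissions and retrying, as in the code above.
    try:
        shutil.rmtree(path)
    except OSError as error:
        if error.errno != errno.EACCES:
            raise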
Code example #14
    def verifyPriority(self):
        """Check if priority matches changesfile."""
        control_priority = self.control.get('Priority', '')
        if control_priority and self.priority_name != control_priority:
            yield UploadError(
                "%s control file lists priority as %s but changes file has "
                "%s." % (self.filename, control_priority, self.priority_name))
Code example #15
    def verifyVersion(self):
        """Check if control version is valid matches the filename version.

        Binary version  doesn't need to match the changesfile version,
        because the changesfile version refers to the SOURCE version.
        """
        if not re_valid_version.match(self.control_version):
            yield UploadError("%s: invalid version number %r." %
                              (self.filename, self.control_version))

        binary_match = re_isadeb.match(self.filename)
        filename_version = binary_match.group(2)
        control_version_chopped = re_no_epoch.sub('', self.control_version)
        if filename_version != control_version_chopped:
            yield UploadError("%s: should be %s according to control file." %
                              (filename_version, control_version_chopped))
Code example #16
    def __init__(self, filepath, checksums, size, component_and_section,
                 priority, package, version, changes, policy, logger):
        """Construct a DSCFile instance.

        This takes all NascentUploadFile constructor parameters plus package
        and version.

        Can raise UploadError.
        """
        # Avoid circular imports.
        from lp.archiveuploader.nascentupload import EarlyReturnUploadError

        SourceUploadFile.__init__(self, filepath, checksums, size,
                                  component_and_section, priority, package,
                                  version, changes, policy, logger)
        self.parse(verify_signature=not policy.unsigned_dsc_ok, as_bytes=True)

        self.logger.debug("Performing DSC verification.")
        for mandatory_field in self.mandatory_fields:
            if mandatory_field not in self._dict:
                raise UploadError("Unable to find mandatory field %s in %s" %
                                  (mandatory_field, self.filename))

        self.maintainer = self.parseAddress(self._dict['Maintainer'])

        # If format is not present, assume 1.0. At least one tool in
        # the wild generates dsc files with format missing, and we need
        # to accept them.
        if 'Format' not in self._dict:
            self._dict['Format'] = "1.0"

        if self.format is None:
            raise EarlyReturnUploadError("Unsupported source format: %s" %
                                         self._dict['Format'])
Code example #17
    def verify(self):
        """Run all the verification checks on the changes data.

        This method is an error generator, i.e., it returns an iterator over
        all exceptions that are generated while verifying the changesfile
        consistency.
        """
        self.logger.debug("Verifying the changes file.")

        if len(self.files) == 0:
            yield UploadError("No files found in the changes")

        if 'Urgency' not in self._dict:
            # Urgency is recommended but not mandatory. Default to 'low'
            self._dict['Urgency'] = "low"

        raw_urgency = self._dict['Urgency'].lower()
        if raw_urgency not in self.urgency_map:
            yield UploadWarning(
                "Unable to grok urgency %s, overriding with 'low'" %
                (raw_urgency))
            self._dict['Urgency'] = "low"

        if not self.policy.unsigned_changes_ok:
            assert self.signer is not None, (
                "Policy does not allow unsigned changesfile")
Code example #18
    def parse(self, verify_signature=True, as_bytes=False):
        """Parse the tag file, optionally verifying the signature.

        If verify_signature is True, signingkey will be set to the signing
        `IGPGKey`, and only the verified content will be parsed. Otherwise,
        any signature will be stripped and the contained content parsed.

        Will raise an `UploadError` if the tag file was unparsable,
        or if signature verification was requested but failed.
        """
        try:
            with open(self.filepath, 'rb') as f:
                self.raw_content = f.read()
        except IOError as error:
            raise UploadError("Unable to read %s: %s" % (self.filename, error))

        if verify_signature:
            # We set self.signingkey regardless of whether the key is
            # deactivated or expired, since a deactivated or expired key is
            # still good enough for determining whom to notify, and raising
            # UploadError is enough to prevent the upload being accepted.
            try:
                self.signingkey, self.parsed_content = self._verifySignature(
                    self.raw_content, self.filepath)
                if not self.signingkey.active:
                    raise UploadError(
                        "File %s is signed with a deactivated key %s" %
                        (self.filepath, self.signingkey.fingerprint))
            except GPGKeyExpired as e:
                # This may theoretically return None, but the "expired"
                # error will take precedence anyway.
                self.signingkey = getUtility(IGPGKeySet).getByFingerprint(
                    e.key.fingerprint)
                raise UploadError("File %s is signed with an expired key %s" %
                                  (self.filepath, e.key.fingerprint))
        else:
            self.logger.debug("%s can be unsigned." % self.filename)
            self.parsed_content = self.raw_content
        try:
            self._dict = parse_tagfile_content(self.parsed_content,
                                               filename=self.filepath,
                                               as_bytes=as_bytes)
        except TagFileParseError as error:
            raise UploadError("Unable to parse %s: %s" %
                              (self.filename, error))
Code example #19
    def checkBuild(self, build):
        """See PackageUploadFile."""
        try:
            dar = self.policy.distroseries[self.archtag]
        except NotFoundError:
            raise UploadError(
                "Upload to unknown architecture %s for distroseries %s" %
                (self.archtag, self.policy.distroseries))

        build.updateStatus(BuildStatus.FULLYBUILT)

        # Sanity check; raise an error if the build we've been
        # told to link to makes no sense.
        if (build.pocket != self.policy.pocket
                or build.distro_arch_series != dar
                or build.archive != self.policy.archive):
            raise UploadError("Attempt to upload binaries specifying "
                              "build %s, where they don't fit." % build.id)
Code example #20
    def verify(self):
        """Verify CustomUploadFile.

        Simply check whether the given section is allowed for custom uploads.
        It returns an iterator over all the encountered errors and warnings.
        """
        if self.section_name not in self.custom_sections:
            yield UploadError("Unsupported custom section name %r" %
                              self.section_name)
        else:
            handler = self.custom_handlers.get(
                self.custom_sections[self.section_name])
            if handler is not None:
                try:
                    handler.parsePath(self.filename)
                except ValueError:
                    yield UploadError(
                        "Invalid filename %r for section name %r" %
                        (self.filename, self.section_name))
Code example #21
    def verify(self):
        """Verify the uploaded source file.

        It returns an iterator over all the encountered errors and warnings.
        """
        self.logger.debug("Verifying source file %s" % self.filename)

        if 'source' not in self.changes.architectures:
            yield UploadError("%s: changes file doesn't list 'source' in "
                              "Architecture field." % (self.filename))

        version_chopped = re_no_epoch.sub('', self.version)
        if self.is_orig:
            version_chopped = re_no_revision.sub('', version_chopped)

        source_match = re_issource.match(self.filename)
        filename_version = source_match.group(2)
        if filename_version != version_chopped:
            yield UploadError("%s: should be %s according to changes file." %
                              (filename_version, version_chopped))
Code example #22
    def verifySection(self):
        """Check that the component and section match the changesfile."""
        control_section_and_component = self.control.get('Section', '')
        control_component, control_section = splitComponentAndSection(
            control_section_and_component)
        if ((control_component, control_section) !=
                (self.component_name, self.section_name)):
            yield UploadError(
                "%s control file lists section as %s/%s but changes file "
                "has %s/%s." %
                (self.filename, control_component, control_section,
                 self.component_name, self.section_name))
Code example #23
    def parseChanges(self):
        """Process the given changesfile.

        Does:
            * Verification of required fields
            * Verification of the required Format
            * Parses maintainer and changed-by
            * Checks name of changes file
            * Checks signature of changes file

        If any of these checks fail, UploadError is yielded, and it should
        be considered a fatal error (no subsequent processing of the upload
        should be done).

        Logger and Policy are instances built in uploadprocessor.py passed
        via NascentUpload class.
        """
        try:
            self.parse(verify_signature=not self.policy.unsigned_changes_ok)
        except UploadError as e:
            yield e
            return

        for field in self.mandatory_fields:
            if field not in self._dict:
                yield UploadError(
                    "Unable to find mandatory field '%s' in the changes "
                    "file." % field)
                return

        try:
            format = float(self._dict["Format"])
        except KeyError:
            # If format is missing, pretend it's 1.5
            format = 1.5

        if format < 1.5 or format > 2.0:
            yield UploadError(
                "Format out of acceptable range for changes file. Range "
                "1.5 - 2.0, format %g" % format)
Code example #24
    def verifyFormat(self):
        """Check if the DEB format is sane.

        Debian packages are in fact 'ar' files. Thus we run '/usr/bin/ar'
        to look at the contents of the deb files to confirm they make sense.
        """
        ar_process = subprocess.Popen(["/usr/bin/ar", "t", self.filepath],
                                      stdout=subprocess.PIPE)
        output = ar_process.stdout.read()
        result = ar_process.wait()
        if result != 0:
            yield UploadError("%s: 'ar t' invocation failed." % self.filename)
            yield UploadError(
                prefix_multi_line_string(output, " [ar output:] "))

        chunks = output.strip().split("\n")
        if len(chunks) != 3:
            yield UploadError("%s: found %d chunks, expecting 3. %r" %
                              (self.filename, len(chunks), chunks))
            # Unpacking below requires exactly three members; stop here.
            return

        debian_binary, control_tar, data_tar = chunks
        if debian_binary != "debian-binary":
            yield UploadError(
                "%s: first chunk is %s, expected debian-binary." %
                (self.filename, debian_binary))
        if control_tar != "control.tar.gz":
            yield UploadError(
                "%s: second chunk is %s, expected control.tar.gz." %
                (self.filename, control_tar))
        if data_tar not in ("data.tar.gz", "data.tar.bz2", "data.tar.lzma",
                            "data.tar.xz"):
            yield UploadError("%s: third chunk is %s, expected data.tar.gz, "
                              "data.tar.bz2, data.tar.lzma or data.tar.xz." %
                              (self.filename, data_tar))
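
The member-name check on its own, as a standalone sketch fed with an already captured 'ar t' listing instead of invoking /usr/bin/ar:

def deb_members_look_sane(ar_listing):
    chunks = ar_listing.strip().split("\n")
    if len(chunks) != 3:
        return False
    debian_binary, control_tar, data_tar = chunks
    return (debian_binary == "debian-binary"
            and control_tar == "control.tar.gz"
            and data_tar in ("data.tar.gz", "data.tar.bz2",
                             "data.tar.lzma", "data.tar.xz"))

print(deb_members_look_sane("debian-binary\ncontrol.tar.gz\ndata.tar.xz\n"))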
Code example #25
    def _matchDDEBs(self):
        """Check and link DEBs and DDEBs in the upload.

        Matches each DDEB to its corresponding DEB, adding links in both
        directions. Unmatched or duplicated DDEBs result in upload errors.

        This method is an error generator, i.e., it returns an iterator over
        all exceptions that are generated while processing all mentioned
        files.
        """
        unmatched_ddebs = {}
        for uploaded_file in self.changes.files:
            if isinstance(uploaded_file, DdebBinaryUploadFile):
                ddeb_key = (uploaded_file.package, uploaded_file.version,
                            uploaded_file.architecture)
                if ddeb_key in unmatched_ddebs:
                    yield UploadError("Duplicated debug packages: %s %s (%s)" %
                                      ddeb_key)
                else:
                    unmatched_ddebs[ddeb_key] = uploaded_file

        for uploaded_file in self.changes.files:
            is_deb = isinstance(uploaded_file, DebBinaryUploadFile)
            is_udeb = isinstance(uploaded_file, UdebBinaryUploadFile)
            is_ddeb = isinstance(uploaded_file, DdebBinaryUploadFile)
            # We need exactly a DEB or UDEB, not a DDEB.
            if (is_deb or is_udeb) and not is_ddeb:
                try:
                    matching_ddeb = unmatched_ddebs.pop(
                        (uploaded_file.package + '-dbgsym',
                         uploaded_file.version, uploaded_file.architecture))
                except KeyError:
                    continue
                uploaded_file.ddeb_file = matching_ddeb
                matching_ddeb.deb_file = uploaded_file

        if len(unmatched_ddebs) > 0:
            yield UploadError("Orphaned debug packages: %s" %
                              ', '.join('%s %s (%s)' % d
                                        for d in unmatched_ddebs))
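
A toy illustration of the matching rule used above: a DDEB pairs with the DEB whose package name plus '-dbgsym', version, and architecture match its key, and whatever is left in the dict afterwards is orphaned. The package and file names here are invented.

unmatched_ddebs = {
    ("hello-dbgsym", "1.0-1", "amd64"): "hello-dbgsym_1.0-1_amd64.ddeb",
}

deb_package, deb_version, deb_arch = "hello", "1.0-1", "amd64"
match = unmatched_ddebs.pop(
    (deb_package + "-dbgsym", deb_version, deb_arch), None)

print(match)            # hello-dbgsym_1.0-1_amd64.ddeb
print(unmatched_ddebs)  # {} -> no orphaned debug packages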
Code example #26
    def checkBuild(self, build):
        """See PackageUploadFile."""
        # The master verifies the status to confirm successful upload.
        build.updateStatus(BuildStatus.FULLYBUILT)

        # Sanity check; raise an error if the build we've been
        # told to link to makes no sense.
        if (build.pocket != self.policy.pocket
                or build.distroseries != self.policy.distroseries
                or build.archive != self.policy.archive):
            raise UploadError("Attempt to upload source specifying "
                              "recipe build %s, where it doesn't fit." %
                              build.id)
Code example #27
    def __init__(self, filepath, policy, logger):
        """Process the given changesfile.

        Does:
            * Verification of required fields
            * Verification of the required Format
            * Parses maintainer and changed-by
            * Checks name of changes file
            * Checks signature of changes file

        If any of these checks fail, UploadError is raised, and it's
        considered a fatal error (no subsequent processing of the upload
        will be done).

        Logger and Policy are instances built in uploadprocessor.py passed
        via NascentUpload class.
        """
        self.filepath = filepath
        self.policy = policy
        self.logger = logger

        self.parse(verify_signature=not policy.unsigned_changes_ok)

        for field in self.mandatory_fields:
            if field not in self._dict:
                raise UploadError(
                    "Unable to find mandatory field '%s' in the changes "
                    "file." % field)

        try:
            format = float(self._dict["Format"])
        except KeyError:
            # If format is missing, pretend it's 1.5
            format = 1.5

        if format < 1.5 or format > 2.0:
            raise UploadError(
                "Format out of acceptable range for changes file. Range "
                "1.5 - 2.0, format %g" % format)
Code example #28
    def extractAndParseControl(self):
        """Extract and parse control information."""
        try:
            deb_file = apt_inst.DebFile(self.filepath)
            control_file = deb_file.control.extractdata("control")
            control_lines = apt_pkg.TagSection(control_file)
        except (SystemExit, KeyboardInterrupt):
            raise
        except:
            yield UploadError(
                "%s: extracting control file raised %s, giving up." %
                (self.filename, sys.exc_type))
            return

        for mandatory_field in self.mandatory_fields:
            if control_lines.find(mandatory_field) is None:
                yield UploadError("%s: control file lacks mandatory field %r" %
                                  (self.filename, mandatory_field))
        control = {}
        for key in control_lines.keys():
            control[key] = control_lines.find(key)
        self.parseControl(control)
Code example #29
    def checkFileName(self):
        """Make sure the changes file name is well-formed.

        Please note: for well-formed changes file names the `filename_archtag`
        property will be set appropriately.
        """
        match_changes = re_changes_file_name.match(self.filename)
        if match_changes is None:
            yield UploadError(
                '%s -> inappropriate changesfile name, '
                'should follow "<pkg>_<version>_<arch>.changes" format' %
                self.filename)
        else:
            self.filename_archtag = match_changes.group(3)
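
re_changes_file_name is defined elsewhere in Launchpad; the pattern below is a hypothetical approximation of a "<pkg>_<version>_<arch>.changes" match, shown only to illustrate how filename_archtag is derived from the third group:

import re

# Hypothetical stand-in for re_changes_file_name, not the Launchpad pattern.
re_changes_like = re.compile(r"^([^_]+)_([^_]+)_([^_.]+)\.changes$")

match = re_changes_like.match("hello_1.0-1_amd64.changes")
print(match.group(3) if match else None)  # amd64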
Code example #30
    def processFiles(self):
        """Build objects for each file mentioned in this changesfile.

        This method is an error generator, i.e., it returns an iterator over
        all exceptions that are generated while processing all mentioned
        files.
        """
        try:
            raw_files = parse_and_merge_file_lists(self._dict, changes=True)
        except UploadError as e:
            yield e
            return

        files = []
        for attr in raw_files:
            filename, hashes, size, component_and_section, priority_name = attr
            filepath = os.path.join(self.dirname, filename)
            try:
                if self.isCustom(component_and_section):
                    # This needs to be the first check, because
                    # otherwise the tarballs in custom uploads match
                    # with source_match.
                    file_instance = CustomUploadFile(filepath, hashes, size,
                                                     component_and_section,
                                                     priority_name,
                                                     self.policy, self.logger)
                else:
                    try:
                        package, cls = determine_file_class_and_name(filename)
                    except CannotDetermineFileTypeError:
                        yield UploadError(
                            "Unable to identify file %s (%s) in changes." %
                            (filename, component_and_section))
                        continue

                    file_instance = cls(filepath, hashes, size,
                                        component_and_section, priority_name,
                                        package, self.version, self,
                                        self.policy, self.logger)

                    if cls == DSCFile:
                        self.dsc = file_instance
                    elif cls == BuildInfoFile:
                        self.buildinfo = file_instance
            except UploadError as error:
                yield error
            else:
                files.append(file_instance)

        self.files = files