def got_file(ignored, filename, out_file, out_file_name):
    # Note: file_sha1 and private are free variables defined in the
    # enclosing scope in the original code.
    try:
        # If the requested file is the 'buildlog', compress it
        # using gzip before storing it in the Librarian.
        if file_sha1 == 'buildlog':
            out_file = open(out_file_name)
            filename += '.gz'
            out_file_name += '.gz'
            gz_file = gzip.GzipFile(out_file_name, mode='wb')
            copy_and_close(out_file, gz_file)
            os.remove(out_file_name.replace('.gz', ''))

        # Reopen the file, seek to its end to determine the size, then
        # seek back to the beginning, ready for adding to the Librarian.
        out_file = open(out_file_name)
        out_file.seek(0, 2)
        bytes_written = out_file.tell()
        out_file.seek(0)

        library_file = getUtility(ILibraryFileAliasSet).create(
            filename, bytes_written, out_file,
            contentType=filenameToContentType(filename),
            restricted=private)
    finally:
        # Remove the temporary file.  getFile() closes the file
        # object.
        os.remove(out_file_name)

    return library_file.id
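
Every example on this page funnels data through a copy_and_close helper. Its implementation isn't shown here; a minimal sketch, assuming it simply streams one file object into another in chunks and then closes both ends (this is an illustration, not the library's actual code), could look like this:

import shutil

def copy_and_close(from_file, to_file):
    # Stream from_file into to_file in chunks, then close both ends.
    # Sketch only, under the assumption stated above.
    shutil.copyfileobj(from_file, to_file)
    from_file.close()
    to_file.close()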
Example #3
def download_file(destination_path, libraryfile):
    """Download a file from the librarian to the destination path.

    :param destination_path: Absolute destination path (where the
        file should be downloaded to).
    :type destination_path: ``str``
    :param libraryfile: The librarian file that is to be downloaded.
    :type libraryfile: ``LibraryFileAlias``
    """
    libraryfile.open()
    destination_file = open(destination_path, 'w')
    copy_and_close(libraryfile, destination_file)
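
download_file is exercised again in the performDiff examples further down. A minimal illustrative call, assuming lfa is a LibraryFileAlias obtained elsewhere (both the destination path and the variable name are placeholders):

# Illustrative only: 'lfa' stands in for a LibraryFileAlias fetched
# from the librarian elsewhere.
download_file('/tmp/downloaded.tar.gz', lfa)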
Example #4
    def _compressFile(self, path):
        """Compress the file in the given path using bzip2.

        The compressed file will be written to the same path with a
        '.bz2' suffix, and the old file will be removed.

        :return: the path to the compressed file.
        """
        bz2_path = '%s.bz2' % path
        copy_and_close(open(path), bz2.BZ2File(bz2_path, mode='w'))
        os.remove(path)
        return bz2_path
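
The same copy_and_close pattern works in the other direction. As a hypothetical counterpart (the name _decompressFile and its signature are illustrative, not part of the original code), decompression is just a read from a BZ2File into a plain file:

import bz2

def _decompressFile(bz2_path, out_path):
    # Hypothetical counterpart to _compressFile above: stream the
    # bz2 contents back out to a plain file via copy_and_close.
    copy_and_close(bz2.BZ2File(bz2_path), open(out_path, 'wb'))
    return out_path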
Example #5
def download_file(destination_path, libraryfile):
    """Download a file from the librarian to the destination path.

    :param destination_path: Absolute destination path (where the
        file should be downloaded to).
    :type destination_path: ``str``
    :param libraryfile: The librarian file that is to be downloaded.
    :type libraryfile: ``LibraryFileAlias``
    """
    libraryfile.open()
    destination_file = open(destination_path, 'w')
    copy_and_close(libraryfile, destination_file)
Example #6
    def _compressFile(self, path):
        """Compress the file in the given path using bzip2.

        The compressed file will be written to the same path with a
        '.bz2' suffix, and the old file will be removed.

        :return: the path to the compressed file.
        """
        bz2_path = '%s.bz2' % path
        copy_and_close(open(path), bz2.BZ2File(bz2_path, mode='w'))
        os.remove(path)
        return bz2_path
Example #7
    def publish(cls, packageupload, libraryfilealias, logger=None):
        """See `ICustomUploadHandler`."""
        temp_dir = tempfile.mkdtemp()
        try:
            tarfile_path = os.path.join(temp_dir, libraryfilealias.filename)
            temp_file = open(tarfile_path, "wb")
            libraryfilealias.open()
            copy_and_close(libraryfilealias, temp_file)
            suite = packageupload.distroseries.getSuite(packageupload.pocket)
            upload = cls(logger=logger)
            upload.process(packageupload.archive, tarfile_path, suite)
        finally:
            shutil.rmtree(temp_dir)
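
publish creates a scratch directory and guarantees cleanup with try/finally. A hedged alternative sketch of the same mkdtemp/rmtree lifecycle wrapped in a context manager (the temporary_directory helper below is hypothetical, not part of the original code):

import contextlib
import shutil
import tempfile

@contextlib.contextmanager
def temporary_directory():
    # Hypothetical helper: same lifecycle that publish() manages
    # manually with try/finally.
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)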
Example #8
    def process(self, packageupload, libraryfilealias):
        pubconf = getPubConfig(packageupload.archive)
        if pubconf.metaroot is None:
            if self.logger is not None:
                self.logger.debug(
                    "Skipping meta-data for archive without metaroot.")
            return

        dest_file = os.path.join(pubconf.metaroot, libraryfilealias.filename)
        if not os.path.isdir(pubconf.metaroot):
            os.makedirs(pubconf.metaroot, 0o755)

        # At this point we now have a directory of the format:
        # <person_name>/meta/<ppa_name>
        # We're ready to copy the file out of the librarian into it.

        file_obj = open(dest_file, "wb")
        libraryfilealias.open()
        copy_and_close(libraryfilealias, file_obj)
Example #9
    def addFile(self, component, sha1, contents):
        """See DiskPool.addFile."""
        assert component in HARDCODED_COMPONENT_ORDER

        targetpath = self.pathFor(component)
        if not os.path.exists(os.path.dirname(targetpath)):
            os.makedirs(os.path.dirname(targetpath))

        if self.file_component:
            # There's something on disk. Check hash.
            if sha1 != self.file_hash:
                raise PoolFileOverwriteError('%s != %s for %s' %
                    (sha1, self.file_hash,
                     self.pathFor(self.file_component)))

            if (component == self.file_component
                or component in self.symlink_components):
                # The file is already here
                return FileAddActionEnum.NONE
            else:
                # The file is present in a different component,
                # make a symlink.
                relative_symlink(
                    self.pathFor(self.file_component), targetpath)
                self.symlink_components.add(component)
                # Then fix to ensure the right component is linked.
                self._sanitiseLinks()

                return FileAddActionEnum.SYMLINK_ADDED

        # If we get to here, we want to write the file.
        assert not os.path.exists(targetpath)

        self.debug("Making new file in %s for %s/%s" %
                   (component, self.source, self.filename))

        file_to_write = _diskpool_atomicfile(
            targetpath, "wb", rootpath=self.temppath)
        contents.open()
        copy_and_close(contents, file_to_write)
        self.file_component = component
        return FileAddActionEnum.FILE_ADDED
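
addFile relies on a relative_symlink helper to point a new component path at the file already stored under another component. A hypothetical sketch of such a helper (an assumption about its behaviour, not the project's actual implementation):

import os

def relative_symlink(src_path, dst_path):
    # Hypothetical sketch: create dst_path as a symlink to src_path,
    # expressed relative to dst_path's directory so the pool tree
    # stays relocatable.
    dst_dir = os.path.dirname(dst_path)
    os.symlink(os.path.relpath(src_path, dst_dir), dst_path)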
Example #10
    def checkFiles(self):
        """Check if mentioned files are present and match.

        We don't use NascentUploadFile.verify here; we only verify the
        size and checksum.
        """

        file_type_counts = {
            SourcePackageFileType.DIFF: 0,
            SourcePackageFileType.ORIG_TARBALL: 0,
            SourcePackageFileType.ORIG_TARBALL_SIGNATURE: 0,
            SourcePackageFileType.DEBIAN_TARBALL: 0,
            SourcePackageFileType.NATIVE_TARBALL: 0,
        }
        component_orig_tar_counts = {}
        component_orig_tar_signature_counts = {}
        bzip2_count = 0
        xz_count = 0
        files_missing = False

        for sub_dsc_file in self.files:
            file_type = determine_source_file_type(sub_dsc_file.filename)

            if file_type is None:
                yield UploadError('Unknown file: ' + sub_dsc_file.filename)
                continue

            if file_type == SourcePackageFileType.COMPONENT_ORIG_TARBALL:
                # Split the count by component name.
                component = re_is_component_orig_tar_ext.match(
                    get_source_file_extension(sub_dsc_file.filename)).group(1)
                if component not in component_orig_tar_counts:
                    component_orig_tar_counts[component] = 0
                component_orig_tar_counts[component] += 1
            elif (file_type ==
                  SourcePackageFileType.COMPONENT_ORIG_TARBALL_SIGNATURE):
                # Split the count by component name.
                component = re_is_component_orig_tar_ext_sig.match(
                    get_source_file_extension(sub_dsc_file.filename)).group(1)
                if component not in component_orig_tar_signature_counts:
                    component_orig_tar_signature_counts[component] = 0
                component_orig_tar_signature_counts[component] += 1
            else:
                file_type_counts[file_type] += 1

            if sub_dsc_file.filename.endswith('.bz2'):
                bzip2_count += 1
            elif sub_dsc_file.filename.endswith('.xz'):
                xz_count += 1

            try:
                library_file, file_archive = self._getFileByName(
                    sub_dsc_file.filename)
            except NotFoundError as error:
                library_file = None
                file_archive = None
            else:
                # Check the DSC-mentioned file against its copy already
                # in the librarian; if it's new (i.e. not found in the
                # librarian), skip the check. This prevents scary
                # duplicated filenames in the Librarian and misapplied
                # files in the archive; fixes bug #38636 and friends.
                if sub_dsc_file.checksums['MD5'] != library_file.content.md5:
                    yield UploadError(
                        "File %s already exists in %s, but uploaded version "
                        "has different contents. See more information about "
                        "this error in "
                        "https://help.launchpad.net/Packaging/UploadErrors." %
                        (sub_dsc_file.filename, file_archive.displayname))
                    files_missing = True
                    continue

            if not sub_dsc_file.exists_on_disk:
                if library_file is None:
                    # Report an error if the mentioned DSC file is
                    # neither included in the upload nor published in
                    # the context distribution.
                    yield UploadError(
                        "Unable to find %s in upload or distribution." %
                        (sub_dsc_file.filename))
                    files_missing = True
                    continue

                # Pump the file through.
                self.logger.debug("Pumping %s out of the librarian" %
                                  (sub_dsc_file.filename))
                library_file.open()
                target_file = open(sub_dsc_file.filepath, "wb")
                copy_and_close(library_file, target_file)

            for error in sub_dsc_file.verify():
                yield error
                files_missing = True

        try:
            file_checker = format_to_file_checker_map[self.format]
        except KeyError:
            raise AssertionError("No file checker for source format %s." %
                                 self.format)

        for error in file_checker(self.filename, file_type_counts,
                                  component_orig_tar_counts,
                                  component_orig_tar_signature_counts,
                                  bzip2_count, xz_count):
            yield error

        if files_missing:
            yield UploadError("Files specified in DSC are broken or missing, "
                              "skipping package unpack verification.")
        else:
            for error in self.unpackAndCheckSource():
                # Pass on errors found when unpacking the source.
                yield error
def fake_getFile(sum, file):
    dummy_tar = os.path.join(os.path.dirname(__file__),
                             'dummy_templates.tar.gz')
    tar_file = open(dummy_tar)
    copy_and_close(tar_file, file)
    return defer.succeed(None)
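
fake_getFile is a test stub: it ignores the checksum argument and streams a canned tarball into the supplied file object. A hypothetical way to wire it into a test (the builder object below is an illustrative stand-in, not taken from the original code):

# Hypothetical test wiring: every getFile() call now delivers the
# canned dummy_templates.tar.gz instead of contacting a real builder.
builder.getFile = fake_getFile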
Example #12
    def performDiff(self):
        """See `IPackageDiff`.

        This involves creating a temporary directory, downloading the files
        from both SPRs involved from the librarian, running debdiff, storing
        the output in the librarian and updating the PackageDiff record.
        """
        # Make sure the files associated with the two source packages are
        # still available in the librarian.
        if self._countDeletedLFAs() > 0:
            self.status = PackageDiffStatus.FAILED
            return

        blacklist = config.packagediff.blacklist.split()
        if self.from_source.sourcepackagename.name in blacklist:
            self.status = PackageDiffStatus.FAILED
            return

        # Create the temporary directory where the files will be
        # downloaded to and where the debdiff will be performed.
        tmp_dir = tempfile.mkdtemp()

        try:
            directions = ('from', 'to')

            # Keep track of the files belonging to the respective packages.
            downloaded = dict(zip(directions, ([], [])))

            # Make it easy to iterate over packages.
            packages = dict(zip(directions,
                                (self.from_source, self.to_source)))

            # Iterate over the packages to be diff'ed.
            for direction, package in packages.iteritems():
                # Create distinct directory locations for
                # 'from' and 'to' files.
                absolute_path = os.path.join(tmp_dir, direction)
                os.makedirs(absolute_path)

                # Download the files associated with each package in
                # their corresponding relative location.
                for file in package.files:
                    the_name = file.libraryfile.filename
                    relative_location = os.path.join(direction, the_name)
                    downloaded[direction].append(relative_location)
                    destination_path = os.path.join(absolute_path, the_name)
                    download_file(destination_path, file.libraryfile)

            # All downloads are done. Construct the name of the resulting
            # diff file.
            result_filename = '%s_%s_%s.diff' % (
                self.from_source.sourcepackagename.name,
                self.from_source.version, self.to_source.version)

            # Perform the actual diff operation.
            return_code, stderr = perform_deb_diff(tmp_dir, result_filename,
                                                   downloaded['from'],
                                                   downloaded['to'])

            # If `debdiff` failed (any exit code other than 0 or 1),
            # mark the package diff request accordingly and return.
            # 0 means no differences, 1 means they differ.  Note that
            # pre-Karmic debdiff will return 0 even if they differ.
            if return_code not in (0, 1):
                self.status = PackageDiffStatus.FAILED
                return

            # Compress the generated diff.
            out_file = open(os.path.join(tmp_dir, result_filename))
            gzip_result_filename = result_filename + '.gz'
            gzip_file_path = os.path.join(tmp_dir, gzip_result_filename)
            gzip_file = gzip.GzipFile(gzip_file_path, mode='wb')
            copy_and_close(out_file, gzip_file)

            # Calculate the compressed size.
            gzip_size = os.path.getsize(gzip_file_path)

            # Upload the compressed diff to librarian and update
            # the package diff request.
            gzip_file = open(gzip_file_path)
            try:
                librarian_set = getUtility(ILibraryFileAliasSet)
                self.diff_content = librarian_set.create(
                    gzip_result_filename,
                    gzip_size,
                    gzip_file,
                    'application/gzipped-patch',
                    restricted=self.private)
            finally:
                gzip_file.close()

            # Last but not least, mark the diff as COMPLETED.
            self.date_fulfilled = UTC_NOW
            self.status = PackageDiffStatus.COMPLETED
        finally:
            shutil.rmtree(tmp_dir)
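
performDiff delegates the actual comparison to a perform_deb_diff helper. A hedged sketch of what such a helper could look like, assuming it runs the debdiff command over the two downloaded .dsc files and captures its output (the real helper's exact signature and behaviour may differ):

import os
import subprocess

def perform_deb_diff(tmp_dir, out_filename, from_files, to_files):
    # Sketch only: run debdiff over the 'from' and 'to' .dsc files
    # (paths relative to tmp_dir) and write its stdout to
    # out_filename, returning (exit code, stderr).
    from_dsc = [name for name in from_files if name.endswith('.dsc')][0]
    to_dsc = [name for name in to_files if name.endswith('.dsc')][0]
    out_path = os.path.join(tmp_dir, out_filename)
    with open(out_path, 'wb') as out_file:
        process = subprocess.Popen(
            ['debdiff', from_dsc, to_dsc],
            stdout=out_file, stderr=subprocess.PIPE, cwd=tmp_dir)
        _, stderr = process.communicate()
    return process.returncode, stderr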
Example #13
    def checkFiles(self):
        """Check if mentioned files are present and match.

        We don't use NascentUploadFile.verify here; we only verify the
        size and checksum.
        """

        file_type_counts = {
            SourcePackageFileType.DIFF: 0,
            SourcePackageFileType.ORIG_TARBALL: 0,
            SourcePackageFileType.DEBIAN_TARBALL: 0,
            SourcePackageFileType.NATIVE_TARBALL: 0,
            }
        component_orig_tar_counts = {}
        bzip2_count = 0
        xz_count = 0
        files_missing = False

        for sub_dsc_file in self.files:
            file_type = determine_source_file_type(sub_dsc_file.filename)

            if file_type is None:
                yield UploadError('Unknown file: ' + sub_dsc_file.filename)
                continue

            if file_type == SourcePackageFileType.COMPONENT_ORIG_TARBALL:
                # Split the count by component name.
                component = re_is_component_orig_tar_ext.match(
                    get_source_file_extension(sub_dsc_file.filename)).group(1)
                if component not in component_orig_tar_counts:
                    component_orig_tar_counts[component] = 0
                component_orig_tar_counts[component] += 1
            else:
                file_type_counts[file_type] += 1

            if sub_dsc_file.filename.endswith('.bz2'):
                bzip2_count += 1
            elif sub_dsc_file.filename.endswith('.xz'):
                xz_count += 1

            try:
                library_file, file_archive = self._getFileByName(
                    sub_dsc_file.filename)
            except NotFoundError as error:
                library_file = None
                file_archive = None
            else:
                # Check the DSC-mentioned file against its copy already
                # in the librarian; if it's new (i.e. not found in the
                # librarian), skip the check. This prevents scary
                # duplicated filenames in the Librarian and misapplied
                # files in the archive; fixes bug #38636 and friends.
                if sub_dsc_file.checksums['MD5'] != library_file.content.md5:
                    yield UploadError(
                        "File %s already exists in %s, but uploaded version "
                        "has different contents. See more information about "
                        "this error in "
                        "https://help.launchpad.net/Packaging/UploadErrors." %
                        (sub_dsc_file.filename, file_archive.displayname))
                    files_missing = True
                    continue

            if not sub_dsc_file.exists_on_disk:
                if library_file is None:
                    # Report an error if the mentioned DSC file is
                    # neither included in the upload nor published in
                    # the context distribution.
                    yield UploadError(
                        "Unable to find %s in upload or distribution."
                        % (sub_dsc_file.filename))
                    files_missing = True
                    continue

                # Pump the file through.
                self.logger.debug("Pumping %s out of the librarian" % (
                    sub_dsc_file.filename))
                library_file.open()
                target_file = open(sub_dsc_file.filepath, "wb")
                copy_and_close(library_file, target_file)

            for error in sub_dsc_file.verify():
                yield error
                files_missing = True

        try:
            file_checker = format_to_file_checker_map[self.format]
        except KeyError:
            raise AssertionError(
                "No file checker for source format %s." % self.format)

        for error in file_checker(
            self.filename, file_type_counts, component_orig_tar_counts,
            bzip2_count, xz_count):
            yield error

        if files_missing:
            yield UploadError(
                "Files specified in DSC are broken or missing, "
                "skipping package unpack verification.")
        else:
            for error in self.unpackAndCheckSource():
                # Pass on errors found when unpacking the source.
                yield error
Example #14
    def performDiff(self):
        """See `IPackageDiff`.

        This involves creating a temporary directory, downloading the files
        from both SPRs involved from the librarian, running debdiff, storing
        the output in the librarian and updating the PackageDiff record.
        """
        # Make sure the files associated with the two source packages are
        # still available in the librarian.
        if self._countDeletedLFAs() > 0:
            self.status = PackageDiffStatus.FAILED
            return

        # Create the temporary directory where the files will be
        # downloaded to and where the debdiff will be performed.
        tmp_dir = tempfile.mkdtemp()

        try:
            directions = ('from', 'to')

            # Keep track of the files belonging to the respective packages.
            downloaded = dict(zip(directions, ([], [])))

            # Make it easy to iterate over packages.
            packages = dict(
                zip(directions, (self.from_source, self.to_source)))

            # Iterate over the packages to be diff'ed.
            for direction, package in packages.iteritems():
                # Create distinct directory locations for
                # 'from' and 'to' files.
                absolute_path = os.path.join(tmp_dir, direction)
                os.makedirs(absolute_path)

                # Download the files associated with each package in
                # their corresponding relative location.
                for file in package.files:
                    the_name = file.libraryfile.filename
                    relative_location = os.path.join(direction, the_name)
                    downloaded[direction].append(relative_location)
                    destination_path = os.path.join(absolute_path, the_name)
                    download_file(destination_path, file.libraryfile)

            # All downloads are done. Construct the name of the resulting
            # diff file.
            result_filename = '%s_%s_%s.diff' % (
                self.from_source.sourcepackagename.name,
                self.from_source.version,
                self.to_source.version)

            # Perform the actual diff operation.
            return_code, stderr = perform_deb_diff(
                tmp_dir, result_filename, downloaded['from'],
                downloaded['to'])

            # If `debdiff` failed (any exit code other than 0 or 1),
            # mark the package diff request accordingly and return.
            # 0 means no differences, 1 means they differ.  Note that
            # pre-Karmic debdiff will return 0 even if they differ.
            if return_code not in (0, 1):
                self.status = PackageDiffStatus.FAILED
                return

            # Compress the generated diff.
            out_file = open(os.path.join(tmp_dir, result_filename))
            gzip_result_filename = result_filename + '.gz'
            gzip_file_path = os.path.join(tmp_dir, gzip_result_filename)
            gzip_file = gzip.GzipFile(gzip_file_path, mode='wb')
            copy_and_close(out_file, gzip_file)

            # Calculate the compressed size.
            gzip_size = os.path.getsize(gzip_file_path)

            # Upload the compressed diff to librarian and update
            # the package diff request.
            gzip_file = open(gzip_file_path)
            try:
                librarian_set = getUtility(ILibraryFileAliasSet)
                self.diff_content = librarian_set.create(
                    gzip_result_filename, gzip_size, gzip_file,
                    'application/gzipped-patch', restricted=self.private)
            finally:
                gzip_file.close()

            # Last but not least, mark the diff as COMPLETED.
            self.date_fulfilled = UTC_NOW
            self.status = PackageDiffStatus.COMPLETED
        finally:
            shutil.rmtree(tmp_dir)
def fake_getFile(sum, file):
    dummy_tar = os.path.join(
        os.path.dirname(__file__), 'dummy_templates.tar.gz')
    tar_file = open(dummy_tar)
    copy_and_close(tar_file, file)
    return defer.succeed(None)