Code example #1
 def openArchive(self):
     self.version = "20070214ubuntu1"
     self.arch = "i386"
     self.path = os.path.join(
         self.temp_dir,
         "debian-installer-images_%s_%s.tar.gz" % (self.version, self.arch))
     self.buffer = open(self.path, "wb")
     self.archive = LaunchpadWriteTarFile(self.buffer)
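
Most examples on this page follow the same write-then-close pattern: wrap LaunchpadWriteTarFile around a writable file object, add members with add_file (plus add_directory or add_symlink in some tests), then close the archive before using the underlying buffer. As a point of reference only, here is a minimal, runnable sketch of that pattern using the standard library's tarfile module; it is not LaunchpadWriteTarFile itself, and the file name below is simply reused from the example above.

import io
import tarfile

# Stream one member into a gzipped tarball wrapped around a plain file,
# mirroring the open-buffer / add_file / close sequence shown above.
with open("debian-installer-images_20070214ubuntu1_i386.tar.gz", "wb") as buffer:
    with tarfile.open(fileobj=buffer, mode="w:gz") as archive:
        contents = b"world"
        info = tarfile.TarInfo(name="installer-i386/20070214ubuntu1/hello")
        info.size = len(contents)
        archive.addfile(info, io.BytesIO(contents))  # roughly add_file(path, contents)
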
Code example #2
 def __init__(self, single_file_storage=None):
     """Initialze empty storage strategy, or subsume single-file one."""
     self.buffer = tempfile.TemporaryFile()
     self.tar_writer = LaunchpadWriteTarFile(self.buffer)
     if single_file_storage is not None:
         self.addFile(single_file_storage.path,
                      single_file_storage.extension,
                      single_file_storage.content,
                      single_file_storage.mime_type)
Code example #3
class TarballFileStorageStrategy(StorageStrategy):
    """Store any number of files for export as a tarball.

    Similar to `SingleFileStorageStrategy`, but lets you store any number of
    files using the same API.  Each file is written into the resulting tarball
    as soon as it is added.  There is no need to keep the full contents of the
    tarball in memory at any single time.
    """
    mime_type = 'application/x-gtar'

    empty = False

    def __init__(self, single_file_storage=None):
        """Initialze empty storage strategy, or subsume single-file one."""
        self.buffer = tempfile.TemporaryFile()
        self.tar_writer = LaunchpadWriteTarFile(self.buffer)
        if single_file_storage is not None:
            self.addFile(single_file_storage.path,
                         single_file_storage.extension,
                         single_file_storage.content,
                         single_file_storage.mime_type)

    def addFile(self, path, extension, content, mime_type):
        """See `StorageStrategy`."""
        # Tarballs don't store MIME types, so ignore that.
        self.empty = False
        self.tar_writer.add_file(path, content)

    def isEmpty(self):
        """See `StorageStrategy`."""
        return self.empty

    def isFull(self):
        """See `StorageStrategy`.

        A `TarballFileStorageStrategy` can store any number of files, so no.
        """
        return False

    def export(self):
        """See `StorageStrategy`."""
        self.tar_writer.close()
        self.buffer.seek(0)
        output = ExportedTranslationFile(self.buffer)

        # Don't set path; let the caller decide.

        # For tar.gz files, the standard content type is application/x-gtar.
        # You can see more info on
        #   http://en.wikipedia.org/wiki/List_of_archive_formats
        output.content_type = self.mime_type
        output.file_extension = 'tar.gz'
        return output
Code example #4
class TarballFileStorageStrategy(StorageStrategy):
    """Store any number of files for export as a tarball.

    Similar to `SingleFileStorageStrategy`, but lets you store any number of
    files using the same API.  Each file is written into the resulting tarball
    as soon as it is added.  There is no need to keep the full contents of the
    tarball in memory at any single time.
    """
    mime_type = 'application/x-gtar'

    empty = False

    def __init__(self, single_file_storage=None):
        """Initialze empty storage strategy, or subsume single-file one."""
        self.buffer = tempfile.TemporaryFile()
        self.tar_writer = LaunchpadWriteTarFile(self.buffer)
        if single_file_storage is not None:
            self.addFile(
                single_file_storage.path, single_file_storage.extension,
                single_file_storage.content, single_file_storage.mime_type)

    def addFile(self, path, extension, content, mime_type):
        """See `StorageStrategy`."""
        # Tarballs don't store MIME types, so ignore that.
        self.empty = False
        self.tar_writer.add_file(path, content)

    def isEmpty(self):
        """See `StorageStrategy`."""
        return self.empty

    def isFull(self):
        """See `StorageStrategy`.

        A `TarballFileStorageStrategy` can store any number of files, so no.
        """
        return False

    def export(self):
        """See `StorageStrategy`."""
        self.tar_writer.close()
        self.buffer.seek(0)
        output = ExportedTranslationFile(self.buffer)

        # Don't set path; let the caller decide.

        # For tar.gz files, the standard content type is application/x-gtar.
        # You can see more info on
        #   http://en.wikipedia.org/wiki/List_of_archive_formats
        output.content_type = self.mime_type
        output.file_extension = 'tar.gz'
        return output
Code example #5
def make_test_tarball_1():
    '''
    Generate a test tarball that looks something like a source tarball which
    has exactly one directory called 'po' which is interesting (i.e. contains
    some files which look like POT/PO files).

    >>> tarball = make_test_tarball_1()

    Check it looks vaguely sensible.

    >>> names = tarball.getnames()
    >>> 'uberfrob-0.1/po/cy.po' in names
    True
    '''

    return LaunchpadWriteTarFile.files_to_tarfile({
        'uberfrob-0.1/README':
            'Uberfrob is an advanced frobnicator.',
        'uberfrob-0.1/po/cy.po':
            '# Blah.',
        'uberfrob-0.1/po/es.po':
            '# Blah blah.',
        'uberfrob-0.1/po/uberfrob.pot':
            '# Yowza!',
        'uberfrob-0.1/blah/po/la':
            'la la',
        'uberfrob-0.1/uberfrob.py':
            'import sys\n'
            'print "Frob!"\n',
        })
Code example #6
 def __init__(self, single_file_storage=None):
     """Initialze empty storage strategy, or subsume single-file one."""
     self.buffer = tempfile.TemporaryFile()
     self.tar_writer = LaunchpadWriteTarFile(self.buffer)
     if single_file_storage is not None:
         self.addFile(
             single_file_storage.path, single_file_storage.extension,
             single_file_storage.content, single_file_storage.mime_type)
Code example #7
 def makeTranslationsLFA(self):
     """Create an LibraryFileAlias containing dummy translation data."""
     test_tar_content = {
         'source/po/foo.pot': 'Foo template',
         'source/po/eo.po': 'Foo translation',
         }
     tarfile_content = LaunchpadWriteTarFile.files_to_string(
         test_tar_content)
     return self.factory.makeLibraryFileAlias(content=tarfile_content)
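
The files_to_string helper used here appears to take a {path: contents} mapping and return the complete gzipped tarball as a single string, which is then stored as a LibraryFileAlias. The following is a hedged stdlib stand-in for that behaviour, inferred only from these call sites rather than from the Launchpad implementation.

import io
import tarfile

def files_to_tar_bytes(files):
    # Build a gzipped tarball in memory from a {path: contents} mapping and
    # return its raw bytes -- an assumption about what files_to_string does.
    buffer = io.BytesIO()
    with tarfile.open(fileobj=buffer, mode="w:gz") as archive:
        for path, contents in sorted(files.items()):
            if isinstance(contents, str):
                contents = contents.encode("utf-8")
            info = tarfile.TarInfo(name=path)
            info.size = len(contents)
            archive.addfile(info, io.BytesIO(contents))
    return buffer.getvalue()

# Mirrors the test data above:
# files_to_tar_bytes({'source/po/foo.pot': 'Foo template',
#                     'source/po/eo.po': 'Foo translation'})
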
Code example #8
 def makeTranslationsLFA(self):
     """Create an LibraryFileAlias containing dummy translation data."""
     test_tar_content = {
         'source/po/foo.pot': 'Foo template',
         'source/po/eo.po': 'Foo translation',
     }
     tarfile_content = LaunchpadWriteTarFile.files_to_string(
         test_tar_content)
     return self.factory.makeLibraryFileAlias(content=tarfile_content)
Code example #9
 def test_addOrUpdateEntriesFromTarball_path_leading_slash(self):
     # Leading slashes are stripped from path names.
     path, content = self._makeFile('pot', '/directory')
     files = dict(((path, content),))
     tarfile_content = LaunchpadWriteTarFile.files_to_string(files)
     self.import_queue.addOrUpdateEntriesFromTarball(
         tarfile_content, True, self.importer,
         productseries=self.productseries)
     stripped_path = path.lstrip('/')
     self.assertEqual([stripped_path], self._getQueuePaths())
Code example #10
 def test_addOrUpdateEntriesFromTarball_only_translation_files(self):
     # Only files with the right extensions are added.
     files = dict((
         self._makeFile(),
         ))
     tarfile_content = LaunchpadWriteTarFile.files_to_string(files)
     self.import_queue.addOrUpdateEntriesFromTarball(
         tarfile_content, True, self.importer,
         productseries=self.productseries)
     self.assertEqual([], self._getQueuePaths())
Code example #11
 def test_addOrUpdateEntriesFromTarball_only_translation_files(self):
     # Only files with the right extensions are added.
     files = dict((self._makeFile(), ))
     tarfile_content = LaunchpadWriteTarFile.files_to_stream(files)
     self.import_queue.addOrUpdateEntriesFromTarball(
         tarfile_content,
         True,
         self.importer,
         productseries=self.productseries)
     self.assertEqual([], self._getQueuePaths())
Code example #12
 def test_addOrUpdateEntriesFromTarball_path(self):
     # File names are stored with their full paths.
     files = dict((
         self._makeFile('pot', 'directory'),
         ))
     tarfile_content = LaunchpadWriteTarFile.files_to_string(files)
     self.import_queue.addOrUpdateEntriesFromTarball(
         tarfile_content, True, self.importer,
         productseries=self.productseries)
     self.assertEqual(files.keys(), self._getQueuePaths())
Code example #13
 def test_addOrUpdateEntriesFromTarball_path(self):
     # File names are stored with their full paths.
     files = dict((self._makeFile('pot', 'directory'), ))
     tarfile_content = LaunchpadWriteTarFile.files_to_stream(files)
     self.import_queue.addOrUpdateEntriesFromTarball(
         tarfile_content,
         True,
         self.importer,
         productseries=self.productseries)
     self.assertEqual(files.keys(), self._getQueuePaths())
Code example #14
 def makeTranslationsLFA(self, tar_content=None, filename=None):
     """Create an LibraryFileAlias containing dummy translation data."""
     if tar_content is None:
         tar_content = {
             'source/po/foo.pot': b'Foo template',
             'source/po/eo.po': b'Foo translation',
             }
     tarfile_content = LaunchpadWriteTarFile.files_to_string(
         tar_content)
     return self.factory.makeLibraryFileAlias(content=tarfile_content,
                                              filename=filename)
Code example #15
 def test_addOrUpdateEntriesFromTarball_baseline(self):
     # Files from a tarball are placed in the queue.
     files = dict((
         self._makeFile('pot'),
         self._makeFile('po'),
         self._makeFile('xpi'),
         ))
     tarfile_content = LaunchpadWriteTarFile.files_to_string(files)
     self.import_queue.addOrUpdateEntriesFromTarball(
         tarfile_content, True, self.importer,
         productseries=self.productseries)
     self.assertContentEqual(files.keys(), self._getQueuePaths())
Code example #16
def make_test_tarball_2():
    r'''
    Generate a test tarball string that has some interesting files in a common
    prefix.

    >>> tarball = make_test_tarball_2()

    Check the expected files are in the archive.

    # XXX: 2010-04-26, Salgado, bug=570244: This rstrip('/') is to make the
    # test pass on python2.5 and 2.6.
    >>> [name.rstrip('/') for name in tarball.getnames()]
    ['test', 'test/cy.po', 'test/es.po', 'test/test.pot']

    Check the contents.

    >>> f = tarball.extractfile('test/cy.po')
    >>> f.readline()
    '# Test PO file.\n'
    '''

    pot = dedent("""
        # Test POT file.
        msgid "foo"
        msgstr ""
        """).strip()

    po = dedent("""
        # Test PO file.
        msgid "foo"
        msgstr "bar"
        """).strip()

    return LaunchpadWriteTarFile.files_to_tarfile({
        'test/test.pot': pot,
        'test/cy.po': po,
        'test/es.po': po,
    })
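
files_to_tarfile evidently returns a readable tarfile object built from the mapping, since the doctest immediately calls getnames() and extractfile() on the result. Below is a small runnable stdlib sketch of that round trip; it is an assumption about the helper's behaviour based on these call sites, not the Launchpad code itself.

import io
import tarfile

files = {
    'test/test.pot': b'# Test POT file.\nmsgid "foo"\nmsgstr ""',
    'test/cy.po': b'# Test PO file.\nmsgid "foo"\nmsgstr "bar"',
}

# Write the mapping into an in-memory gzipped tarball...
buffer = io.BytesIO()
with tarfile.open(fileobj=buffer, mode="w:gz") as writer:
    for path, contents in sorted(files.items()):
        info = tarfile.TarInfo(name=path)
        info.size = len(contents)
        writer.addfile(info, io.BytesIO(contents))

# ...then reopen it for reading, as the doctest does with the returned tarball.
buffer.seek(0)
tarball = tarfile.open(fileobj=buffer, mode="r:gz")
print(tarball.getnames())                            # ['test/cy.po', 'test/test.pot']
print(tarball.extractfile('test/cy.po').readline())  # b'# Test PO file.\n'
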
Code example #17
def make_test_tarball_2():
    r'''
    Generate a test tarball string that has some interesting files in a common
    prefix.

    >>> tarball = make_test_tarball_2()

    Check the expected files are in the archive.

    >>> tarball.getnames()
    ['test', 'test/cy.po', 'test/es.po', 'test/test.pot']

    Check the contents.

    >>> f = tarball.extractfile('test/cy.po')
    >>> f.readline()
    '# Test PO file.\n'
    '''

    pot = dedent("""
        # Test POT file.
        msgid "foo"
        msgstr ""
        """).strip()

    po = dedent("""
        # Test PO file.
        msgid "foo"
        msgstr "bar"
        """).strip()

    return LaunchpadWriteTarFile.files_to_tarfile({
        'test/test.pot': pot,
        'test/cy.po': po,
        'test/es.po': po,
    })
Code example #18
class TestDistUpgrader(TestCase):

    def setUp(self):
        super(TestDistUpgrader, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.pubconf = FakeConfig(self.temp_dir)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 022.
        old_umask = os.umask(022)
        self.addCleanup(os.umask, old_umask)

    def openArchive(self, version):
        self.path = os.path.join(
            self.temp_dir, "dist-upgrader_%s_all.tar.gz" % version)
        self.buffer = open(self.path, "wb")
        self.archive = LaunchpadWriteTarFile(self.buffer)

    def process(self):
        self.archive.close()
        self.buffer.close()
        process_dist_upgrader(self.pubconf, self.path, self.suite)

    def getUpgraderPath(self):
        return os.path.join(
            self.temp_dir, "dists", self.suite, "main", "dist-upgrader-all")

    def test_basic(self):
        # Processing a simple correct tar file works.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/hello", "world")
        self.process()

    def test_already_exists(self):
        # If the target directory already exists, processing fails.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/hello", "world")
        os.makedirs(os.path.join(self.getUpgraderPath(), "20060302.0120"))
        self.assertRaises(CustomUploadAlreadyExists, self.process)

    def test_bad_umask(self):
        # The umask must be 022 to avoid incorrect permissions.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/file", "foo")
        os.umask(002)  # cleanup already handled by setUp
        self.assertRaises(CustomUploadBadUmask, self.process)

    def test_current_symlink(self):
        # A "current" symlink is created to the last version.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/hello", "world")
        self.process()
        upgrader_path = self.getUpgraderPath()
        self.assertContentEqual(
            ["20060302.0120", "current"], os.listdir(upgrader_path))
        self.assertEqual(
            "20060302.0120",
            os.readlink(os.path.join(upgrader_path, "current")))
        self.assertContentEqual(
            ["hello"],
            os.listdir(os.path.join(upgrader_path, "20060302.0120")))

    def test_bad_version(self):
        # Bad versions in the tarball are refused.
        self.openArchive("20070219.1234")
        self.archive.add_file("foobar/foobar/dapper.tar.gz", "")
        self.assertRaises(DistUpgraderBadVersion, self.process)

    def test_getSeriesKey_extracts_architecture(self):
        # getSeriesKey extracts the architecture from an upload's filename.
        self.openArchive("20060302.0120")
        self.assertEqual("all", DistUpgraderUpload.getSeriesKey(self.path))

    def test_getSeriesKey_returns_None_on_mismatch(self):
        # getSeriesKey returns None if the filename does not match the
        # expected pattern.
        self.assertIsNone(DistUpgraderUpload.getSeriesKey("argh_1.0.jpg"))

    def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
        # getSeriesKey requires exactly three fields.
        self.assertIsNone(DistUpgraderUpload.getSeriesKey(
            "package_1.0.tar.gz"))
        self.assertIsNone(DistUpgraderUpload.getSeriesKey(
            "one_two_three_four_5.tar.gz"))
Code example #19
 def openArchive(self, version):
     self.path = os.path.join(
         self.temp_dir, "translations_main_%s.tar.gz" % version)
     self.buffer = open(self.path, "wb")
     self.archive = LaunchpadWriteTarFile(self.buffer)
Code example #20
def export(distroseries, component, update, force_utf8, logger):
    """Return a pair containing a filehandle from which the distribution's
    translations tarball can be read and the size of the tarball in bytes.

    :arg distroseries: The `IDistroSeries` we want to export from.
    :arg component: The component name from the given distribution series.
    :arg update: Whether the export should be an update from the last export.
    :arg force_utf8: Whether the export should have all files exported as
        UTF-8.
    :arg logger: A logger object.
    """
    # We will need the date the export started later on, so we can add a
    # timestamp for this export inside the exported tarball.
    start_date = datetime.datetime.utcnow().strftime('%Y%m%d')
    export_set = getUtility(IVPOExportSet)

    logger.debug("Selecting PO files for export")

    date = None
    if update:
        # Get the export date for the current base language pack.
        date = distroseries.language_pack_base.date_exported

    pofile_count = export_set.get_distroseries_pofiles_count(distroseries,
                                                             date,
                                                             component,
                                                             languagepack=True)
    logger.info("Number of PO files to export: %d" % pofile_count)

    filehandle = tempfile.TemporaryFile()
    archive = LaunchpadWriteTarFile(filehandle)

    # XXX JeroenVermeulen 2008-02-06: Is there anything here that we can unify
    # with the export-queue code?
    xpi_templates_to_export = set()
    path_prefix = 'rosetta-%s' % distroseries.name

    pofiles = export_set.get_distroseries_pofiles(distroseries,
                                                  date,
                                                  component,
                                                  languagepack=True)

    # Manual caching.  Fetch POTMsgSets in bulk per template, and cache
    # them across POFiles if subsequent POFiles belong to the same
    # template.
    cached_potemplate = None
    cached_potmsgsets = []

    for index, pofile in enumerate(pofiles):
        number = index + 1
        logger.debug("Exporting PO file %d (%d/%d)" %
                     (pofile.id, number, pofile_count))

        potemplate = pofile.potemplate
        if potemplate != cached_potemplate:
            # Launchpad's StupidCache caches absolutely everything,
            # which causes us to run out of memory.  We know at this
            # point that we don't have useful references to potemplate's
            # messages anymore, so remove them forcibly from the cache.
            store = Store.of(potemplate)
            for potmsgset in cached_potmsgsets:
                store.invalidate(potmsgset.msgid_singular)
                store.invalidate(potmsgset)

            # Commit a transaction with every PO template and its
            # PO files exported so we don't keep it open for too long.
            transaction.commit()

            cached_potemplate = potemplate
            cached_potmsgsets = [
                potmsgset for potmsgset in potemplate.getPOTMsgSets()
            ]

            if ((index + 1) % 5) == 0:
                # Garbage-collect once in 5 templates (but not at the
                # very beginning).  Bit too expensive to do for each
                # one.
                gc.collect()

        domain = potemplate.translation_domain.encode('ascii')
        code = pofile.getFullLanguageCode().encode('UTF-8')

        if potemplate.source_file_format == TranslationFileFormat.XPI:
            xpi_templates_to_export.add(potemplate)
            path = os.path.join(path_prefix, 'xpi', domain, '%s.po' % code)
        else:
            path = os.path.join(path_prefix, code, 'LC_MESSAGES',
                                '%s.po' % domain)

        try:
            # We don't want obsolete entries here, it makes no sense for a
            # language pack.
            contents = pofile.export(ignore_obsolete=True,
                                     force_utf8=force_utf8)

            # Store it in the tarball.
            archive.add_file(path, contents)
        except:
            logger.exception("Uncaught exception while exporting PO file %d" %
                             pofile.id)

        store.invalidate(pofile)

    logger.info("Exporting XPI template files.")
    librarian_client = getUtility(ILibrarianClient)
    for template in xpi_templates_to_export:
        if template.source_file is None:
            logger.warning("%s doesn't have source file registered." %
                           potemplate.title)
            continue
        domain = template.translation_domain.encode('ascii')
        archive.add_file(
            os.path.join(path_prefix, 'xpi', domain, 'en-US.xpi'),
            librarian_client.getFileByAlias(template.source_file.id).read())

    logger.info("Adding timestamp file")
    # It is important that the timestamp contain the date when the export
    # started, not when it finished, because that indicates how old the
    # information in the export is.
    archive.add_file('rosetta-%s/timestamp.txt' % distroseries.name,
                     '%s\n' % start_date)

    logger.info("Adding mapping file")
    mapping_text = ''
    mapping = iter_sourcepackage_translationdomain_mapping(distroseries)
    for sourcepackagename, translationdomain in mapping:
        mapping_text += "%s %s\n" % (sourcepackagename, translationdomain)
    archive.add_file('rosetta-%s/mapping.txt' % distroseries.name,
                     mapping_text)

    logger.info("Done.")

    archive.close()
    size = filehandle.tell()
    filehandle.seek(0)

    return filehandle, size
Code example #21
 def openArchive(self, version):
     self.path = os.path.join(self.temp_dir,
                              "dist-upgrader_%s_all.tar.gz" % version)
     self.buffer = open(self.path, "wb")
     self.tarfile = LaunchpadWriteTarFile(self.buffer)
Code example #22
File: test_gina.py Project: vitaminmoo/unnaturalcode
    def test_unpack_dsc_with_vendor(self):
        # Some source packages unpack differently depending on dpkg's idea
        # of the "vendor", and in extreme cases may even fail with some
        # vendors.  gina always sets the vendor to the target distribution
        # name to ensure that it unpacks packages as if unpacking on that
        # distribution.
        archive_root = self.useTempDir()
        pool_dir = os.path.join(archive_root, "pool/main/f/foo")
        os.makedirs(pool_dir)

        # Synthesise a package that can be unpacked with DEB_VENDOR=debian
        # but not with DEB_VENDOR=ubuntu.
        with open(os.path.join(pool_dir, "foo_1.0.orig.tar.gz"), "wb+") as buffer:
            orig_tar = LaunchpadWriteTarFile(buffer)
            orig_tar.add_directory("foo-1.0")
            orig_tar.close()
            buffer.seek(0)
            orig_tar_contents = buffer.read()
        with open(os.path.join(pool_dir, "foo_1.0-1.debian.tar.gz"), "wb+") as buffer:
            debian_tar = LaunchpadWriteTarFile(buffer)
            debian_tar.add_file("debian/source/format", "3.0 (quilt)\n")
            debian_tar.add_file("debian/patches/ubuntu.series", "--- corrupt patch\n")
            debian_tar.add_file("debian/rules", "")
            debian_tar.close()
            buffer.seek(0)
            debian_tar_contents = buffer.read()
        dsc_path = os.path.join(pool_dir, "foo_1.0-1.dsc")
        with open(dsc_path, "w") as dsc:
            dsc.write(
                dedent(
                    """\
                Format: 3.0 (quilt)
                Source: foo
                Binary: foo
                Architecture: all
                Version: 1.0-1
                Maintainer: Foo Bar <*****@*****.**>
                Files:
                 %s %s foo_1.0.orig.tar.gz
                 %s %s foo_1.0-1.debian.tar.gz
                """
                    % (
                        hashlib.md5(orig_tar_contents).hexdigest(),
                        len(orig_tar_contents),
                        hashlib.md5(debian_tar_contents).hexdigest(),
                        len(debian_tar_contents),
                    )
                )
            )

        dsc_contents = parse_tagfile(dsc_path)
        dsc_contents["Directory"] = pool_dir
        dsc_contents["Package"] = "foo"
        dsc_contents["Component"] = "main"
        dsc_contents["Section"] = "misc"

        sp_data = SourcePackageData(**dsc_contents)
        # Unpacking this in an Ubuntu context fails.
        self.assertRaises(ExecutionError, sp_data.do_package, "ubuntu", archive_root)
        # But all is well in a Debian context.
        sp_data.do_package("debian", archive_root)
Code example #23
class TestUefi(TestCase):

    def setUp(self):
        super(TestUefi, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.uefi_dir = self.makeTemporaryDirectory()
        self.pubconf = FakeConfig(self.temp_dir, self.uefi_dir)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 022.
        old_umask = os.umask(022)
        self.addCleanup(os.umask, old_umask)

    def setUpKeyAndCert(self):
        self.key = os.path.join(self.uefi_dir, "uefi.key")
        self.cert = os.path.join(self.uefi_dir, "uefi.crt")
        write_file(self.key, "")
        write_file(self.cert, "")

    def openArchive(self, loader_type, version, arch):
        self.path = os.path.join(
            self.temp_dir, "%s_%s_%s.tar.gz" % (loader_type, version, arch))
        self.buffer = open(self.path, "wb")
        self.archive = LaunchpadWriteTarFile(self.buffer)

    def process(self):
        self.archive.close()
        self.buffer.close()
        upload = UefiUpload()
        upload.sign = FakeMethod()
        upload.process(self.pubconf, self.path, self.suite)
        return upload

    def getUefiPath(self, loader_type, arch):
        return os.path.join(
            self.temp_dir, "dists", self.suite, "main", "uefi",
            "%s-%s" % (loader_type, arch))

    def test_unconfigured(self):
        # If there is no key/cert configuration, processing succeeds but
        # nothing is signed.
        self.pubconf = FakeConfig(self.temp_dir, None)
        self.openArchive("test", "1.0", "amd64")
        self.archive.add_file("1.0/empty.efi", "")
        upload = self.process()
        self.assertEqual(0, upload.sign.call_count)

    def test_missing_key_and_cert(self):
        # If the configured key/cert are missing, processing succeeds but
        # nothing is signed.
        self.openArchive("test", "1.0", "amd64")
        self.archive.add_file("1.0/empty.efi", "")
        upload = self.process()
        self.assertEqual(0, upload.sign.call_count)

    def test_no_efi_files(self):
        # Tarballs containing no *.efi files are extracted without complaint.
        self.setUpKeyAndCert()
        self.openArchive("empty", "1.0", "amd64")
        self.archive.add_file("1.0/hello", "world")
        self.process()
        self.assertTrue(os.path.exists(os.path.join(
            self.getUefiPath("empty", "amd64"), "1.0", "hello")))

    def test_already_exists(self):
        # If the target directory already exists, processing fails.
        self.setUpKeyAndCert()
        self.openArchive("test", "1.0", "amd64")
        self.archive.add_file("1.0/empty.efi", "")
        os.makedirs(os.path.join(self.getUefiPath("test", "amd64"), "1.0"))
        self.assertRaises(CustomUploadAlreadyExists, self.process)

    def test_bad_umask(self):
        # The umask must be 022 to avoid incorrect permissions.
        self.setUpKeyAndCert()
        self.openArchive("test", "1.0", "amd64")
        self.archive.add_file("1.0/dir/file.efi", "foo")
        os.umask(002)  # cleanup already handled by setUp
        self.assertRaises(CustomUploadBadUmask, self.process)

    def test_correct_signing_command(self):
        # getSigningCommand returns the correct command.
        self.setUpKeyAndCert()
        upload = UefiUpload()
        upload.setTargetDirectory(
            self.pubconf, "test_1.0_amd64.tar.gz", "distroseries")
        expected_command = [
            "sbsign", "--key", self.key, "--cert", self.cert, "t.efi"]
        self.assertEqual(expected_command, upload.getSigningCommand("t.efi"))

    def test_signs_image(self):
        # Each image in the tarball is signed.
        self.setUpKeyAndCert()
        self.openArchive("test", "1.0", "amd64")
        self.archive.add_file("1.0/empty.efi", "")
        upload = self.process()
        self.assertEqual(1, upload.sign.call_count)
        self.assertEqual(1, len(upload.sign.calls[0][0]))
        self.assertEqual(
            "empty.efi", os.path.basename(upload.sign.calls[0][0][0]))

    def test_installed(self):
        # Files in the tarball are installed correctly.
        self.setUpKeyAndCert()
        self.openArchive("test", "1.0", "amd64")
        self.archive.add_file("1.0/empty.efi", "")
        self.process()
        self.assertTrue(os.path.exists(os.path.join(
            self.getUefiPath("test", "amd64"), "1.0", "empty.efi")))
Code example #24
class TestDistUpgrader(RunPartsMixin, TestCaseWithFactory):

    layer = ZopelessDatabaseLayer

    def setUp(self):
        super(TestDistUpgrader, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.distro = self.factory.makeDistribution()
        db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
            self.distro)
        db_pubconf.root_dir = unicode(self.temp_dir)
        self.archive = self.factory.makeArchive(distribution=self.distro,
                                                purpose=ArchivePurpose.PRIMARY)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 0o022.
        old_umask = os.umask(0o022)
        self.addCleanup(os.umask, old_umask)

    def openArchive(self, version):
        self.path = os.path.join(self.temp_dir,
                                 "dist-upgrader_%s_all.tar.gz" % version)
        self.buffer = open(self.path, "wb")
        self.tarfile = LaunchpadWriteTarFile(self.buffer)

    def process(self):
        self.tarfile.close()
        self.buffer.close()
        DistUpgraderUpload().process(self.archive, self.path, self.suite)

    def getUpgraderPath(self):
        pubconf = getPubConfig(self.archive)
        return os.path.join(pubconf.archiveroot, "dists", self.suite, "main",
                            "dist-upgrader-all")

    def test_basic(self):
        # Processing a simple correct tar file works.
        self.openArchive("20060302.0120")
        self.tarfile.add_file("20060302.0120/hello", b"world")
        self.process()

    def test_already_exists(self):
        # If the target directory already exists, processing fails.
        self.openArchive("20060302.0120")
        self.tarfile.add_file("20060302.0120/hello", b"world")
        os.makedirs(os.path.join(self.getUpgraderPath(), "20060302.0120"))
        self.assertRaises(CustomUploadAlreadyExists, self.process)

    def test_bad_umask(self):
        # The umask must be 0o022 to avoid incorrect permissions.
        self.openArchive("20060302.0120")
        self.tarfile.add_file("20060302.0120/file", b"foo")
        os.umask(0o002)  # cleanup already handled by setUp
        self.assertRaises(CustomUploadBadUmask, self.process)

    def test_current_symlink(self):
        # A "current" symlink is created to the last version.
        self.openArchive("20060302.0120")
        self.tarfile.add_file("20060302.0120/hello", b"world")
        self.process()
        upgrader_path = self.getUpgraderPath()
        self.assertContentEqual(["20060302.0120", "current"],
                                os.listdir(upgrader_path))
        self.assertEqual("20060302.0120",
                         os.readlink(os.path.join(upgrader_path, "current")))
        self.assertContentEqual(["hello"],
                                os.listdir(
                                    os.path.join(upgrader_path,
                                                 "20060302.0120")))

    def test_bad_version(self):
        # Bad versions in the tarball are refused.
        self.openArchive("20070219.1234")
        self.tarfile.add_file("foobar/foobar/dapper.tar.gz", b"")
        self.assertRaises(DistUpgraderBadVersion, self.process)

    def test_sign_with_external_run_parts(self):
        self.enableRunParts(distribution_name=self.distro.name)
        with open(
                os.path.join(self.parts_directory, self.distro.name, "sign.d",
                             "10-sign"), "w") as f:
            f.write(
                dedent("""\
                #! /bin/sh
                touch "$OUTPUT_PATH"
                """))
            os.fchmod(f.fileno(), 0o755)
        self.openArchive("20060302.0120")
        self.tarfile.add_file("20060302.0120/list", "a list")
        self.tarfile.add_file("20060302.0120/foo.tar.gz", "a tarball")
        self.process()
        self.assertThat(os.path.join(self.getUpgraderPath(), "20060302.0120"),
                        DirContains(["list", "foo.tar.gz", "foo.tar.gz.gpg"]))

    def test_getSeriesKey_extracts_architecture(self):
        # getSeriesKey extracts the architecture from an upload's filename.
        self.openArchive("20060302.0120")
        self.assertEqual("all", DistUpgraderUpload.getSeriesKey(self.path))

    def test_getSeriesKey_returns_None_on_mismatch(self):
        # getSeriesKey returns None if the filename does not match the
        # expected pattern.
        self.assertIsNone(DistUpgraderUpload.getSeriesKey("argh_1.0.jpg"))

    def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
        # getSeriesKey requires exactly three fields.
        self.assertIsNone(
            DistUpgraderUpload.getSeriesKey("package_1.0.tar.gz"))
        self.assertIsNone(
            DistUpgraderUpload.getSeriesKey("one_two_three_four_5.tar.gz"))
Code example #25
class TestDdtpTarball(TestCase):

    def setUp(self):
        super(TestDdtpTarball, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.pubconf = FakeConfig(self.temp_dir)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 022.
        old_umask = os.umask(022)
        self.addCleanup(os.umask, old_umask)

    def openArchive(self, version):
        self.path = os.path.join(
            self.temp_dir, "translations_main_%s.tar.gz" % version)
        self.buffer = open(self.path, "wb")
        self.archive = LaunchpadWriteTarFile(self.buffer)

    def process(self):
        self.archive.close()
        self.buffer.close()
        process_ddtp_tarball(self.pubconf, self.path, self.suite)

    def getTranslationsPath(self, filename):
        return os.path.join(
            self.temp_dir, "dists", self.suite, "main", "i18n", filename)

    def test_basic(self):
        # Processing a simple correct tar file works.
        self.openArchive("20060728")
        self.archive.add_file("i18n/Translation-de", "")
        self.process()
        self.assertTrue(os.path.exists(
            self.getTranslationsPath("Translation-de")))

    def test_ignores_empty_directories(self):
        # Empty directories in the tarball are not extracted.
        self.openArchive("20060728")
        self.archive.add_file("i18n/Translation-de", "")
        self.archive.add_directory("i18n/foo")
        self.process()
        self.assertTrue(os.path.exists(
            self.getTranslationsPath("Translation-de")))
        self.assertFalse(os.path.exists(self.getTranslationsPath("foo")))

    def test_partial_update(self):
        # If a DDTP tarball only contains a subset of published translation
        # files, these are updated and the rest are left untouched.
        self.openArchive("20060728")
        self.archive.add_file("i18n/Translation-bn", "bn")
        self.archive.add_file("i18n/Translation-ca", "ca")
        self.process()
        with open(self.getTranslationsPath("Translation-bn")) as bn_file:
            self.assertEqual("bn", bn_file.read())
        with open(self.getTranslationsPath("Translation-ca")) as ca_file:
            self.assertEqual("ca", ca_file.read())
        self.openArchive("20060817")
        self.archive.add_file("i18n/Translation-bn", "new bn")
        self.process()
        with open(self.getTranslationsPath("Translation-bn")) as bn_file:
            self.assertEqual("new bn", bn_file.read())
        with open(self.getTranslationsPath("Translation-ca")) as ca_file:
            self.assertEqual("ca", ca_file.read())

    def test_breaks_hard_links(self):
        # Our archive uses dsync to replace identical files with hard links
        # in order to save some space.  tarfile.extract overwrites
        # pre-existing files rather than creating new files and moving them
        # into place, so making this work requires special care.  Test that
        # that care has been taken.
        self.openArchive("20060728")
        self.archive.add_file("i18n/Translation-ca", "")
        self.process()
        ca = self.getTranslationsPath("Translation-ca")
        bn = self.getTranslationsPath("Translation-bn")
        os.link(ca, bn)
        self.assertTrue(os.path.exists(bn))
        self.assertEqual(2, os.stat(bn).st_nlink)
        self.assertEqual(2, os.stat(ca).st_nlink)
        self.openArchive("20060817")
        self.archive.add_file("i18n/Translation-bn", "break hard link")
        self.process()
        with open(bn) as bn_file:
            self.assertEqual("break hard link", bn_file.read())
        with open(ca) as ca_file:
            self.assertEqual("", ca_file.read())
        self.assertEqual(1, os.stat(bn).st_nlink)
        self.assertEqual(1, os.stat(ca).st_nlink)

    def test_parsePath_handles_underscore_in_directory(self):
        # parsePath is not misled by an underscore in the directory name.
        self.assertEqual(
            # XXX cjwatson 2012-07-03: .tar.gz is not stripped off the end
            # of the version due to something of an ambiguity in the design;
            # how should translations_main_1.0.1.tar.gz be parsed?  In
            # practice this doesn't matter because DdtpTarballUpload never
            # uses the version for anything.
            ("translations", "main", "1.tar.gz"),
            DdtpTarballUpload.parsePath(
                "/dir_with_underscores/translations_main_1.tar.gz"))

    def test_getSeriesKey_extracts_component(self):
        # getSeriesKey extracts the component from an upload's filename.
        self.openArchive("20060728")
        self.assertEqual("main", DdtpTarballUpload.getSeriesKey(self.path))

    def test_getSeriesKey_returns_None_on_mismatch(self):
        # getSeriesKey returns None if the filename does not match the
        # expected pattern.
        self.assertIsNone(DdtpTarballUpload.getSeriesKey("argh_1.0.jpg"))

    def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
        # getSeriesKey requires exactly three fields.
        self.assertIsNone(DdtpTarballUpload.getSeriesKey("package_1.0.tar.gz"))
        self.assertIsNone(DdtpTarballUpload.getSeriesKey(
            "one_two_three_four_5.tar.gz"))
Code example #26
File: test_gina.py Project: pombredanne/launchpad-3
    def test_process_package_cleans_up_after_unpack_failure(self):
        archive_root = self.useTempDir()
        pool_dir = os.path.join(archive_root, "pool/main/f/foo")
        os.makedirs(pool_dir)

        with open(os.path.join(
            pool_dir, "foo_1.0.orig.tar.gz"), "wb+") as buffer:
            orig_tar = LaunchpadWriteTarFile(buffer)
            orig_tar.add_directory("foo-1.0")
            orig_tar.close()
            buffer.seek(0)
            orig_tar_contents = buffer.read()
        with open(os.path.join(
            pool_dir, "foo_1.0-1.debian.tar.gz"), "wb+") as buffer:
            debian_tar = LaunchpadWriteTarFile(buffer)
            debian_tar.add_file("debian/source/format", "3.0 (quilt)\n")
            debian_tar.add_file("debian/patches/series", "--- corrupt patch\n")
            debian_tar.add_file("debian/rules", "")
            debian_tar.close()
            buffer.seek(0)
            debian_tar_contents = buffer.read()
        dsc_path = os.path.join(pool_dir, "foo_1.0-1.dsc")
        with open(dsc_path, "w") as dsc:
            dsc.write(dedent("""\
                Format: 3.0 (quilt)
                Source: foo
                Binary: foo
                Architecture: all
                Version: 1.0-1
                Maintainer: Foo Bar <*****@*****.**>
                Files:
                 %s %s foo_1.0.orig.tar.gz
                 %s %s foo_1.0-1.debian.tar.gz
                """ % (
                    hashlib.md5(orig_tar_contents).hexdigest(),
                    len(orig_tar_contents),
                    hashlib.md5(debian_tar_contents).hexdigest(),
                    len(debian_tar_contents))))

        dsc_contents = parse_tagfile(dsc_path)
        dsc_contents["Directory"] = pool_dir
        dsc_contents["Package"] = "foo"
        dsc_contents["Component"] = "main"
        dsc_contents["Section"] = "misc"

        sp_data = SourcePackageData(**dsc_contents)
        unpack_tmpdir = self.makeTemporaryDirectory()
        with EnvironmentVariableFixture("TMPDIR", unpack_tmpdir):
            # Force tempfile to recheck TMPDIR.
            tempfile.tempdir = None
            try:
                self.assertRaises(
                    ExecutionError,
                    sp_data.process_package, "ubuntu", archive_root)
            finally:
                # Force tempfile to recheck TMPDIR for future tests.
                tempfile.tempdir = None
        self.assertEqual([], os.listdir(unpack_tmpdir))
Code example #27
class TestDebianInstaller(TestCase):

    def setUp(self):
        super(TestDebianInstaller, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.pubconf = FakeConfig(self.temp_dir)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 022.
        old_umask = os.umask(022)
        self.addCleanup(os.umask, old_umask)

    def openArchive(self):
        self.version = "20070214ubuntu1"
        self.arch = "i386"
        self.path = os.path.join(
            self.temp_dir,
            "debian-installer-images_%s_%s.tar.gz" % (self.version, self.arch))
        self.buffer = open(self.path, "wb")
        self.archive = LaunchpadWriteTarFile(self.buffer)

    def addFile(self, path, contents):
        self.archive.add_file(
            "installer-%s/%s/%s" % (self.arch, self.version, path), contents)

    def addSymlink(self, path, target):
        self.archive.add_symlink(
            "installer-%s/%s/%s" % (self.arch, self.version, path), target)

    def process(self):
        self.archive.close()
        self.buffer.close()
        process_debian_installer(self.pubconf, self.path, self.suite)

    def getInstallerPath(self, versioned_filename=None):
        installer_path = os.path.join(
            self.temp_dir, "dists", self.suite, "main",
            "installer-%s" % self.arch)
        if versioned_filename is not None:
            installer_path = os.path.join(
                installer_path, self.version, versioned_filename)
        return installer_path

    def test_basic(self):
        # Processing a simple correct tar file succeeds.
        self.openArchive()
        self.addFile("hello", "world")
        self.process()

    def test_already_exists(self):
        # If the target directory already exists, processing fails.
        self.openArchive()
        os.makedirs(self.getInstallerPath("."))
        self.assertRaises(CustomUploadAlreadyExists, self.process)

    def test_bad_umask(self):
        # The umask must be 022 to avoid incorrect permissions.
        self.openArchive()
        self.addFile("dir/file", "foo")
        os.umask(002)  # cleanup already handled by setUp
        self.assertRaises(CustomUploadBadUmask, self.process)

    def test_current_symlink(self):
        # A "current" symlink is created to the last version.
        self.openArchive()
        self.addFile("hello", "world")
        self.process()
        installer_path = self.getInstallerPath()
        self.assertContentEqual(
            [self.version, "current"], os.listdir(installer_path))
        self.assertEqual(
            self.version, os.readlink(os.path.join(installer_path, "current")))

    def test_correct_file(self):
        # Files in the tarball are extracted correctly.
        self.openArchive()
        directory = ("images/netboot/ubuntu-installer/i386/"
                     "pxelinux.cfg.serial-9600")
        filename = os.path.join(directory, "default")
        long_filename = os.path.join(
            directory, "very_very_very_very_very_very_long_filename")
        self.addFile(filename, "hey")
        self.addFile(long_filename, "long")
        self.process()
        with open(self.getInstallerPath(filename)) as f:
            self.assertEqual("hey", f.read())
        with open(self.getInstallerPath(long_filename)) as f:
            self.assertEqual("long", f.read())

    def test_correct_symlink(self):
        # Symbolic links in the tarball are extracted correctly.
        self.openArchive()
        foo_path = "images/netboot/foo"
        foo_target = "ubuntu-installer/i386/pxelinux.cfg.serial-9600/default"
        link_to_dir_path = "images/netboot/link_to_dir"
        link_to_dir_target = "ubuntu-installer/i386/pxelinux.cfg.serial-9600"
        self.addSymlink(foo_path, foo_target)
        self.addSymlink(link_to_dir_path, link_to_dir_target)
        self.process()
        self.assertEqual(
            foo_target, os.readlink(self.getInstallerPath(foo_path)))
        self.assertEqual(
            link_to_dir_target,
            os.path.normpath(os.readlink(
                self.getInstallerPath(link_to_dir_path))))

    def test_top_level_permissions(self):
        # Top-level directories are set to mode 0755 (see bug 107068).
        self.openArchive()
        self.addFile("hello", "world")
        self.process()
        installer_path = self.getInstallerPath()
        self.assertEqual(0755, os.stat(installer_path).st_mode & 0777)
        self.assertEqual(
            0755,
            os.stat(os.path.join(installer_path, os.pardir)).st_mode & 0777)

    def test_extracted_permissions(self):
        # Extracted files and directories are set to 0644/0755.
        self.openArchive()
        directory = ("images/netboot/ubuntu-installer/i386/"
                     "pxelinux.cfg.serial-9600")
        filename = os.path.join(directory, "default")
        self.addFile(filename, "hey")
        self.process()
        self.assertEqual(
            0644, os.stat(self.getInstallerPath(filename)).st_mode & 0777)
        self.assertEqual(
            0755, os.stat(self.getInstallerPath(directory)).st_mode & 0777)

    def test_getSeriesKey_extracts_architecture(self):
        # getSeriesKey extracts the architecture from an upload's filename.
        self.openArchive()
        self.assertEqual(
            self.arch, DebianInstallerUpload.getSeriesKey(self.path))

    def test_getSeriesKey_returns_None_on_mismatch(self):
        # getSeriesKey returns None if the filename does not match the
        # expected pattern.
        self.assertIsNone(DebianInstallerUpload.getSeriesKey("argh_1.0.jpg"))

    def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
        # getSeriesKey requires exactly three fields.
        self.assertIsNone(DebianInstallerUpload.getSeriesKey(
            "package_1.0.tar.gz"))
        self.assertIsNone(DebianInstallerUpload.getSeriesKey(
            "one_two_three_four_5.tar.gz"))
Code example #28
class TestDistUpgrader(TestCase):
    def setUp(self):
        super(TestDistUpgrader, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.pubconf = FakeConfig(self.temp_dir)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 022.
        old_umask = os.umask(022)
        self.addCleanup(os.umask, old_umask)

    def openArchive(self, version):
        self.path = os.path.join(self.temp_dir, "dist-upgrader_%s_all.tar.gz" % version)
        self.buffer = open(self.path, "wb")
        self.archive = LaunchpadWriteTarFile(self.buffer)

    def process(self):
        self.archive.close()
        self.buffer.close()
        process_dist_upgrader(self.pubconf, self.path, self.suite)

    def getUpgraderPath(self):
        return os.path.join(self.temp_dir, "dists", self.suite, "main", "dist-upgrader-all")

    def test_basic(self):
        # Processing a simple correct tar file works.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/hello", "world")
        self.process()

    def test_already_exists(self):
        # If the target directory already exists, processing fails.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/hello", "world")
        os.makedirs(os.path.join(self.getUpgraderPath(), "20060302.0120"))
        self.assertRaises(CustomUploadAlreadyExists, self.process)

    def test_bad_umask(self):
        # The umask must be 022 to avoid incorrect permissions.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/file", "foo")
        os.umask(002)  # cleanup already handled by setUp
        self.assertRaises(CustomUploadBadUmask, self.process)

    def test_current_symlink(self):
        # A "current" symlink is created to the last version.
        self.openArchive("20060302.0120")
        self.archive.add_file("20060302.0120/hello", "world")
        self.process()
        upgrader_path = self.getUpgraderPath()
        self.assertContentEqual(["20060302.0120", "current"], os.listdir(upgrader_path))
        self.assertEqual("20060302.0120", os.readlink(os.path.join(upgrader_path, "current")))
        self.assertContentEqual(["hello"], os.listdir(os.path.join(upgrader_path, "20060302.0120")))

    def test_bad_version(self):
        # Bad versions in the tarball are refused.
        self.openArchive("20070219.1234")
        self.archive.add_file("foobar/foobar/dapper.tar.gz", "")
        self.assertRaises(DistUpgraderBadVersion, self.process)

    def test_getSeriesKey_extracts_architecture(self):
        # getSeriesKey extracts the architecture from an upload's filename.
        self.openArchive("20060302.0120")
        self.assertEqual("all", DistUpgraderUpload.getSeriesKey(self.path))

    def test_getSeriesKey_returns_None_on_mismatch(self):
        # getSeriesKey returns None if the filename does not match the
        # expected pattern.
        self.assertIsNone(DistUpgraderUpload.getSeriesKey("argh_1.0.jpg"))

    def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
        # getSeriesKey requires exactly three fields.
        self.assertIsNone(DistUpgraderUpload.getSeriesKey("package_1.0.tar.gz"))
        self.assertIsNone(DistUpgraderUpload.getSeriesKey("one_two_three_four_5.tar.gz"))
Code example #29
 def openArchive(self, loader_type, version, arch):
     self.path = os.path.join(
         self.temp_dir, "%s_%s_%s.tar.gz" % (loader_type, version, arch))
     self.buffer = open(self.path, "wb")
     self.archive = LaunchpadWriteTarFile(self.buffer)
Code example #30
 def openArchive(self, version):
     self.path = os.path.join(self.temp_dir,
                              "translations_main_%s.tar.gz" % version)
     self.buffer = open(self.path, "wb")
     self.tarfile = LaunchpadWriteTarFile(self.buffer)
Code example #31
 def openArchive(self, version):
     self.path = os.path.join(self.temp_dir, "dist-upgrader_%s_all.tar.gz" % version)
     self.buffer = open(self.path, "wb")
     self.archive = LaunchpadWriteTarFile(self.buffer)
Code example #32
class TestDebianInstaller(RunPartsMixin, TestCaseWithFactory):

    layer = ZopelessDatabaseLayer

    def setUp(self):
        super(TestDebianInstaller, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.distro = self.factory.makeDistribution()
        db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
            self.distro)
        db_pubconf.root_dir = unicode(self.temp_dir)
        self.archive = self.factory.makeArchive(distribution=self.distro,
                                                purpose=ArchivePurpose.PRIMARY)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 0o022.
        old_umask = os.umask(0o022)
        self.addCleanup(os.umask, old_umask)

    def openArchive(self):
        self.version = "20070214ubuntu1"
        self.arch = "i386"
        self.path = os.path.join(
            self.temp_dir,
            "debian-installer-images_%s_%s.tar.gz" % (self.version, self.arch))
        self.buffer = open(self.path, "wb")
        self.tarfile = LaunchpadWriteTarFile(self.buffer)

    def addFile(self, path, contents):
        self.tarfile.add_file(
            "installer-%s/%s/%s" % (self.arch, self.version, path), contents)

    def addSymlink(self, path, target):
        self.tarfile.add_symlink(
            "installer-%s/%s/%s" % (self.arch, self.version, path), target)

    def process(self):
        self.tarfile.close()
        self.buffer.close()
        DebianInstallerUpload().process(self.archive, self.path, self.suite)

    def getInstallerPath(self, versioned_filename=None):
        pubconf = getPubConfig(self.archive)
        installer_path = os.path.join(pubconf.archiveroot, "dists", self.suite,
                                      "main", "installer-%s" % self.arch)
        if versioned_filename is not None:
            installer_path = os.path.join(installer_path, self.version,
                                          versioned_filename)
        return installer_path

    def test_basic(self):
        # Processing a simple correct tar file succeeds.
        self.openArchive()
        self.addFile("hello", b"world")
        self.process()

    def test_already_exists(self):
        # If the target directory already exists, processing fails.
        self.openArchive()
        os.makedirs(self.getInstallerPath("."))
        self.assertRaises(CustomUploadAlreadyExists, self.process)

    def test_bad_umask(self):
        # The umask must be 0o022 to avoid incorrect permissions.
        self.openArchive()
        self.addFile("dir/file", b"foo")
        os.umask(0o002)  # cleanup already handled by setUp
        self.assertRaises(CustomUploadBadUmask, self.process)

    def test_current_symlink(self):
        # A "current" symlink is created to the last version.
        self.openArchive()
        self.addFile("hello", b"world")
        self.process()
        installer_path = self.getInstallerPath()
        self.assertContentEqual([self.version, "current"],
                                os.listdir(installer_path))
        self.assertEqual(self.version,
                         os.readlink(os.path.join(installer_path, "current")))

    def test_correct_file(self):
        # Files in the tarball are extracted correctly.
        self.openArchive()
        directory = ("images/netboot/ubuntu-installer/i386/"
                     "pxelinux.cfg.serial-9600")
        filename = os.path.join(directory, "default")
        long_filename = os.path.join(
            directory, "very_very_very_very_very_very_long_filename")
        self.addFile(filename, b"hey")
        self.addFile(long_filename, b"long")
        self.process()
        with open(self.getInstallerPath(filename)) as f:
            self.assertEqual("hey", f.read())
        with open(self.getInstallerPath(long_filename)) as f:
            self.assertEqual("long", f.read())

    def test_correct_symlink(self):
        # Symbolic links in the tarball are extracted correctly.
        self.openArchive()
        foo_path = "images/netboot/foo"
        foo_target = "ubuntu-installer/i386/pxelinux.cfg.serial-9600/default"
        link_to_dir_path = "images/netboot/link_to_dir"
        link_to_dir_target = "ubuntu-installer/i386/pxelinux.cfg.serial-9600"
        self.addSymlink(foo_path, foo_target)
        self.addSymlink(link_to_dir_path, link_to_dir_target)
        self.process()
        self.assertEqual(foo_target,
                         os.readlink(self.getInstallerPath(foo_path)))
        self.assertEqual(
            link_to_dir_target,
            os.path.normpath(
                os.readlink(self.getInstallerPath(link_to_dir_path))))

    def test_top_level_permissions(self):
        # Top-level directories are set to mode 0o755 (see bug 107068).
        self.openArchive()
        self.addFile("hello", b"world")
        self.process()
        installer_path = self.getInstallerPath()
        self.assertEqual(0o755, os.stat(installer_path).st_mode & 0o777)
        self.assertEqual(
            0o755,
            os.stat(os.path.join(installer_path, os.pardir)).st_mode & 0o777)

    def test_extracted_permissions(self):
        # Extracted files and directories are set to 0o644/0o755.
        self.openArchive()
        directory = ("images/netboot/ubuntu-installer/i386/"
                     "pxelinux.cfg.serial-9600")
        filename = os.path.join(directory, "default")
        self.addFile(filename, b"hey")
        self.process()
        self.assertEqual(
            0o644,
            os.stat(self.getInstallerPath(filename)).st_mode & 0o777)
        self.assertEqual(
            0o755,
            os.stat(self.getInstallerPath(directory)).st_mode & 0o777)

    def test_sign_with_external_run_parts(self):
        self.enableRunParts(distribution_name=self.distro.name)
        with open(
                os.path.join(self.parts_directory, self.distro.name, "sign.d",
                             "10-sign"), "w") as f:
            f.write(
                dedent("""\
                #! /bin/sh
                touch "$OUTPUT_PATH"
                """))
            os.fchmod(f.fileno(), 0o755)
        self.openArchive()
        self.addFile("images/list", "a list")
        self.addFile("images/SHA256SUMS", "a checksum")
        self.process()
        self.assertThat(self.getInstallerPath("images"),
                        DirContains(["list", "SHA256SUMS", "SHA256SUMS.gpg"]))
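
The test relies on Launchpad's run-parts signing hooks, which are told where to write the signature via $OUTPUT_PATH (as the shell script above shows). A rough sketch of how such a hook directory could be dispatched; the helper and any calling convention beyond OUTPUT_PATH are assumptions, not the actual publisher code:

import os
import subprocess

def run_sign_hooks(sign_dir, output_path):
    """Run every executable hook in sign_dir with OUTPUT_PATH in its environment."""
    env = dict(os.environ, OUTPUT_PATH=output_path)
    for name in sorted(os.listdir(sign_dir)):
        script = os.path.join(sign_dir, name)
        if os.access(script, os.X_OK):
            subprocess.check_call([script], env=env)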

    def test_getSeriesKey_extracts_architecture(self):
        # getSeriesKey extracts the architecture from an upload's filename.
        self.openArchive()
        self.assertEqual(self.arch,
                         DebianInstallerUpload.getSeriesKey(self.path))

    def test_getSeriesKey_returns_None_on_mismatch(self):
        # getSeriesKey returns None if the filename does not match the
        # expected pattern.
        self.assertIsNone(DebianInstallerUpload.getSeriesKey("argh_1.0.jpg"))

    def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
        # getSeriesKey requires exactly three fields.
        self.assertIsNone(
            DebianInstallerUpload.getSeriesKey("package_1.0.tar.gz"))
        self.assertIsNone(
            DebianInstallerUpload.getSeriesKey("one_two_three_four_5.tar.gz"))
コード例 #33
0
ファイル: test_gina.py プロジェクト: pombredanne/launchpad-3
    def test_unpack_dsc_with_vendor(self):
        # Some source packages unpack differently depending on dpkg's idea
        # of the "vendor", and in extreme cases may even fail with some
        # vendors.  gina always sets the vendor to the target distribution
        # name to ensure that it unpacks packages as if unpacking on that
        # distribution.
        archive_root = self.useTempDir()
        pool_dir = os.path.join(archive_root, "pool/main/f/foo")
        os.makedirs(pool_dir)

        # Synthesise a package that can be unpacked with DEB_VENDOR=debian
        # but not with DEB_VENDOR=ubuntu.
        with open(os.path.join(
            pool_dir, "foo_1.0.orig.tar.gz"), "wb+") as buffer:
            orig_tar = LaunchpadWriteTarFile(buffer)
            orig_tar.add_directory("foo-1.0")
            orig_tar.close()
            buffer.seek(0)
            orig_tar_contents = buffer.read()
        with open(os.path.join(
            pool_dir, "foo_1.0-1.debian.tar.gz"), "wb+") as buffer:
            debian_tar = LaunchpadWriteTarFile(buffer)
            debian_tar.add_file("debian/source/format", "3.0 (quilt)\n")
            debian_tar.add_file(
                "debian/patches/ubuntu.series", "--- corrupt patch\n")
            debian_tar.add_file("debian/rules", "")
            debian_tar.close()
            buffer.seek(0)
            debian_tar_contents = buffer.read()
        dsc_path = os.path.join(pool_dir, "foo_1.0-1.dsc")
        with open(dsc_path, "w") as dsc:
            dsc.write(dedent("""\
                Format: 3.0 (quilt)
                Source: foo
                Binary: foo
                Architecture: all
                Version: 1.0-1
                Maintainer: Foo Bar <*****@*****.**>
                Files:
                 %s %s foo_1.0.orig.tar.gz
                 %s %s foo_1.0-1.debian.tar.gz
                """ % (
                    hashlib.md5(orig_tar_contents).hexdigest(),
                    len(orig_tar_contents),
                    hashlib.md5(debian_tar_contents).hexdigest(),
                    len(debian_tar_contents))))

        dsc_contents = parse_tagfile(dsc_path)
        dsc_contents["Directory"] = pool_dir
        dsc_contents["Package"] = "foo"
        dsc_contents["Component"] = "main"
        dsc_contents["Section"] = "misc"

        sp_data = SourcePackageData(**dsc_contents)
        # Unpacking this in an Ubuntu context fails.
        self.assertRaises(
            ExecutionError, sp_data.do_package, "ubuntu", archive_root)
        self.assertFalse(os.path.exists("foo-1.0"))
        # But all is well in a Debian context.
        sp_data.do_package("debian", archive_root)
        self.assertFalse(os.path.exists("foo-1.0"))
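
The vendor behaviour the comment describes comes from dpkg-source consulting the DEB_VENDOR environment variable when it unpacks "3.0 (quilt)" sources (an Ubuntu vendor applies debian/patches/ubuntu.series, a Debian vendor does not). A minimal sketch of unpacking a .dsc with an explicit vendor; this is illustrative and not gina's actual do_package code:

import os
import subprocess

def unpack_dsc(dsc_path, target_dir, vendor):
    """Unpack a source package with dpkg-source, forcing the given vendor."""
    env = dict(os.environ, DEB_VENDOR=vendor)
    subprocess.check_call(
        ["dpkg-source", "--no-check", "-x", dsc_path, target_dir], env=env)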
コード例 #34
0
class TestDdtpTarball(TestCaseWithFactory):

    layer = ZopelessDatabaseLayer

    def setUp(self):
        super(TestDdtpTarball, self).setUp()
        self.temp_dir = self.makeTemporaryDirectory()
        self.distro = self.factory.makeDistribution()
        db_pubconf = getUtility(IPublisherConfigSet).getByDistribution(
            self.distro)
        db_pubconf.root_dir = unicode(self.temp_dir)
        self.archive = self.factory.makeArchive(distribution=self.distro,
                                                purpose=ArchivePurpose.PRIMARY)
        self.suite = "distroseries"
        # CustomUpload.installFiles requires a umask of 0o022.
        old_umask = os.umask(0o022)
        self.addCleanup(os.umask, old_umask)

    def openArchive(self, version):
        self.path = os.path.join(self.temp_dir,
                                 "translations_main_%s.tar.gz" % version)
        self.buffer = open(self.path, "wb")
        self.tarfile = LaunchpadWriteTarFile(self.buffer)

    def process(self):
        self.tarfile.close()
        self.buffer.close()
        DdtpTarballUpload().process(self.archive, self.path, self.suite)

    def getTranslationsPath(self, filename):
        pubconf = getPubConfig(self.archive)
        return os.path.join(pubconf.archiveroot, "dists", self.suite, "main",
                            "i18n", filename)

    def test_basic(self):
        # Processing a simple correct tar file works.
        self.openArchive("20060728")
        self.tarfile.add_file("i18n/Translation-de", b"")
        self.process()
        self.assertTrue(
            os.path.exists(self.getTranslationsPath("Translation-de")))

    def test_ignores_empty_directories(self):
        # Empty directories in the tarball are not extracted.
        self.openArchive("20060728")
        self.tarfile.add_file("i18n/Translation-de", b"")
        self.tarfile.add_directory("i18n/foo")
        self.process()
        self.assertTrue(
            os.path.exists(self.getTranslationsPath("Translation-de")))
        self.assertFalse(os.path.exists(self.getTranslationsPath("foo")))

    def test_partial_update(self):
        # If a DDTP tarball only contains a subset of published translation
        # files, these are updated and the rest are left untouched.
        self.openArchive("20060728")
        self.tarfile.add_file("i18n/Translation-bn", b"bn")
        self.tarfile.add_file("i18n/Translation-ca", b"ca")
        self.process()
        with open(self.getTranslationsPath("Translation-bn")) as bn_file:
            self.assertEqual("bn", bn_file.read())
        with open(self.getTranslationsPath("Translation-ca")) as ca_file:
            self.assertEqual("ca", ca_file.read())
        self.openArchive("20060817")
        self.tarfile.add_file("i18n/Translation-bn", b"new bn")
        self.process()
        with open(self.getTranslationsPath("Translation-bn")) as bn_file:
            self.assertEqual("new bn", bn_file.read())
        with open(self.getTranslationsPath("Translation-ca")) as ca_file:
            self.assertEqual("ca", ca_file.read())

    def test_breaks_hard_links(self):
        # Our archive uses dsync to replace identical files with hard links
        # in order to save some space.  tarfile.extract overwrites
        # pre-existing files rather than creating new files and moving them
        # into place, so making this work requires special care.  Test that
        # that care has been taken.
        self.openArchive("20060728")
        self.tarfile.add_file("i18n/Translation-ca", b"")
        self.process()
        ca = self.getTranslationsPath("Translation-ca")
        bn = self.getTranslationsPath("Translation-bn")
        os.link(ca, bn)
        self.assertTrue(os.path.exists(bn))
        self.assertEqual(2, os.stat(bn).st_nlink)
        self.assertEqual(2, os.stat(ca).st_nlink)
        self.openArchive("20060817")
        self.tarfile.add_file("i18n/Translation-bn", b"break hard link")
        self.process()
        with open(bn) as bn_file:
            self.assertEqual("break hard link", bn_file.read())
        with open(ca) as ca_file:
            self.assertEqual("", ca_file.read())
        self.assertEqual(1, os.stat(bn).st_nlink)
        self.assertEqual(1, os.stat(ca).st_nlink)
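
A minimal sketch of the "special care" the comment refers to: removing the destination name before writing, so that the new data lands in a fresh inode and any hard-linked sibling keeps its old content. The helper is hypothetical; the real extraction logic lives in the custom upload code:

import os

def write_without_sharing(path, content):
    """Write content to path without disturbing hard links to the old file."""
    if os.path.lexists(path):
        # Unlink the name first; other links still point at the old inode.
        os.remove(path)
    with open(path, "wb") as out:
        out.write(content)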

    def test_parsePath_handles_underscore_in_directory(self):
        # parsePath is not misled by an underscore in the directory name.
        self.assertEqual(
            # XXX cjwatson 2012-07-03: .tar.gz is not stripped off the end
            # of the version due to something of an ambiguity in the design;
            # how should translations_main_1.0.1.tar.gz be parsed?  In
            # practice this doesn't matter because DdtpTarballUpload never
            # uses the version for anything.
            ("translations", "main", "1.tar.gz"),
            DdtpTarballUpload.parsePath(
                "/dir_with_underscores/translations_main_1.tar.gz"))

    def test_getSeriesKey_extracts_component(self):
        # getSeriesKey extracts the component from an upload's filename.
        self.openArchive("20060728")
        self.assertEqual("main", DdtpTarballUpload.getSeriesKey(self.path))

    def test_getSeriesKey_returns_None_on_mismatch(self):
        # getSeriesKey returns None if the filename does not match the
        # expected pattern.
        self.assertIsNone(DdtpTarballUpload.getSeriesKey("argh_1.0.jpg"))

    def test_getSeriesKey_refuses_names_with_wrong_number_of_fields(self):
        # getSeriesKey requires exactly three fields.
        self.assertIsNone(DdtpTarballUpload.getSeriesKey("package_1.0.tar.gz"))
        self.assertIsNone(
            DdtpTarballUpload.getSeriesKey("one_two_three_four_5.tar.gz"))
コード例 #35
0
def export(distroseries, component, update, force_utf8, logger):
    """Return a pair containing a filehandle from which the distribution's
    translations tarball can be read and the size of the tarball in bytes.

    :arg distroseries: The `IDistroSeries` we want to export from.
    :arg component: The component name from the given distribution series.
    :arg update: Whether the export should be an update from the last export.
    :arg force_utf8: Whether the export should have all files exported as
        UTF-8.
    :arg logger: A logger object.
    """
    # We will need to know when the export started so that we can add a
    # timestamp for this export inside the exported tarball.
    start_date = datetime.datetime.utcnow().strftime('%Y%m%d')
    export_set = getUtility(IVPOExportSet)

    logger.debug("Selecting PO files for export")

    date = None
    if update:
        # Get the export date for the current base language pack.
        date = distroseries.language_pack_base.date_exported

    pofile_count = export_set.get_distroseries_pofiles_count(
        distroseries, date, component, languagepack=True)
    logger.info("Number of PO files to export: %d" % pofile_count)

    filehandle = tempfile.TemporaryFile()
    archive = LaunchpadWriteTarFile(filehandle)

    # XXX JeroenVermeulen 2008-02-06: Is there anything here that we can unify
    # with the export-queue code?
    xpi_templates_to_export = set()
    path_prefix = 'rosetta-%s' % distroseries.name

    pofiles = export_set.get_distroseries_pofiles(
        distroseries, date, component, languagepack=True)

    # Manual caching.  Fetch POTMsgSets in bulk per template, and cache
    # them across POFiles if subsequent POFiles belong to the same
    # template.
    cached_potemplate = None
    cached_potmsgsets = []

    for index, pofile in enumerate(pofiles):
        number = index + 1
        logger.debug("Exporting PO file %d (%d/%d)" %
            (pofile.id, number, pofile_count))

        potemplate = pofile.potemplate
        if potemplate != cached_potemplate:
            # Launchpad's StupidCache caches absolutely everything,
            # which causes us to run out of memory.  We know at this
            # point that we don't have useful references to potemplate's
            # messages anymore, so remove them forcibly from the cache.
            store = Store.of(potemplate)
            for potmsgset in cached_potmsgsets:
                store.invalidate(potmsgset.msgid_singular)
                store.invalidate(potmsgset)

            # Commit a transaction with every PO template and its
            # PO files exported so we don't keep it open for too long.
            transaction.commit()

            cached_potemplate = potemplate
            cached_potmsgsets = [
                potmsgset for potmsgset in potemplate.getPOTMsgSets()]

            if ((index + 1) % 5) == 0:
                # Garbage-collect once every 5 templates (but not at the
                # very beginning); it is a bit too expensive to do for
                # each one.
                gc.collect()

        domain = potemplate.translation_domain.encode('ascii')
        code = pofile.getFullLanguageCode().encode('UTF-8')

        if potemplate.source_file_format == TranslationFileFormat.XPI:
            xpi_templates_to_export.add(potemplate)
            path = os.path.join(
                path_prefix, 'xpi', domain, '%s.po' % code)
        else:
            path = os.path.join(
                path_prefix, code, 'LC_MESSAGES', '%s.po' % domain)

        try:
            # We don't want obsolete entries here; they make no sense for a
            # language pack.
            contents = pofile.export(
                ignore_obsolete=True, force_utf8=force_utf8)

            # Store it in the tarball.
            archive.add_file(path, contents)
        except Exception:
            logger.exception(
                "Uncaught exception while exporting PO file %d" % pofile.id)

        store.invalidate(pofile)

    logger.info("Exporting XPI template files.")
    librarian_client = getUtility(ILibrarianClient)
    for template in xpi_templates_to_export:
        if template.source_file is None:
            logger.warning(
                "%s doesn't have a source file registered." % template.title)
            continue
        domain = template.translation_domain.encode('ascii')
        archive.add_file(
            os.path.join(path_prefix, 'xpi', domain, 'en-US.xpi'),
            librarian_client.getFileByAlias(
                template.source_file.id).read())

    logger.info("Adding timestamp file")
    # It is important that the timestamp contain the date when the export
    # started, not when it finished, because that indicates how old the
    # information in the export is.
    archive.add_file(
        'rosetta-%s/timestamp.txt' % distroseries.name, '%s\n' % start_date)

    logger.info("Adding mapping file")
    mapping_text = ''
    mapping = iter_sourcepackage_translationdomain_mapping(distroseries)
    for sourcepackagename, translationdomain in mapping:
        mapping_text += "%s %s\n" % (sourcepackagename, translationdomain)
    archive.add_file(
        'rosetta-%s/mapping.txt' % distroseries.name, mapping_text)

    logger.info("Done.")

    archive.close()
    size = filehandle.tell()
    filehandle.seek(0)

    return filehandle, size
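
A hedged sketch of how a caller might consume the (filehandle, size) pair returned by export(); the destination argument and the logging message are illustrative, not part of the language-pack machinery:

import shutil

def save_language_pack(distroseries, component, update, force_utf8, logger,
                       destination):
    """Run export() and copy the resulting tarball to destination."""
    filehandle, size = export(
        distroseries, component, update, force_utf8, logger)
    logger.info("Language pack is %d bytes." % size)
    with open(destination, "wb") as target:
        shutil.copyfileobj(filehandle, target)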