def test_unpack_dsc_with_vendor(self):
    """do_package honours the target distribution as DEB_VENDOR.

    Builds a synthetic 3.0 (quilt) source package whose Ubuntu-specific
    patch series is corrupt, then checks that unpacking fails for
    "ubuntu" but succeeds for "debian".
    """
    # Some source packages unpack differently depending on dpkg's idea
    # of the "vendor", and in extreme cases may even fail with some
    # vendors.  gina always sets the vendor to the target distribution
    # name to ensure that it unpacks packages as if unpacking on that
    # distribution.
    archive_root = self.useTempDir()
    pool_dir = os.path.join(archive_root, "pool/main/f/foo")
    os.makedirs(pool_dir)

    # Synthesise a package that can be unpacked with DEB_VENDOR=debian
    # but not with DEB_VENDOR=ubuntu.
    # NOTE(review): this relies on dpkg-source preferring a
    # debian/patches/<vendor>.series file over the default series when
    # unpacking — the corrupt ubuntu.series is only consulted for the
    # Ubuntu vendor.
    with open(os.path.join(
            pool_dir, "foo_1.0.orig.tar.gz"), "wb+") as buffer:
        orig_tar = LaunchpadWriteTarFile(buffer)
        orig_tar.add_directory("foo-1.0")
        orig_tar.close()
        # Read the finished tarball back so its checksum and size can
        # go into the .dsc below.
        buffer.seek(0)
        orig_tar_contents = buffer.read()
    with open(os.path.join(
            pool_dir, "foo_1.0-1.debian.tar.gz"), "wb+") as buffer:
        debian_tar = LaunchpadWriteTarFile(buffer)
        debian_tar.add_file("debian/source/format", "3.0 (quilt)\n")
        debian_tar.add_file(
            "debian/patches/ubuntu.series", "--- corrupt patch\n")
        debian_tar.add_file("debian/rules", "")
        debian_tar.close()
        buffer.seek(0)
        debian_tar_contents = buffer.read()
    dsc_path = os.path.join(pool_dir, "foo_1.0-1.dsc")
    with open(dsc_path, "w") as dsc:
        dsc.write(
            dedent("""\
                Format: 3.0 (quilt)
                Source: foo
                Binary: foo
                Architecture: all
                Version: 1.0-1
                Maintainer: Foo Bar <*****@*****.**>
                Files:
                 %s %s foo_1.0.orig.tar.gz
                 %s %s foo_1.0-1.debian.tar.gz
                """ % (
                    hashlib.md5(orig_tar_contents).hexdigest(),
                    len(orig_tar_contents),
                    hashlib.md5(debian_tar_contents).hexdigest(),
                    len(debian_tar_contents))))
    dsc_contents = parse_tagfile(dsc_path)
    # Fill in the fields gina would normally take from the Sources
    # index rather than the .dsc itself.
    dsc_contents["Directory"] = pool_dir
    dsc_contents["Package"] = "foo"
    dsc_contents["Component"] = "main"
    dsc_contents["Section"] = "misc"

    sp_data = SourcePackageData(**dsc_contents)
    # Unpacking this in an Ubuntu context fails.
    self.assertRaises(
        ExecutionError, sp_data.do_package, "ubuntu", archive_root)
    # But all is well in a Debian context.
    sp_data.do_package("debian", archive_root)
def test_process_package_cleans_up_after_unpack_failure(self):
    """process_package leaves no temporary files behind on failure.

    Builds a source package whose default patch series is corrupt so
    that unpacking always fails, then checks that the temporary unpack
    directory has been emptied afterwards.
    """
    archive_root = self.useTempDir()
    pool_dir = os.path.join(archive_root, "pool/main/f/foo")
    os.makedirs(pool_dir)

    with open(os.path.join(
            pool_dir, "foo_1.0.orig.tar.gz"), "wb+") as buffer:
        orig_tar = LaunchpadWriteTarFile(buffer)
        orig_tar.add_directory("foo-1.0")
        orig_tar.close()
        # Read the finished tarball back so its checksum and size can
        # go into the .dsc below.
        buffer.seek(0)
        orig_tar_contents = buffer.read()
    with open(os.path.join(
            pool_dir, "foo_1.0-1.debian.tar.gz"), "wb+") as buffer:
        debian_tar = LaunchpadWriteTarFile(buffer)
        debian_tar.add_file("debian/source/format", "3.0 (quilt)\n")
        # The default series file is corrupt, so unpacking fails for
        # any vendor.
        debian_tar.add_file(
            "debian/patches/series", "--- corrupt patch\n")
        debian_tar.add_file("debian/rules", "")
        debian_tar.close()
        buffer.seek(0)
        debian_tar_contents = buffer.read()
    dsc_path = os.path.join(pool_dir, "foo_1.0-1.dsc")
    with open(dsc_path, "w") as dsc:
        dsc.write(dedent("""\
            Format: 3.0 (quilt)
            Source: foo
            Binary: foo
            Architecture: all
            Version: 1.0-1
            Maintainer: Foo Bar <*****@*****.**>
            Files:
             %s %s foo_1.0.orig.tar.gz
             %s %s foo_1.0-1.debian.tar.gz
            """ % (
                hashlib.md5(orig_tar_contents).hexdigest(),
                len(orig_tar_contents),
                hashlib.md5(debian_tar_contents).hexdigest(),
                len(debian_tar_contents))))
    dsc_contents = parse_tagfile(dsc_path)
    # Fill in the fields gina would normally take from the Sources
    # index rather than the .dsc itself.
    dsc_contents["Directory"] = pool_dir
    dsc_contents["Package"] = "foo"
    dsc_contents["Component"] = "main"
    dsc_contents["Section"] = "misc"

    sp_data = SourcePackageData(**dsc_contents)
    # Redirect TMPDIR to a private directory so we can observe exactly
    # what process_package leaves behind.
    unpack_tmpdir = self.makeTemporaryDirectory()
    with EnvironmentVariableFixture("TMPDIR", unpack_tmpdir):
        # Force tempfile to recheck TMPDIR.
        tempfile.tempdir = None
        try:
            self.assertRaises(
                ExecutionError,
                sp_data.process_package, "ubuntu", archive_root)
        finally:
            # Force tempfile to recheck TMPDIR for future tests.
            tempfile.tempdir = None
    self.assertEqual([], os.listdir(unpack_tmpdir))
def openArchive(self):
    """Open a debian-installer images tarball for writing.

    Fixes self.version and self.arch, then exposes the open file as
    self.buffer and the tar writer as self.archive.
    """
    self.version = "20070214ubuntu1"
    self.arch = "i386"
    filename = "debian-installer-images_%s_%s.tar.gz" % (
        self.version, self.arch)
    self.path = os.path.join(self.temp_dir, filename)
    self.buffer = open(self.path, "wb")
    self.archive = LaunchpadWriteTarFile(self.buffer)
def __init__(self, single_file_storage=None):
    """Initialize empty storage strategy, or subsume single-file one.

    :param single_file_storage: optional single-file storage whose one
        file (path, extension, content, mime_type) is copied into this
        multi-file strategy.
    """
    # Buffer the tarball in an anonymous temporary file; the tar
    # writer streams entries into it as files are added.
    self.buffer = tempfile.TemporaryFile()
    self.tar_writer = LaunchpadWriteTarFile(self.buffer)
    if single_file_storage is not None:
        self.addFile(
            single_file_storage.path, single_file_storage.extension,
            single_file_storage.content, single_file_storage.mime_type)
def openArchive(self, version):
    """Open a dist-upgrader tarball for `version` under self.temp_dir.

    Exposes the open file as self.buffer and the tar writer as
    self.tarfile.
    """
    tarball_name = "dist-upgrader_%s_all.tar.gz" % version
    self.path = os.path.join(self.temp_dir, tarball_name)
    self.buffer = open(self.path, "wb")
    self.tarfile = LaunchpadWriteTarFile(self.buffer)
def openArchive(self, version):
    """Open a main translations tarball for `version` under self.temp_dir.

    Exposes the open file as self.buffer and the tar writer as
    self.tarfile.
    """
    tarball_name = "translations_main_%s.tar.gz" % version
    self.path = os.path.join(self.temp_dir, tarball_name)
    self.buffer = open(self.path, "wb")
    self.tarfile = LaunchpadWriteTarFile(self.buffer)
def openArchive(self, loader_type, version, arch):
    """Open a <loader_type>_<version>_<arch>.tar.gz tarball for writing.

    Exposes the open file as self.buffer and the tar writer as
    self.archive.
    """
    tarball_name = "%s_%s_%s.tar.gz" % (loader_type, version, arch)
    self.path = os.path.join(self.temp_dir, tarball_name)
    self.buffer = open(self.path, "wb")
    self.archive = LaunchpadWriteTarFile(self.buffer)
def export(distroseries, component, update, force_utf8, logger):
    """Return a pair containing a filehandle from which the distribution's
    translations tarball can be read and the size of the tarball in bytes.

    :arg distroseries: The `IDistroSeries` we want to export from.
    :arg component: The component name from the given distribution series.
    :arg update: Whether the export should be an update from the last
        export.
    :arg force_utf8: Whether the export should have all files exported as
        UTF-8.
    :arg logger: A logger object.
    """
    # Record when the export started; it goes into the timestamp file in
    # the exported tarball, since it says how old the exported data is.
    start_date = datetime.datetime.utcnow().strftime('%Y%m%d')
    export_set = getUtility(IVPOExportSet)

    logger.debug("Selecting PO files for export")

    date = None
    if update:
        # Get the export date for the current base language pack.
        date = distroseries.language_pack_base.date_exported

    pofile_count = export_set.get_distroseries_pofiles_count(
        distroseries, date, component, languagepack=True)
    logger.info("Number of PO files to export: %d" % pofile_count)

    filehandle = tempfile.TemporaryFile()
    archive = LaunchpadWriteTarFile(filehandle)

    # XXX JeroenVermeulen 2008-02-06: Is there anything here that we can
    # unify with the export-queue code?
    xpi_templates_to_export = set()
    path_prefix = 'rosetta-%s' % distroseries.name

    pofiles = export_set.get_distroseries_pofiles(
        distroseries, date, component, languagepack=True)

    # Manual caching.  Fetch POTMsgSets in bulk per template, and cache
    # them across POFiles if subsequent POFiles belong to the same
    # template.
    cached_potemplate = None
    cached_potmsgsets = []
    for number, pofile in enumerate(pofiles, 1):
        logger.debug(
            "Exporting PO file %d (%d/%d)" % (
                pofile.id, number, pofile_count))

        potemplate = pofile.potemplate
        if potemplate != cached_potemplate:
            # Launchpad's StupidCache caches absolutely everything,
            # which causes us to run out of memory.  We know at this
            # point that we don't have useful references to potemplate's
            # messages anymore, so remove them forcibly from the cache.
            store = Store.of(potemplate)
            for potmsgset in cached_potmsgsets:
                store.invalidate(potmsgset.msgid_singular)
                store.invalidate(potmsgset)

            # Commit a transaction with every PO template and its
            # PO files exported so we don't keep it open for too long.
            transaction.commit()

            cached_potemplate = potemplate
            cached_potmsgsets = list(potemplate.getPOTMsgSets())

            if (number % 5) == 0:
                # Garbage-collect once in 5 templates (but not at the
                # very beginning).  Bit too expensive to do for each
                # one.
                gc.collect()

        domain = potemplate.translation_domain.encode('ascii')
        code = pofile.getFullLanguageCode().encode('UTF-8')

        if potemplate.source_file_format == TranslationFileFormat.XPI:
            xpi_templates_to_export.add(potemplate)
            path = os.path.join(path_prefix, 'xpi', domain, '%s.po' % code)
        else:
            path = os.path.join(
                path_prefix, code, 'LC_MESSAGES', '%s.po' % domain)

        try:
            # We don't want obsolete entries here, it makes no sense for
            # a language pack.
            contents = pofile.export(
                ignore_obsolete=True, force_utf8=force_utf8)

            # Store it in the tarball.
            archive.add_file(path, contents)
        except Exception:
            # Narrowed from a bare "except:" so that SystemExit and
            # KeyboardInterrupt still propagate; the export remains
            # best-effort per PO file.
            logger.exception(
                "Uncaught exception while exporting PO file %d" % pofile.id)

        store.invalidate(pofile)

    logger.info("Exporting XPI template files.")
    librarian_client = getUtility(ILibrarianClient)
    for template in xpi_templates_to_export:
        if template.source_file is None:
            # Bug fix: this warning used to interpolate the stale
            # "potemplate" loop variable left over from the export loop
            # above; report the template that is actually missing its
            # source file.
            logger.warning(
                "%s doesn't have source file registered." % template.title)
            continue
        domain = template.translation_domain.encode('ascii')
        archive.add_file(
            os.path.join(path_prefix, 'xpi', domain, 'en-US.xpi'),
            librarian_client.getFileByAlias(
                template.source_file.id).read())

    logger.info("Adding timestamp file")
    # It is important that the timestamp contain the date when the export
    # started, not when it finished, because it records how old the
    # information in the export is.
    archive.add_file(
        'rosetta-%s/timestamp.txt' % distroseries.name,
        '%s\n' % start_date)

    logger.info("Adding mapping file")
    # Build the mapping file with a single join rather than repeated
    # string concatenation.
    mapping_text = ''.join(
        "%s %s\n" % (sourcepackagename, translationdomain)
        for sourcepackagename, translationdomain in
            iter_sourcepackage_translationdomain_mapping(distroseries))
    archive.add_file(
        'rosetta-%s/mapping.txt' % distroseries.name, mapping_text)

    logger.info("Done.")
    archive.close()
    size = filehandle.tell()
    filehandle.seek(0)

    return filehandle, size