Example No. 1
def sync_local_mirror(config, multipidfile_state):
    if config["CDIMAGE_NOSYNC"]:
        return

    capproject = config.capproject
    sync_lock = os.path.join(config.root, "etc", ".lock-archive-sync")
    if not multipidfile_state:
        log_marker("Syncing %s mirror" % capproject)
        # Acquire lock to allow parallel builds to ensure a consistent
        # archive.
        try:
            subprocess.check_call(["lockfile", "-r", "4", sync_lock])
        except subprocess.CalledProcessError:
            logger.error("Couldn't acquire archive sync lock!")
            raise
        try:
            anonftpsync(config)
        finally:
            osextras.unlink_force(sync_lock)
    else:
        log_marker(
            "Parallel build; waiting for %s mirror to sync" % capproject)
        try:
            subprocess.check_call(["lockfile", "-8", "-r", "450", sync_lock])
        except subprocess.CalledProcessError:
            logger.error("Timed out waiting for archive sync lock!")
            raise
        osextras.unlink_force(sync_lock)
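The acquire/release pattern above (take a lock with procmail's lockfile, do the work, then unlink_force the lock in a finally block) recurs in several of the examples below. A minimal sketch of that pattern factored into a reusable context manager, assuming the project's cdimage.osextras module is importable; the helper name and default retry count are illustrative, not from the original code:

import contextlib
import subprocess

from cdimage import osextras  # assumed import path for the project's helpers


@contextlib.contextmanager
def held_lockfile(path, retries=4):
    # "lockfile" exits non-zero if the lock cannot be created after the retries.
    subprocess.check_call(["lockfile", "-r", str(retries), path])
    try:
        yield
    finally:
        osextras.unlink_force(path)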
Example No. 2
    def _write(self, pids):
        # Must be called within context manager lock.
        assert os.path.exists(self.lock_path), (
            "Called _write on %s without locking!" % self)
        if pids:
            with open(self.path, "w") as fd:
                for pid in sorted(pids):
                    print(pid, file=fd)
        else:
            osextras.unlink_force(self.path)
Example No. 3
def sign_cdimage(config, path):
    if not can_sign(config):
        return False

    with open(path, "rb") as infile:
        with open("%s.gpg" % path, "wb") as outfile:
            try:
                subprocess.check_call(_signing_command(config),
                                      stdin=infile,
                                      stdout=outfile)
            except subprocess.CalledProcessError:
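                # Don't leave a stale or truncated signature behind on failure.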
                osextras.unlink_force("%s.gpg" % path)
                raise
    return True
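sign_cdimage returns False when signing is not configured and raises (after removing the partial .gpg) when gpg fails, so a True return means a detached signature now sits next to the image. A hypothetical call site; the path is illustrative:

iso_path = "/path/to/image.iso"  # hypothetical image location
if sign_cdimage(config, iso_path):
    logger.info("Wrote detached signature %s.gpg" % iso_path)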
Example No. 4
def lock_build_image_set(config):
    project = config.project
    if config["UBUNTU_DEFAULTS_LOCALE"] == "zh_CN":
        project = "ubuntu-chinese-edition"
    if config.distribution == "ubuntu":
        full_series = config.series
    else:
        full_series = "%s-%s" % (config.distribution, config.series)
    lock_path = os.path.join(
        config.root, "etc", ".lock-build-image-set-%s-%s-%s" %
        (project, full_series, config.image_type))
    try:
        subprocess.check_call(["lockfile", "-l", "7200", "-r", "0", lock_path])
    except subprocess.CalledProcessError:
        logger.error("Another image set is already building!")
        raise
    try:
        yield
    finally:
        osextras.unlink_force(lock_path)
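The bare yield in the middle of lock_build_image_set marks it as a generator-based context manager; the @contextlib.contextmanager decorator is presumably applied in the original source but is not shown in this excerpt. A hypothetical call site, just to show the shape of the usage:

with lock_build_image_set(config):
    run_build_steps(config)  # hypothetical placeholder for the actual build work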
Example No. 5
    def publish_source(self, date):
        for i in count(1):
            in_prefix = "%s-src-%d" % (self.config.series, i)
            out_prefix = "%s-src-%d" % (self.config.series, i)
            source_dir = os.path.join(self.image_output, "src")
            source_prefix = os.path.join(source_dir, in_prefix)
            target_dir = os.path.join(self.publish_base, date, "source")
            target_prefix = os.path.join(target_dir, out_prefix)
            if not os.path.exists("%s.raw" % source_prefix):
                break

            logger.info("Publishing source %d ..." % i)
            osextras.ensuredir(target_dir)
            shutil.move("%s.raw" % source_prefix, "%s.iso" % target_prefix)
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
            with ChecksumFileSet(
                self.config, target_dir, sign=False) as checksum_files:
                checksum_files.remove("%s.iso" % out_prefix)

            # Jigdo integration
            if os.path.exists("%s.jigdo" % source_prefix):
                logger.info("Publishing source %d jigdo ..." % i)
                shutil.move(
                    "%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
                shutil.move(
                    "%s.template" % source_prefix,
                    "%s.template" % target_prefix)
            else:
                logger.warning("No jigdo for source %d!" % i)
                osextras.unlink_force("%s.jigdo" % target_prefix)
                osextras.unlink_force("%s.template" % target_prefix)

            # zsync metafiles
            if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
                logger.info("Making source %d zsync metafile ..." % i)
                osextras.unlink_force("%s.iso.zsync" % target_prefix)
                zsyncmake(
                    "%s.iso" % target_prefix, "%s.iso.zsync" % target_prefix,
                    "%s.iso" % out_prefix)

            yield os.path.join(
                self.project, self.image_type, "%s-src" % self.config.series)
Example No. 6
def anonftpsync(config):
    env = dict(os.environ)
    for key, value in _anonftpsync_options(config).items():
        env[key] = value
    target = os.path.join(config.root, "ftp")
    fqdn = socket.getfqdn()
    lock_base = "Archive-Update-in-Progress-%s" % fqdn
    lock = os.path.join(target, lock_base)
    pkglist = "--include-from=" + config["RSYNC_PKGLIST_PATH"]
    if subprocess.call(
            ["lockfile", "-!", "-l", "43200", "-r", "0", lock]) == 0:
        raise Exception(
            "%s is unable to start rsync; lock file exists." % fqdn)
    try:
        log_path = os.path.join(config.root, "log", "rsync.log")
        osextras.ensuredir(os.path.dirname(log_path))
        with open(log_path, "w") as log:
            command_base = [
                "rsync", "--recursive", "--links", "--hard-links", "--times",
                "--verbose", "--stats", "--chmod=Dg+s,g+rwX",
                pkglist,
                "--exclude", lock_base,
                "--exclude", "project/trace/%s" % fqdn,
            ]
            exclude = env.get("RSYNC_EXCLUDE", "").split()
            include = env.get("RSYNC_INCLUDE", "").split()
            source_target = ["%s/" % env["RSYNC_SRC"], "%s/" % target]

            subprocess.call(
                command_base + [
                    "--exclude", "Packages*", "--exclude", "Sources*",
                    "--exclude", "Release*", "--exclude", "InRelease",
                    "--include", "i18n/by-hash/**", "--exclude", "i18n/*",
                ] + include + exclude + source_target,
                stdout=log, stderr=subprocess.STDOUT, env=env)

            # Second pass to update metadata and clean up old files.
            subprocess.call(
                command_base + [
                    "--delay-updates", "--delete", "--delete-after",
                ] + include + exclude + source_target,
                stdout=log, stderr=subprocess.STDOUT, env=env)

        # Delete dangling symlinks.
        for dirpath, _, filenames in os.walk(target):
            for filename in filenames:
                path = os.path.join(dirpath, filename)
                if os.path.islink(path) and not os.path.exists(path):
                    os.unlink(path)

        trace_dir = os.path.join(target, "project", "trace")
        osextras.ensuredir(trace_dir)
        with open(os.path.join(trace_dir, fqdn), "w") as trace:
            subprocess.check_call(["date", "-u"], stdout=trace)

        # Note: if you don't have savelog, use any other log rotation
        # facility or comment this out; the log will simply be overwritten
        # each time.
        with open("/dev/null", "w") as devnull:
            subprocess.call(
                ["savelog", log_path],
                stdout=devnull, stderr=subprocess.STDOUT)
    finally:
        osextras.unlink_force(lock)
Example No. 7
    def test_unlink_file_missing(self):
        path = os.path.join(self.temp_dir, "file")
        osextras.unlink_force(path)
        self.assertFalse(os.path.exists(path))
Example No. 8
    def test_unlink_file_present(self):
        path = os.path.join(self.temp_dir, "file")
        touch(path)
        osextras.unlink_force(path)
        self.assertFalse(os.path.exists(path))
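These two tests pin down the behaviour that every other example leans on: unlinking a missing file is not an error, while a present file is removed. A minimal sketch of an implementation that would satisfy both tests (not necessarily the project's actual code):

import errno
import os


def unlink_force(path):
    """Unlink path, without complaining if it was already gone."""
    try:
        os.unlink(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise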
Example No. 9
def build_ubuntu_defaults_locale(config):
    locale = config["UBUNTU_DEFAULTS_LOCALE"]
    if locale != "zh_CN":
        raise UnknownLocale(
            "UBUNTU_DEFAULTS_LOCALE='%s' not currently supported!" % locale)

    series = config["DIST"]
    if series < "oneiric":
        # Original hack: repack an existing image.
        iso = config["ISO"]
        if not iso:
            raise Exception(
                "Pass ISO=<path to Ubuntu image> in the environment.")

        scratch = os.path.join(
            config.root, "scratch", "ubuntu-chinese-edition", series.name)
        bsdtar_tree = os.path.join(scratch, "bsdtar-tree")

        log_marker("Unpacking")
        if os.path.isdir(bsdtar_tree):
            subprocess.check_call(["chmod", "-R", "+w", bsdtar_tree])
        osextras.mkemptydir(bsdtar_tree)
        subprocess.check_call(["bsdtar", "-xf", iso, "-C", bsdtar_tree])
        subprocess.check_call(["chmod", "-R", "+w", bsdtar_tree])

        log_marker("Transforming (robots in disguise)")
        with open(os.path.join(bsdtar_tree, "isolinux", "lang"), "w") as lang:
            print(locale, file=lang)
        subprocess.call([
            "mkisofs",
            "-r", "-V", "Ubuntu Chinese %s i386" % series.version,
            "-o", os.path.join(scratch, os.path.basename(iso)),
            "-cache-inodes", "-J", "-l",
            "-b", "isolinux/isolinux.bin", "-c", "isolinux/boot.cat",
            "-no-emul-boot", "-boot-load-size", "4", "-boot-info-table",
            bsdtar_tree,
        ])

        iso_prefix = iso.rsplit(".", 1)[0]
        scratch_prefix = os.path.join(
            scratch, os.path.basename(iso).rsplit(".", 1)[0])

        for ext in "list", "manifest":
            if os.path.exists("%s.%s" % (iso_prefix, ext)):
                shutil.copy2(
                    "%s.%s" % (iso_prefix, ext),
                    "%s.%s" % (scratch_prefix, ext))
            else:
                osextras.unlink_force("%s.%s" % (scratch_prefix, ext))
    else:
        log_marker("Downloading live filesystem images")
        download_live_filesystems(config)
        scratch = live_output_directory(config)
        for entry in os.listdir(scratch):
            if "." in entry:
                os.rename(
                    os.path.join(scratch, entry),
                    os.path.join(scratch, "%s-desktop-%s" % (series, entry)))
        pi_makelist = os.path.join(
            config.root, "debian-cd", "tools", "pi-makelist")
        for entry in os.listdir(scratch):
            if entry.endswith(".iso"):
                entry_path = os.path.join(scratch, entry)
                list_path = "%s.list" % entry_path.rsplit(".", 1)[0]
                with open(list_path, "w") as list_file:
                    subprocess.check_call(
                        [pi_makelist, entry_path], stdout=list_file)
Example No. 10
    def __exit__(self, unused_exc_type, unused_exc_value, unused_exc_tb):
        osextras.unlink_force(self.lock_path)
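This __exit__ only releases the lock; the matching __enter__ is not shown in the excerpt. A sketch of what it plausibly looks like, assuming the same lockfile CLI pattern used in the other examples (the retry count is an assumption):

    def __enter__(self):
        # Hypothetical counterpart to the __exit__ above; not taken from the source.
        subprocess.check_call(["lockfile", "-r", "4", self.lock_path])
        return self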
Example No. 11
    def publish(self, date):
        self.new_publish_dir(date)
        published = []
        self.checksum_dirs = []
        if not self.config["CDIMAGE_ONLYSOURCE"]:
            for arch in self.config.arches:
                published.extend(
                    list(self.publish_binary(self.publish_type, arch, date)))
            if self.project == "edubuntu" and self.publish_type == "server":
                for arch in self.config.arches:
                    published.extend(
                        list(self.publish_binary("serveraddon", arch, date)))
        published.extend(list(self.publish_source(date)))

        if not published:
            logger.warning("No CDs produced!")
            return

        target_dir = os.path.join(self.publish_base, date)

        source_report = os.path.join(
            self.britney_report, "%s_probs.html" % self.config.series)
        target_report = os.path.join(target_dir, "report.html")
        if (self.config["CDIMAGE_INSTALL_BASE"] and
            os.path.exists(source_report)):
            shutil.copy2(source_report, target_report)
        else:
            osextras.unlink_force(target_report)

        if not self.config["CDIMAGE_ONLYSOURCE"]:
            checksum_directory(
                self.config, target_dir, old_directories=self.checksum_dirs,
                map_expr=r"s/\.\(img\|img\.gz\|iso\|iso\.gz\|tar\.gz\)$/.raw/")
            subprocess.check_call(
                [os.path.join(self.config.root, "bin", "make-web-indices"),
                 target_dir, self.config.series, "daily"])

        target_dir_source = os.path.join(target_dir, "source")
        if os.path.isdir(target_dir_source):
            checksum_directory(
                self.config, target_dir_source,
                old_directories=[os.path.join(self.image_output, "src")],
                map_expr=r"s/\.\(img\|img\.gz\|iso\|iso\.gz\|tar\.gz\)$/.raw/")
            subprocess.check_call(
                [os.path.join(self.config.root, "bin", "make-web-indices"),
                 target_dir_source, self.config.series, "daily"])

        if (self.image_type.endswith("-live") or
            self.image_type.endswith("dvd")):
            # Create and publish metalink files.
            md5sums_metalink = os.path.join(target_dir, "MD5SUMS-metalink")
            md5sums_metalink_gpg = os.path.join(
                target_dir, "MD5SUMS-metalink.gpg")
            osextras.unlink_force(md5sums_metalink)
            osextras.unlink_force(md5sums_metalink_gpg)
            basedir, reldir = self.metalink_dirs(date)
            if subprocess.call([
                os.path.join(self.config.root, "bin", "make-metalink"),
                basedir, self.config.series, reldir, "cdimage.ubuntu.com",
                ]) == 0:
                metalink_checksum_directory(self.config, target_dir)
            else:
                for name in os.listdir(target_dir):
                    if name.endswith(".metalink"):
                        osextras.unlink_force(os.path.join(target_dir, name))

        publish_current = os.path.join(self.publish_base, "current")
        osextras.unlink_force(publish_current)
        os.symlink(date, publish_current)

        manifest_lock = os.path.join(
            self.config.root, "etc", ".lock-manifest-daily")
        try:
            subprocess.check_call(["lockfile", "-r", "4", manifest_lock])
        except subprocess.CalledProcessError:
            logger.error("Couldn't acquire manifest-daily lock!")
            raise
        try:
            manifest_daily = os.path.join(
                self.tree.directory, ".manifest-daily")
            with AtomicFile(manifest_daily) as manifest_daily_file:
                for line in self.tree.manifest():
                    print(line, file=manifest_daily_file)
            os.chmod(
                manifest_daily, os.stat(manifest_daily).st_mode | stat.S_IWGRP)

            # Create timestamps for this run.
            # TODO cjwatson 20120807: Shouldn't these be in www/full
            # rather than www/full[/project]?
            trace_dir = os.path.join(self.full_tree, ".trace")
            osextras.ensuredir(trace_dir)
            fqdn = socket.getfqdn()
            with open(os.path.join(trace_dir, fqdn), "w") as trace_file:
                subprocess.check_call(["date", "-u"], stdout=trace_file)
        finally:
            osextras.unlink_force(manifest_lock)

        subprocess.check_call([
            os.path.join(self.config.root, "bin", "post-qa"), date,
            ] + published)
Example No. 12
    def publish_binary(self, publish_type, arch, date):
        in_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        out_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        source_dir = os.path.join(self.image_output, arch)
        source_prefix = os.path.join(source_dir, in_prefix)
        target_dir = os.path.join(self.publish_base, date)
        target_prefix = os.path.join(target_dir, out_prefix)

        if not os.path.exists("%s.raw" % source_prefix):
            logger.warning("No %s image for %s!" % (publish_type, arch))
            for name in osextras.listdir_force(target_dir):
                if name.startswith("%s." % out_prefix):
                    os.unlink(os.path.join(target_dir, name))
            return

        logger.info("Publishing %s ..." % arch)
        osextras.ensuredir(target_dir)
        extension = self.detect_image_extension(source_prefix)
        shutil.move(
            "%s.raw" % source_prefix, "%s.%s" % (target_prefix, extension))
        if os.path.exists("%s.list" % source_prefix):
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
        self.checksum_dirs.append(source_dir)
        with ChecksumFileSet(
            self.config, target_dir, sign=False) as checksum_files:
            checksum_files.remove("%s.%s" % (out_prefix, extension))

        # Jigdo integration
        if os.path.exists("%s.jigdo" % source_prefix):
            logger.info("Publishing %s jigdo ..." % arch)
            shutil.move("%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
            shutil.move(
                "%s.template" % source_prefix, "%s.template" % target_prefix)
            if self.jigdo_ports(arch):
                self.replace_jigdo_mirror(
                    "%s.jigdo" % target_prefix,
                    "http://archive.ubuntu.com/ubuntu",
                    "http://ports.ubuntu.com/ubuntu-ports")
        else:
            osextras.unlink_force("%s.jigdo" % target_prefix)
            osextras.unlink_force("%s.template" % target_prefix)

        # Live filesystem manifests
        if os.path.exists("%s.manifest" % source_prefix):
            logger.info("Publishing %s live manifest ..." % arch)
            shutil.move(
                "%s.manifest" % source_prefix, "%s.manifest" % target_prefix)
        else:
            osextras.unlink_force("%s.manifest" % target_prefix)

        if (self.config["CDIMAGE_SQUASHFS_BASE"] and
            os.path.exists("%s.squashfs" % source_prefix)):
            logger.info("Publishing %s squashfs ..." % arch)
            shutil.move(
                "%s.squashfs" % source_prefix, "%s.squashfs" % target_prefix)
        else:
            osextras.unlink_force("%s.squashfs" % target_prefix)

        # Flashable Android boot images
        if os.path.exists("%s.bootimg" % source_prefix):
            logger.info("Publishing %s abootimg bootloader images ..." % arch)
            shutil.move(
                "%s.bootimg" % source_prefix, "%s.bootimg" % target_prefix)

        # zsync metafiles
        if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
            logger.info("Making %s zsync metafile ..." % arch)
            osextras.unlink_force("%s.%s.zsync" % (target_prefix, extension))
            zsyncmake(
                "%s.%s" % (target_prefix, extension),
                "%s.%s.zsync" % (target_prefix, extension),
                "%s.%s" % (out_prefix, extension))

        size = os.stat("%s.%s" % (target_prefix, extension)).st_size
        if size > self.size_limit_extension(extension):
            with open("%s.OVERSIZED" % target_prefix, "a"):
                pass
        else:
            osextras.unlink_force("%s.OVERSIZED" % target_prefix)

        yield os.path.join(self.project, self.image_type_dir, in_prefix)