Example #1
    def write_tasks_project(self, project, source=False):
        if source:
            master_project = "source"
        else:
            master_project = project
        output_dir = self.tasks_output_dir(master_project)
        osextras.ensuredir(output_dir)

        for arch in self.config.arches:
            initrd_packages = self.common_initrd_packages(arch)
            packages = defaultdict(list)
            cpparch = arch.replace("+", "_")
            for seed in self.list_seeds("all"):
                if seed == "supported":
                    seedsource = "%s+build-depends" % seed
                else:
                    seedsource = seed
                seed_path = self.seed_path(arch, seedsource)
                if not os.path.exists(seed_path):
                    continue
                with open(os.path.join(output_dir, seed), "a") as task_file:
                    print("#ifdef ARCH_%s" % cpparch, file=task_file)
                    for package in sorted(
                            self.task_packages(arch, seed, seedsource)):
                        if package not in initrd_packages:
                            packages[seed].append(package)
                            print(package, file=task_file)
                    print("#endif /* ARCH_%s */" % cpparch, file=task_file)

            tasks = defaultdict(list)
            for input_seeds, task in self.seed_task_mapping(project, arch):
                for input_seed in input_seeds:
                    for pkg in packages.get(input_seed, []):
                        tasks[pkg].append(task)

            # Help debian-cd to regenerate Task headers, to make sure that
            # we don't accidentally end up out of sync with the archive and
            # break the package installation step.
            # Note that the results of this will be wrong for source images,
            # but that doesn't matter since they won't be used there.
            override_path = os.path.join(output_dir, "override.%s" % arch)
            with open(override_path, "w") as override:
                for pkg, tasknames in sorted(tasks.items()):
                    print("%s  Task  %s" % (pkg, ", ".join(tasknames)),
                          file=override)
            # Help debian-cd to get priorities in sync with the current base
            # system, so that debootstrap >= 0.3.1 can work out the correct
            # set of packages to install.
            important_path = os.path.join(output_dir, "important.%s" % arch)
            with open(important_path, "w") as important_file:
                important = []
                for seed in self.list_seeds("debootstrap"):
                    important.extend(packages.get(seed, []))
                for pkg in sorted(important):
                    if not re_not_base.match(pkg):
                        print(pkg, file=important_file)

            with open(os.path.join(output_dir, "MASTER"), "w") as master:
                for entry in self.master_task_entries(project, source=source):
                    print(entry, file=master)
Example #2
    def make_deb(self, path, section, priority, files={}):
        osextras.ensuredir(os.path.dirname(path))
        build_dir = os.path.join(self.temp_dir, "make_deb")
        try:
            base = os.path.basename(path).split(".", 1)[0]
            name, version, arch = base.split("_")
            control_dir = os.path.join(build_dir, "DEBIAN")
            with mkfile(os.path.join(control_dir, "control")) as control:
                print(dedent("""\
                    Package: %s
                    Version: %s
                    Architecture: %s
                    Section: %s
                    Priority: %s
                    Maintainer: Fake Maintainer <*****@*****.**>
                    Description: fake package""") %
                      (name, version, arch, section, priority),
                      file=control)
            for file_path, file_contents in files.items():
                rel_path = os.path.join(build_dir,
                                        os.path.relpath(file_path, "/"))
                with mkfile(rel_path, mode="wb") as fp:
                    fp.write(file_contents)
            with open("/dev/null", "w") as devnull:
                subprocess.check_call(["dpkg-deb", "-b", build_dir, path],
                                      stdout=devnull)
        finally:
            shutil.rmtree(build_dir)
Example #3
    def test_new_publish_dir_skips_different_series(self):
        publisher = self.make_publisher("ubuntu", "daily")
        publish_current = os.path.join(publisher.publish_base, "current")
        osextras.ensuredir(publish_current)
        image = "warty-alternate-i386.iso"
        touch(os.path.join(publish_current, image))
        publisher.new_publish_dir("20120807")
        self.assertEqual(
            [], os.listdir(os.path.join(publisher.publish_base, "20120807")))
Example #4
    def test_new_publish_dir_honours_no_copy(self):
        self.config["CDIMAGE_NOCOPY"] = "1"
        publisher = self.make_publisher("ubuntu", "daily")
        publish_current = os.path.join(publisher.publish_base, "current")
        osextras.ensuredir(publish_current)
        touch(os.path.join(
            publish_current, "%s-alternate-i386.iso" % self.config.series))
        publisher.new_publish_dir("20120807")
        self.assertEqual(
            [], os.listdir(os.path.join(publisher.publish_base, "20120807")))
Example #5
    def test_new_publish_dir_copies_same_series(self):
        publisher = self.make_publisher("ubuntu", "daily")
        publish_current = os.path.join(publisher.publish_base, "current")
        osextras.ensuredir(publish_current)
        image = "%s-alternate-i386.iso" % self.config.series
        touch(os.path.join(publish_current, image))
        publisher.new_publish_dir("20120807")
        self.assertEqual(
            [image],
            os.listdir(os.path.join(publisher.publish_base, "20120807")))
Example #6
def update_local_indices(config):
    packages = os.path.join(config.root, "local", "packages")
    if not os.path.isdir(packages):
        return

    database = os.path.normpath(os.path.join(packages, os.pardir, "database"))
    dists = os.path.join(database, "dists")
    indices = os.path.join(database, "indices")
    pool = os.path.join(packages, "pool", "local")
    osextras.ensuredir(dists)
    osextras.ensuredir(indices)

    for arch in config.cpuarches:
        binary_list_path = os.path.join(
            dists, "%s_local_binary-%s.list" % (config.series, arch))
        di_binary_list_path = os.path.join(
            dists, "%s_local_debian-installer_binary-%s.list" % (
                config.series, arch))
        override_path = os.path.join(
            indices, "override.%s.local.%s" % (config.series, arch))
        di_override_path = os.path.join(
            indices, "override.%s.local.debian-installer.%s" % (
                config.series, arch))

        with open(binary_list_path, "w") as binary_list, \
                open(di_binary_list_path, "w") as di_binary_list, \
                open(override_path, "w") as override, \
                open(di_override_path, "w") as di_override:
            for dirpath, deb in _find_endswith(
                    pool, ["_%s.deb" % arch, "_all.deb"]):
                deb_path = os.path.join(dirpath, deb)
                print(os.path.relpath(deb_path, packages), file=binary_list)
                name = deb.split("_", 1)[0]
                section = _dpkg_field(deb_path, "Section").split("/")[-1]
                priority = _dpkg_field(deb_path, "Priority")
                print(
                    "%s\t%s\tlocal/%s" % (name, priority, section),
                    file=override)

            for dirpath, udeb in _find_endswith(
                    pool, ["_%s.udeb" % arch, "_all.udeb"]):
                udeb_path = os.path.join(dirpath, udeb)
                print(
                    os.path.relpath(udeb_path, packages), file=di_binary_list)
                name = udeb.split("_", 1)[0]
                priority = _dpkg_field(udeb_path, "Priority")
                print(
                    "%s\t%s\tlocal/debian-installer" % (name, priority),
                    file=di_override)

        osextras.ensuredir(os.path.join(
            packages, "dists", config.series, "local", "binary-%s" % arch))
        osextras.ensuredir(os.path.join(
            packages, "dists", config.series, "local", "debian-installer",
            "binary-%s" % arch))

    subprocess.check_call(
        ["apt-ftparchive", "generate", "apt-ftparchive.conf"], cwd=packages)
Example #7
    def new_publish_dir(self, date):
        """Copy previous published tree as a starting point for a new one.

        This allows single-architecture rebuilds to carry over other
        architectures from previous builds.
        """
        publish_base = self.publish_base
        publish_date = os.path.join(publish_base, date)
        publish_current = os.path.join(publish_base, "current")
        osextras.ensuredir(publish_date)
        if not self.config["CDIMAGE_NOCOPY"]:
            for name in sorted(osextras.listdir_force(publish_current)):
                if name.startswith("%s-" % self.config.series):
                    os.link(
                        os.path.join(publish_current, name),
                        os.path.join(publish_date, name))
Example #8
    def publish_source(self, date):
        for i in count(1):
            in_prefix = "%s-src-%d" % (self.config.series, i)
            out_prefix = "%s-src-%d" % (self.config.series, i)
            source_dir = os.path.join(self.image_output, "src")
            source_prefix = os.path.join(source_dir, in_prefix)
            target_dir = os.path.join(self.publish_base, date, "source")
            target_prefix = os.path.join(target_dir, out_prefix)
            if not os.path.exists("%s.raw" % source_prefix):
                break

            logger.info("Publishing source %d ..." % i)
            osextras.ensuredir(target_dir)
            shutil.move("%s.raw" % source_prefix, "%s.iso" % target_prefix)
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
            with ChecksumFileSet(
                self.config, target_dir, sign=False) as checksum_files:
                checksum_files.remove("%s.iso" % out_prefix)

            # Jigdo integration
            if os.path.exists("%s.jigdo" % source_prefix):
                logger.info("Publishing source %d jigdo ..." % i)
                shutil.move(
                    "%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
                shutil.move(
                    "%s.template" % source_prefix,
                    "%s.template" % target_prefix)
            else:
                logger.warning("No jigdo for source %d!" % i)
                osextras.unlink_force("%s.jigdo" % target_prefix)
                osextras.unlink_force("%s.template" % target_prefix)

            # zsync metafiles
            if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
                logger.info("Making source %d zsync metafile ..." % i)
                osextras.unlink_force("%s.iso.zsync" % target_prefix)
                zsyncmake(
                    "%s.iso" % target_prefix, "%s.iso.zsync" % target_prefix,
                    "%s.iso" % out_prefix)

            yield os.path.join(
                self.project, self.image_type, "%s-src" % self.config.series)
Example #9
def extract_debootstrap(config):
    output_dir = os.path.join(config.root, "scratch", config.project,
                              config.full_series, config.image_type,
                              "debootstrap")

    osextras.ensuredir(output_dir)

    for fullarch in config.arches:
        arch = fullarch.split("+")[0]
        mirror = find_mirror(config, arch)
        # TODO: This might be more sensible with python-debian or python-apt.
        packages_path = os.path.join(mirror, "dists", config.series, "main",
                                     "debian-installer", "binary-%s" % arch,
                                     "Packages.gz")
        with gzip.GzipFile(packages_path, "rb") as packages:
            grep_dctrl = subprocess.Popen(
                ["grep-dctrl", "-nsFilename", "-PX", "debootstrap-udeb"],
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE)
            udeb, _ = grep_dctrl.communicate(packages.read())
        if not isinstance(udeb, str):
            udeb = udeb.decode()
        udeb = udeb.rstrip("\n")
        udeb_path = os.path.join(mirror, udeb)
        if not udeb or not os.path.exists(udeb_path):
            logger.warning("No debootstrap-udeb for %s/%s!" %
                           (config.series, arch))
            continue
        # TODO: With python-debian, we could extract the one file we need
        # directly.
        unpack_dir = os.path.join(output_dir, "unpack-%s" % fullarch)
        try:
            shutil.rmtree(unpack_dir)
        except OSError:
            pass
        subprocess.check_call(["dpkg", "-x", udeb_path, unpack_dir])
        shutil.copy2(
            os.path.join(unpack_dir, _debootstrap_script(config)),
            os.path.join(output_dir, "%s-%s" % (config.series, fullarch)))
Example #10
    def make_publisher(self, project, image_type, **kwargs):
        self.config["PROJECT"] = project
        self.tree = DailyTree(self.config)
        publisher = DailyTreePublisher(self.tree, image_type, **kwargs)
        osextras.ensuredir(publisher.image_output)
        osextras.ensuredir(publisher.britney_report)
        osextras.ensuredir(publisher.full_tree)
        return publisher
Example #11
def open_log(config):
    if config["DEBUG"] or config["CDIMAGE_NOLOG"]:
        return None

    project = config.project
    if config["UBUNTU_DEFAULTS_LOCALE"] == "zh_CN":
        project = "ubuntu-chinese-edition"
    log_path = os.path.join(
        config.root, "log", project, config.full_series,
        "%s-%s.log" % (config.image_type, config["CDIMAGE_DATE"]))
    osextras.ensuredir(os.path.dirname(log_path))
    log = os.open(log_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o666)
    os.dup2(log, 1)
    os.dup2(log, 2)
    os.close(log)
    sys.stdout = os.fdopen(1, "w", 1)
    sys.stderr = os.fdopen(2, "w", 1)
    reset_logging()
    # Since we now know we aren't going to be spamming the terminal, it's
    # safe to crank up debian-cd's verbosity so that the logs are most
    # useful.
    config["VERBOSE"] = "3"
    return log_path
Example #12
    def test_ensuredir_previously_present(self):
        new_dir = os.path.join(self.temp_dir, "dir")
        os.mkdir(new_dir)
        osextras.ensuredir(new_dir)
        self.assertTrue(os.path.isdir(new_dir))
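
For reference, here is a minimal sketch of what osextras.ensuredir presumably does, inferred from how these examples use it (idempotent, recursive directory creation); the real implementation in the cdimage osextras module may differ in detail:

import os

def ensuredir(directory):
    # Create the directory, including any missing parents; calling this on
    # a directory that already exists (as in the test above) is a no-op.
    if not os.path.isdir(directory):
        os.makedirs(directory)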
Example #13
@contextmanager  # from contextlib; lets callers write "with mkfile(...) as f"
def mkfile(path, mode="w"):
    osextras.ensuredir(os.path.dirname(path))
    with open(path, mode) as f:
        yield f
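
A minimal usage sketch (the path below is hypothetical): because mkfile runs osextras.ensuredir on the parent directory before opening the file, callers such as Example #2 can write files like DEBIAN/control without creating the directory themselves:

with mkfile("/tmp/fake-package/DEBIAN/control") as control:
    print("Package: fake", file=control)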
Example #14
def build_livecd_base(config):
    log_marker("Downloading live filesystem images")
    download_live_filesystems(config)

    if (config.project == "ubuntu-server" and
            config.image_type == "daily-preinstalled"):
        log_marker("Copying images to debian-cd output directory")
        scratch_dir = os.path.join(
            config.root, "scratch", config.project, config.full_series,
            config.image_type)
        live_dir = os.path.join(scratch_dir, "live")
        for arch in config.arches:
            output_dir = os.path.join(scratch_dir, "debian-cd", arch)
            osextras.ensuredir(output_dir)
            live_prefix = os.path.join(live_dir, arch)
            rootfs = "%s.disk1.img.xz" % (live_prefix)
            output_prefix = os.path.join(output_dir,
                                         "%s-preinstalled-server-%s" %
                                         (config.series, arch))
            with open("%s.type" % output_prefix, "w") as f:
                print("EXT4 Filesystem Image", file=f)
            shutil.copy2(rootfs, "%s.raw" % output_prefix)
            shutil.copy2(
                "%s.manifest" % live_prefix, "%s.manifest" % output_prefix)

    if (config.project == "ubuntu-core" and
            config.image_type == "daily-live"):
        log_marker("Copying images to debian-cd output directory")
        scratch_dir = os.path.join(
            config.root, "scratch", config.project, config.full_series,
            config.image_type)
        live_dir = os.path.join(scratch_dir, "live")
        for arch in config.arches:
            output_dir = os.path.join(scratch_dir, "debian-cd", arch)
            osextras.ensuredir(output_dir)
            live_prefix = os.path.join(live_dir, arch)
            rootfs = "%s.img.xz" % (live_prefix)
            output_prefix = os.path.join(output_dir,
                                         "%s-live-core-%s" %
                                         (config.series, arch))
            with open("%s.type" % output_prefix, "w") as f:
                print("Disk Image", file=f)
            shutil.copy2(rootfs, "%s.raw" % output_prefix)
            shutil.copy2(
                "%s.manifest" % live_prefix, "%s.manifest" % output_prefix)
            shutil.copy2(
                "%s.model-assertion" % live_prefix,
                "%s.model-assertion" % output_prefix)

    if (config.project in ("ubuntu-base", "ubuntu-touch") or
        (config.project == "ubuntu-core" and
         config.subproject == "system-image")):
        log_marker("Copying images to debian-cd output directory")
        scratch_dir = os.path.join(
            config.root, "scratch", config.project, config.full_series,
            config.image_type)
        live_dir = os.path.join(scratch_dir, "live")
        for arch in config.arches:
            live_prefix = os.path.join(live_dir, arch)
            rootfs = "%s.rootfs.tar.gz" % live_prefix
            if os.path.exists(rootfs):
                output_dir = os.path.join(scratch_dir, "debian-cd", arch)
                osextras.ensuredir(output_dir)
                if config.project == "ubuntu-core":
                    output_prefix = os.path.join(
                        output_dir,
                        "%s-preinstalled-core-%s" % (config.series, arch))
                elif config.project == "ubuntu-base":
                    output_prefix = os.path.join(
                        output_dir, "%s-base-%s" % (config.series, arch))
                elif config.project == "ubuntu-touch":
                    output_prefix = os.path.join(
                        output_dir,
                        "%s-preinstalled-touch-%s" % (config.series, arch))
                shutil.copy2(rootfs, "%s.raw" % output_prefix)
                with open("%s.type" % output_prefix, "w") as f:
                    print("tar archive", file=f)
                shutil.copy2(
                    "%s.manifest" % live_prefix, "%s.manifest" % output_prefix)
                if config.project == "ubuntu-touch":
                    osextras.link_force(
                        "%s.raw" % output_prefix, "%s.tar.gz" % output_prefix)
                    add_android_support(config, arch, output_dir)
                    custom = "%s.custom.tar.gz" % live_prefix
                    if os.path.exists(custom):
                        shutil.copy2(
                            custom, "%s.custom.tar.gz" % output_prefix)
                if config.project == "ubuntu-core":
                    for dev in ("azure.device", "device", "raspi2.device",
                                "plano.device"):
                        device = "%s.%s.tar.gz" % (live_prefix, dev)
                        if os.path.exists(device):
                            shutil.copy2(
                                device, "%s.%s.tar.gz" % (output_prefix, dev))
                    for snaptype in ("os", "kernel", "raspi2.kernel",
                                     "dragonboard.kernel"):
                        snap = "%s.%s.snap" % (live_prefix, snaptype)
                        if os.path.exists(snap):
                            shutil.copy2(
                                snap, "%s.%s.snap" % (output_prefix, snaptype))
Example #15
def anonftpsync(config):
    env = dict(os.environ)
    for key, value in _anonftpsync_options(config).items():
        env[key] = value
    target = os.path.join(config.root, "ftp")
    fqdn = socket.getfqdn()
    lock_base = "Archive-Update-in-Progress-%s" % fqdn
    lock = os.path.join(target, lock_base)
    pkglist = "--include-from=" + config["RSYNC_PKGLIST_PATH"]
    if subprocess.call(
            ["lockfile", "-!", "-l", "43200", "-r", "0", lock]) == 0:
        raise Exception(
            "%s is unable to start rsync; lock file exists." % fqdn)
    try:
        log_path = os.path.join(config.root, "log", "rsync.log")
        osextras.ensuredir(os.path.dirname(log_path))
        with open(log_path, "w") as log:
            command_base = [
                "rsync", "--recursive", "--links", "--hard-links", "--times",
                "--verbose", "--stats", "--chmod=Dg+s,g+rwX",
                pkglist,
                "--exclude", lock_base,
                "--exclude", "project/trace/%s" % fqdn,
            ]
            exclude = env.get("RSYNC_EXCLUDE", "").split()
            include = env.get("RSYNC_INCLUDE", "").split()
            source_target = ["%s/" % env["RSYNC_SRC"], "%s/" % target]

            subprocess.call(
                command_base + [
                    "--exclude", "Packages*", "--exclude", "Sources*",
                    "--exclude", "Release*", "--exclude", "InRelease",
                    "--include", "i18n/by-hash/**", "--exclude", "i18n/*",
                ] + include + exclude + source_target,
                stdout=log, stderr=subprocess.STDOUT, env=env)

            # Second pass to update metadata and clean up old files.
            subprocess.call(
                command_base + [
                    "--delay-updates", "--delete", "--delete-after",
                ] + include + exclude + source_target,
                stdout=log, stderr=subprocess.STDOUT, env=env)

        # Delete dangling symlinks.
        for dirpath, _, filenames in os.walk(target):
            for filename in filenames:
                path = os.path.join(dirpath, filename)
                if os.path.islink(path) and not os.path.exists(path):
                    os.unlink(path)

        trace_dir = os.path.join(target, "project", "trace")
        osextras.ensuredir(trace_dir)
        with open(os.path.join(trace_dir, fqdn), "w") as trace:
            subprocess.check_call(["date", "-u"], stdout=trace)

        # Note: if you don't have savelog, use any other log rotation
        # facility or comment this out; the log will then simply be
        # overwritten each time.
        with open("/dev/null", "w") as devnull:
            subprocess.call(
                ["savelog", log_path],
                stdout=devnull, stderr=subprocess.STDOUT)
    finally:
        osextras.unlink_force(lock)
Example #16
    def publish(self, date):
        self.new_publish_dir(date)
        published = []
        self.checksum_dirs = []
        if not self.config["CDIMAGE_ONLYSOURCE"]:
            for arch in self.config.arches:
                published.extend(
                    list(self.publish_binary(self.publish_type, arch, date)))
            if self.project == "edubuntu" and self.publish_type == "server":
                for arch in self.config.arches:
                    published.extend(
                        list(self.publish_binary("serveraddon", arch, date)))
        published.extend(list(self.publish_source(date)))

        if not published:
            logger.warning("No CDs produced!")
            return

        target_dir = os.path.join(self.publish_base, date)

        source_report = os.path.join(
            self.britney_report, "%s_probs.html" % self.config.series)
        target_report = os.path.join(target_dir, "report.html")
        if (self.config["CDIMAGE_INSTALL_BASE"] and
            os.path.exists(source_report)):
            shutil.copy2(source_report, target_report)
        else:
            osextras.unlink_force(target_report)

        if not self.config["CDIMAGE_ONLYSOURCE"]:
            checksum_directory(
                self.config, target_dir, old_directories=self.checksum_dirs,
                map_expr=r"s/\.\(img\|img\.gz\|iso\|iso\.gz\|tar\.gz\)$/.raw/")
            subprocess.check_call(
                [os.path.join(self.config.root, "bin", "make-web-indices"),
                 target_dir, self.config.series, "daily"])

        target_dir_source = os.path.join(target_dir, "source")
        if os.path.isdir(target_dir_source):
            checksum_directory(
                self.config, target_dir_source,
                old_directories=[os.path.join(self.image_output, "src")],
                map_expr=r"s/\.\(img\|img\.gz\|iso\|iso\.gz\|tar\.gz\)$/.raw/")
            subprocess.check_call(
                [os.path.join(self.config.root, "bin", "make-web-indices"),
                 target_dir_source, self.config.series, "daily"])

        if (self.image_type.endswith("-live") or
            self.image_type.endswith("dvd")):
            # Create and publish metalink files.
            md5sums_metalink = os.path.join(target_dir, "MD5SUMS-metalink")
            md5sums_metalink_gpg = os.path.join(
                target_dir, "MD5SUMS-metalink.gpg")
            osextras.unlink_force(md5sums_metalink)
            osextras.unlink_force(md5sums_metalink_gpg)
            basedir, reldir = self.metalink_dirs(date)
            if subprocess.call([
                os.path.join(self.config.root, "bin", "make-metalink"),
                basedir, self.config.series, reldir, "cdimage.ubuntu.com",
                ]) == 0:
                metalink_checksum_directory(self.config, target_dir)
            else:
                for name in os.listdir(target_dir):
                    if name.endswith(".metalink"):
                        osextras.unlink_force(os.path.join(target_dir, name))

        publish_current = os.path.join(self.publish_base, "current")
        osextras.unlink_force(publish_current)
        os.symlink(date, publish_current)

        manifest_lock = os.path.join(
            self.config.root, "etc", ".lock-manifest-daily")
        try:
            subprocess.check_call(["lockfile", "-r", "4", manifest_lock])
        except subprocess.CalledProcessError:
            logger.error("Couldn't acquire manifest-daily lock!")
            raise
        try:
            manifest_daily = os.path.join(
                self.tree.directory, ".manifest-daily")
            with AtomicFile(manifest_daily) as manifest_daily_file:
                for line in self.tree.manifest():
                    print(line, file=manifest_daily_file)
            os.chmod(
                manifest_daily, os.stat(manifest_daily).st_mode | stat.S_IWGRP)

            # Create timestamps for this run.
            # TODO cjwatson 20120807: Shouldn't these be in www/full
            # rather than www/full[/project]?
            trace_dir = os.path.join(self.full_tree, ".trace")
            osextras.ensuredir(trace_dir)
            fqdn = socket.getfqdn()
            with open(os.path.join(trace_dir, fqdn), "w") as trace_file:
                subprocess.check_call(["date", "-u"], stdout=trace_file)
        finally:
            osextras.unlink_force(manifest_lock)

        subprocess.check_call([
            os.path.join(self.config.root, "bin", "post-qa"), date,
            ] + published)
Example #17
    def publish_binary(self, publish_type, arch, date):
        in_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        out_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        source_dir = os.path.join(self.image_output, arch)
        source_prefix = os.path.join(source_dir, in_prefix)
        target_dir = os.path.join(self.publish_base, date)
        target_prefix = os.path.join(target_dir, out_prefix)

        if not os.path.exists("%s.raw" % source_prefix):
            logger.warning("No %s image for %s!" % (publish_type, arch))
            for name in osextras.listdir_force(target_dir):
                if name.startswith("%s." % out_prefix):
                    os.unlink(os.path.join(target_dir, name))
            return

        logger.info("Publishing %s ..." % arch)
        osextras.ensuredir(target_dir)
        extension = self.detect_image_extension(source_prefix)
        shutil.move(
            "%s.raw" % source_prefix, "%s.%s" % (target_prefix, extension))
        if os.path.exists("%s.list" % source_prefix):
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
        self.checksum_dirs.append(source_dir)
        with ChecksumFileSet(
            self.config, target_dir, sign=False) as checksum_files:
            checksum_files.remove("%s.%s" % (out_prefix, extension))

        # Jigdo integration
        if os.path.exists("%s.jigdo" % source_prefix):
            logger.info("Publishing %s jigdo ..." % arch)
            shutil.move("%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
            shutil.move(
                "%s.template" % source_prefix, "%s.template" % target_prefix)
            if self.jigdo_ports(arch):
                self.replace_jigdo_mirror(
                    "%s.jigdo" % target_prefix,
                    "http://archive.ubuntu.com/ubuntu",
                    "http://ports.ubuntu.com/ubuntu-ports")
        else:
            osextras.unlink_force("%s.jigdo" % target_prefix)
            osextras.unlink_force("%s.template" % target_prefix)

        # Live filesystem manifests
        if os.path.exists("%s.manifest" % source_prefix):
            logger.info("Publishing %s live manifest ..." % arch)
            shutil.move(
                "%s.manifest" % source_prefix, "%s.manifest" % target_prefix)
        else:
            osextras.unlink_force("%s.manifest" % target_prefix)

        if (self.config["CDIMAGE_SQUASHFS_BASE"] and
            os.path.exists("%s.squashfs" % source_prefix)):
            logger.info("Publishing %s squashfs ..." % arch)
            shutil.move(
                "%s.squashfs" % source_prefix, "%s.squashfs" % target_prefix)
        else:
            osextras.unlink_force("%s.squashfs" % target_prefix)

        # Flashable Android boot images
        if os.path.exists("%s.bootimg" % source_prefix):
            logger.info("Publishing %s abootimg bootloader images ..." % arch)
            shutil.move(
                "%s.bootimg" % source_prefix, "%s.bootimg" % target_prefix)

        # zsync metafiles
        if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
            logger.info("Making %s zsync metafile ..." % arch)
            osextras.unlink_force("%s.%s.zsync" % (target_prefix, extension))
            zsyncmake(
                "%s.%s" % (target_prefix, extension),
                "%s.%s.zsync" % (target_prefix, extension),
                "%s.%s" % (out_prefix, extension))

        size = os.stat("%s.%s" % (target_prefix, extension)).st_size
        if size > self.size_limit_extension(extension):
            with open("%s.OVERSIZED" % target_prefix, "a"):
                pass
        else:
            osextras.unlink_force("%s.OVERSIZED" % target_prefix)

        yield os.path.join(self.project, self.image_type_dir, in_prefix)