Example #1
def can_sign(config):
    gpgconf, secring, pubring, trustdb = _gnupg_files(config)
    if (not os.path.exists(secring) or not os.path.exists(pubring)
            or not os.path.exists(trustdb) or not config["SIGNING_KEYID"]):
        logger.warning("No keys found; not signing images.")
        return False
    return True
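A minimal usage sketch, assuming a config mapping like the one can_sign() inspects; the signing call shown is hypothetical, not part of the example above.

# Hypothetical usage: only attempt to sign published images when the GnuPG
# keyrings and a signing key ID are present.
if can_sign(config):
    sign_image(config, "%s.iso" % target_prefix)   # hypothetical helper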
Example #2
    def detect_image_extension(self, source_prefix):
        subp = subprocess.Popen(
            ["file", "-b", "%s.raw" % source_prefix], stdout=subprocess.PIPE,
            universal_newlines=True)
        output = subp.communicate()[0].rstrip("\n")
        if output.startswith("# "):
            output = output[2:]

        if output.startswith("ISO 9660 CD-ROM filesystem data "):
            return "iso"
        elif output.startswith("x86 boot sector"):
            return "img"
        elif output.startswith("gzip compressed data"):
            with open("%s.type" % source_prefix) as compressed_type:
                real_output = compressed_type.readline().rstrip("\n")
            if real_output.startswith("ISO 9660 CD-ROM filesystem data "):
                return "iso.gz"
            elif real_output.startswith("x86 boot sector"):
                return "img.gz"
            elif real_output.startswith("tar archive"):
                return "tar.gz"
            else:
                logger.warning(
                    "Unknown compressed file type '%s'; assuming .img.gz" %
                    real_output)
                return "img.gz"
        else:
            logger.warning("Unknown file type '%s'; assuming .iso" % output)
            return "iso"
Example #3
    def name_to_series(self, name):
        """Return the series for a file basename."""
        version = name.split("-")[1]
        try:
            return Series.find_by_version(".".join(version.split(".")[:2]))
        except ValueError:
            logger.warning("Unknown version: %s" % version)
            raise
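A short sketch of the expected input; the filename and the behaviour of Series.find_by_version are assumptions.

# Hypothetical usage: "ubuntu-22.04.3-desktop-amd64.iso" yields version
# "22.04.3", which is truncated to "22.04" before the series lookup.
series = publisher.name_to_series("ubuntu-22.04.3-desktop-amd64.iso")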
Example #4
def can_sign(config):
    gpgconf, secring, pubring, trustdb = _gnupg_files(config)
    if (
        not os.path.exists(secring)
        or not os.path.exists(pubring)
        or not os.path.exists(trustdb)
        or not config["SIGNING_KEYID"]
    ):
        logger.warning("No keys found; not signing images.")
        return False
    return True
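For context, a hedged sketch of the kind of helper _gnupg_files() is assumed to be; the GNUPG_DIR key and keyring filenames are assumptions, not taken from the examples.

import os

def _gnupg_files(config):
    # Assumed shape of the helper used above: return the GnuPG files that
    # can_sign() checks for.
    gnupg_dir = config["GNUPG_DIR"]
    return (
        os.path.join(gnupg_dir, "gpg.conf"),
        os.path.join(gnupg_dir, "secring.gpg"),
        os.path.join(gnupg_dir, "pubring.gpg"),
        os.path.join(gnupg_dir, "trustdb.gpg"),
    )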
Example #5
def _prepare_check_installable(config):
    _, image_top, live, data = _check_installable_dirs(config)
    mkemptydir(data)

    for fullarch in config.arches:
        arch = fullarch.split("+")[0]

        packages = os.path.join(data, "Packages_%s" % arch)
        with open(packages, "wb") as packages_file:
            if config["CDIMAGE_SQUASHFS_BASE"]:
                squashfs = os.path.join(live, "%s.squashfs" % fullarch)
                if os.path.exists(squashfs):
                    _ensure_tempdir()
                    with open("/dev/null", "w") as devnull:
                        subprocess.check_call([
                            "unsquashfs",
                            "-d",
                            os.path.join(_tempdir, fullarch),
                            squashfs,
                            "/var/lib/dpkg/status",
                        ],
                                              stdout=devnull)
                    status_path = os.path.join(_tempdir, fullarch, "var",
                                               "lib", "dpkg", "status")
                    with open(os.path.join(status_path)) as status:
                        subprocess.call([
                            "grep-dctrl",
                            "-XFStatus",
                            "install ok installed",
                        ],
                                        stdin=status,
                                        stdout=packages_file)

            for component in "main", "restricted", "universe", "multiverse":
                packages_gz = os.path.join(image_top,
                                           "%s-%s" % (config.series, fullarch),
                                           "CD1", "dists", config.series,
                                           component, "binary-%s" % arch,
                                           "Packages.gz")
                if os.path.exists(packages_gz):
                    packages_gz_file = gzip.GzipFile(packages_gz)
                    try:
                        packages_file.write(packages_gz_file.read())
                    finally:
                        packages_gz_file.close()

        if os.stat(packages).st_size == 0:
            logger.warning("No Packages.gz for %s/%s; not checking" %
                           (config.series, arch))
            os.unlink(packages)

    with open(os.path.join(data, "Sources"), "w"):
        pass
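The grep-dctrl invocation above keeps only the stanzas of the dpkg status file whose Status field is exactly "install ok installed". A pure-Python sketch of that filter, for illustration only:

def installed_stanzas(status_text):
    # Equivalent in spirit to grep-dctrl -XFStatus "install ok installed":
    # yield each status-file paragraph describing an installed package.
    for stanza in status_text.split("\n\n"):
        if "\nStatus: install ok installed\n" in "\n%s\n" % stanza:
            yield stanza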
Example #6
def tracker_set_rebuild_status(config, current_state, new_state, arches=None):
    # Only import it here as we need to have the right paths in sys.path
    try:
        from isotracker import ISOTracker
    except ImportError:
        # Become a no-op if the isotracker module can't be found
        return

    if not arches:
        arches = config.arches
    if not isinstance(arches, list):
        arches = [arches]

    tree = Tree.get_daily(config)
    publisher = Publisher.get_daily(tree, "daily")

    # Build a dict of tracker instance and product list
    qa_products = {}
    for arch in arches:
        qaproduct = publisher.qa_product(config.project, config.image_type,
                                         None, arch)

        if not qaproduct:
            continue

        if qaproduct[1] not in qa_products:
            qa_products[qaproduct[1]] = []

        qa_products[qaproduct[1]].append(qaproduct[0])

    # Iterate through the trackers and set the new status
    for instance, products in qa_products.items():
        try:
            tracker = ISOTracker(target="%s-%s" %
                                 (instance, config.full_series))
        except xmlrpclib.Error as e:
            logger.warning("Unable to contact tracker: %s" % e)
            continue

        for rebuild in tracker.qatracker.get_rebuilds(current_state):
            if rebuild.series_title.lower() != config.full_series:
                continue

            if rebuild.product_title in products:
                rebuild.status = new_state
                rebuild.save()
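A heavily hedged usage sketch; the rebuild state values understood by the QA tracker are placeholders, not taken from the example.

# Hypothetical call: move any matching rebuilds for all configured arches
# from one tracker state to another.  The state values are placeholders.
tracker_set_rebuild_status(config, current_state=0, new_state=1)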
Example #7
def _prepare_check_installable(config):
    _, image_top, live, data = _check_installable_dirs(config)
    mkemptydir(data)

    for fullarch in config.arches:
        arch = fullarch.split("+")[0]

        packages = os.path.join(data, "Packages_%s" % arch)
        with open(packages, "w") as packages_file:
            if config["CDIMAGE_SQUASHFS_BASE"]:
                squashfs = os.path.join(live, "%s.squashfs" % fullarch)
                if os.path.exists(squashfs):
                    _ensure_tempdir()
                    with open("/dev/null", "w") as devnull:
                        subprocess.check_call([
                            "unsquashfs",
                            "-d", os.path.join(_tempdir, fullarch),
                            squashfs, "/var/lib/dpkg/status",
                            ], stdout=devnull)
                    status_path = os.path.join(
                        _tempdir, fullarch, "var", "lib", "dpkg", "status")
                    with open(os.path.join(status_path)) as status:
                        subprocess.call([
                            "grep-dctrl", "-XFStatus", "install ok installed",
                            ], stdin=status, stdout=packages_file)

            for component in "main", "restricted", "universe", "multiverse":
                packages_gz = os.path.join(
                    image_top, "%s-%s" % (config.series, fullarch), "CD1",
                    "dists", config.series, component, "binary-%s" % arch,
                    "Packages.gz")
                if os.path.exists(packages_gz):
                    packages_gz_file = gzip.GzipFile(packages_gz)
                    try:
                        packages_file.write(packages_gz_file.read())
                    finally:
                        packages_gz_file.close()

        if os.stat(packages).st_size == 0:
            logger.warning(
                "No Packages.gz for %s/%s; not checking" %
                (config.series, arch))
            os.unlink(packages)

    with open(os.path.join(data, "Sources"), "w"):
        pass
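One note on the try/finally block above: on current Python, gzip files are context managers, so an equivalent sketch (not the project's code, and assuming packages_file is opened in binary mode as in Example #5) would be:

with gzip.open(packages_gz, "rb") as packages_gz_file:
    packages_file.write(packages_gz_file.read())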
Example #8
    def publish_source(self, date):
        for i in count(1):
            in_prefix = "%s-src-%d" % (self.config.series, i)
            out_prefix = "%s-src-%d" % (self.config.series, i)
            source_dir = os.path.join(self.image_output, "src")
            source_prefix = os.path.join(source_dir, in_prefix)
            target_dir = os.path.join(self.publish_base, date, "source")
            target_prefix = os.path.join(target_dir, out_prefix)
            if not os.path.exists("%s.raw" % source_prefix):
                break

            logger.info("Publishing source %d ..." % i)
            osextras.ensuredir(target_dir)
            shutil.move("%s.raw" % source_prefix, "%s.iso" % target_prefix)
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
            with ChecksumFileSet(
                self.config, target_dir, sign=False) as checksum_files:
                checksum_files.remove("%s.iso" % out_prefix)

            # Jigdo integration
            if os.path.exists("%s.jigdo" % source_prefix):
                logger.info("Publishing source %d jigdo ..." % i)
                shutil.move(
                    "%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
                shutil.move(
                    "%s.template" % source_prefix,
                    "%s.template" % target_prefix)
            else:
                logger.warning("No jigdo for source %d!" % i)
                osextras.unlink_force("%s.jigdo" % target_prefix)
                osextras.unlink_force("%s.template" % target_prefix)

            # zsync metafiles
            if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
                logger.info("Making source %d zsync metafile ..." % i)
                osextras.unlink_force("%s.iso.zsync" % target_prefix)
                zsyncmake(
                    "%s.iso" % target_prefix, "%s.iso.zsync" % target_prefix,
                    "%s.iso" % out_prefix)

            yield os.path.join(
                self.project, self.image_type, "%s-src" % self.config.series)
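publish_source() is a generator yielding one QA product path per published source image; a hypothetical consumer, mirroring how Example #10 collects the results:

# Hypothetical usage; "publisher" and the date string are placeholders.
published = list(publisher.publish_source("20240101"))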
Example #9
def extract_debootstrap(config):
    output_dir = os.path.join(config.root, "scratch", config.project,
                              config.full_series, config.image_type,
                              "debootstrap")

    osextras.ensuredir(output_dir)

    for fullarch in config.arches:
        arch = fullarch.split("+")[0]
        mirror = find_mirror(config, arch)
        # TODO: This might be more sensible with python-debian or python-apt.
        packages_path = os.path.join(mirror, "dists", config.series, "main",
                                     "debian-installer", "binary-%s" % arch,
                                     "Packages.gz")
        with gzip.GzipFile(packages_path, "rb") as packages:
            grep_dctrl = subprocess.Popen(
                ["grep-dctrl", "-nsFilename", "-PX", "debootstrap-udeb"],
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE)
            udeb, _ = grep_dctrl.communicate(packages.read())
        if not isinstance(udeb, str):
            udeb = udeb.decode()
        udeb = udeb.rstrip("\n")
        udeb_path = os.path.join(mirror, udeb)
        if not udeb or not os.path.exists(udeb_path):
            logger.warning("No debootstrap-udeb for %s/%s!" %
                           (config.series, arch))
            continue
        # TODO: With python-debian, we could extract the one file we need
        # directly.
        unpack_dir = os.path.join(output_dir, "unpack-%s" % fullarch)
        try:
            shutil.rmtree(unpack_dir)
        except OSError:
            pass
        subprocess.check_call(["dpkg", "-x", udeb_path, unpack_dir])
        shutil.copy2(
            os.path.join(unpack_dir, _debootstrap_script(config)),
            os.path.join(output_dir, "%s-%s" % (config.series, fullarch)))
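A hedged sketch of what _debootstrap_script() is assumed to return: a path inside the unpacked debootstrap-udeb. The layout is an assumption, not taken from the example.

def _debootstrap_script(config):
    # Assumed helper: per-series debootstrap script inside the unpacked udeb.
    return "usr/share/debootstrap/scripts/%s" % config.series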
Example #10
    def publish(self, date):
        self.new_publish_dir(date)
        published = []
        self.checksum_dirs = []
        if not self.config["CDIMAGE_ONLYSOURCE"]:
            for arch in self.config.arches:
                published.extend(
                    list(self.publish_binary(self.publish_type, arch, date)))
            if self.project == "edubuntu" and self.publish_type == "server":
                for arch in self.config.arches:
                    published.extend(
                        list(self.publish_binary("serveraddon", arch, date)))
        published.extend(list(self.publish_source(date)))

        if not published:
            logger.warning("No CDs produced!")
            return

        target_dir = os.path.join(self.publish_base, date)

        source_report = os.path.join(
            self.britney_report, "%s_probs.html" % self.config.series)
        target_report = os.path.join(target_dir, "report.html")
        if (self.config["CDIMAGE_INSTALL_BASE"] and
            os.path.exists(source_report)):
            shutil.copy2(source_report, target_report)
        else:
            osextras.unlink_force(target_report)

        if not self.config["CDIMAGE_ONLYSOURCE"]:
            checksum_directory(
                self.config, target_dir, old_directories=self.checksum_dirs,
                map_expr=r"s/\.\(img\|img\.gz\|iso\|iso\.gz\|tar\.gz\)$/.raw/")
            subprocess.check_call(
                [os.path.join(self.config.root, "bin", "make-web-indices"),
                 target_dir, self.config.series, "daily"])

        target_dir_source = os.path.join(target_dir, "source")
        if os.path.isdir(target_dir_source):
            checksum_directory(
                self.config, target_dir_source,
                old_directories=[os.path.join(self.image_output, "src")],
                map_expr=r"s/\.\(img\|img\.gz\|iso\|iso\.gz\|tar\.gz\)$/.raw/")
            subprocess.check_call(
                [os.path.join(self.config.root, "bin", "make-web-indices"),
                 target_dir_source, self.config.series, "daily"])

        if (self.image_type.endswith("-live") or
            self.image_type.endswith("dvd")):
            # Create and publish metalink files.
            md5sums_metalink = os.path.join(target_dir, "MD5SUMS-metalink")
            md5sums_metalink_gpg = os.path.join(
                target_dir, "MD5SUMS-metalink.gpg")
            osextras.unlink_force(md5sums_metalink)
            osextras.unlink_force(md5sums_metalink_gpg)
            basedir, reldir = self.metalink_dirs(date)
            if subprocess.call([
                os.path.join(self.config.root, "bin", "make-metalink"),
                basedir, self.config.series, reldir, "cdimage.ubuntu.com",
                ]) == 0:
                metalink_checksum_directory(self.config, target_dir)
            else:
                for name in os.listdir(target_dir):
                    if name.endswith(".metalink"):
                        osextras.unlink_force(os.path.join(target_dir, name))

        publish_current = os.path.join(self.publish_base, "current")
        osextras.unlink_force(publish_current)
        os.symlink(date, publish_current)

        manifest_lock = os.path.join(
            self.config.root, "etc", ".lock-manifest-daily")
        try:
            subprocess.check_call(["lockfile", "-r", "4", manifest_lock])
        except subprocess.CalledProcessError:
            logger.error("Couldn't acquire manifest-daily lock!")
            raise
        try:
            manifest_daily = os.path.join(
                self.tree.directory, ".manifest-daily")
            with AtomicFile(manifest_daily) as manifest_daily_file:
                for line in self.tree.manifest():
                    print(line, file=manifest_daily_file)
            os.chmod(
                manifest_daily, os.stat(manifest_daily).st_mode | stat.S_IWGRP)

            # Create timestamps for this run.
            # TODO cjwatson 20120807: Shouldn't these be in www/full
            # rather than www/full[/project]?
            trace_dir = os.path.join(self.full_tree, ".trace")
            osextras.ensuredir(trace_dir)
            fqdn = socket.getfqdn()
            with open(os.path.join(trace_dir, fqdn), "w") as trace_file:
                subprocess.check_call(["date", "-u"], stdout=trace_file)
        finally:
            osextras.unlink_force(manifest_lock)

        subprocess.check_call([
            os.path.join(self.config.root, "bin", "post-qa"), date,
            ] + published)
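A hypothetical top-level driver for publish(); the tree/publisher construction mirrors Example #6, and the image type and date string are placeholders.

# Hypothetical driver; "daily" and the date are placeholders.
tree = Tree.get_daily(config)
publisher = Publisher.get_daily(tree, "daily")
publisher.publish("20240101")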
Example #11
    def publish_binary(self, publish_type, arch, date):
        in_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        out_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        source_dir = os.path.join(self.image_output, arch)
        source_prefix = os.path.join(source_dir, in_prefix)
        target_dir = os.path.join(self.publish_base, date)
        target_prefix = os.path.join(target_dir, out_prefix)

        if not os.path.exists("%s.raw" % source_prefix):
            logger.warning("No %s image for %s!" % (publish_type, arch))
            for name in osextras.listdir_force(target_dir):
                if name.startswith("%s." % out_prefix):
                    os.unlink(os.path.join(target_dir, name))
            return

        logger.info("Publishing %s ..." % arch)
        osextras.ensuredir(target_dir)
        extension = self.detect_image_extension(source_prefix)
        shutil.move(
            "%s.raw" % source_prefix, "%s.%s" % (target_prefix, extension))
        if os.path.exists("%s.list" % source_prefix):
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
        self.checksum_dirs.append(source_dir)
        with ChecksumFileSet(
            self.config, target_dir, sign=False) as checksum_files:
            checksum_files.remove("%s.%s" % (out_prefix, extension))

        # Jigdo integration
        if os.path.exists("%s.jigdo" % source_prefix):
            logger.info("Publishing %s jigdo ..." % arch)
            shutil.move("%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
            shutil.move(
                "%s.template" % source_prefix, "%s.template" % target_prefix)
            if self.jigdo_ports(arch):
                self.replace_jigdo_mirror(
                    "%s.jigdo" % target_prefix,
                    "http://archive.ubuntu.com/ubuntu",
                    "http://ports.ubuntu.com/ubuntu-ports")
        else:
            osextras.unlink_force("%s.jigdo" % target_prefix)
            osextras.unlink_force("%s.template" % target_prefix)

        # Live filesystem manifests
        if os.path.exists("%s.manifest" % source_prefix):
            logger.info("Publishing %s live manifest ..." % arch)
            shutil.move(
                "%s.manifest" % source_prefix, "%s.manifest" % target_prefix)
        else:
            osextras.unlink_force("%s.manifest" % target_prefix)

        if (self.config["CDIMAGE_SQUASHFS_BASE"] and
            os.path.exists("%s.squashfs" % source_prefix)):
            logger.info("Publishing %s squashfs ..." % arch)
            shutil.move(
                "%s.squashfs" % source_prefix, "%s.squashfs" % target_prefix)
        else:
            osextras.unlink_force("%s.squashfs" % target_prefix)

        # Flashable Android boot images
        if os.path.exists("%s.bootimg" % source_prefix):
            logger.info("Publishing %s abootimg bootloader images ..." % arch)
            shutil.move(
                "%s.bootimg" % source_prefix, "%s.bootimg" % target_prefix)

        # zsync metafiles
        if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
            logger.info("Making %s zsync metafile ..." % arch)
            osextras.unlink_force("%s.%s.zsync" % (target_prefix, extension))
            zsyncmake(
                "%s.%s" % (target_prefix, extension),
                "%s.%s.zsync" % (target_prefix, extension),
                "%s.%s" % (out_prefix, extension))

        size = os.stat("%s.%s" % (target_prefix, extension)).st_size
        if size > self.size_limit_extension(extension):
            with open("%s.OVERSIZED" % target_prefix, "a"):
                pass
        else:
            osextras.unlink_force("%s.OVERSIZED" % target_prefix)

        yield os.path.join(self.project, self.image_type_dir, in_prefix)
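publish_binary() is also a generator; a hypothetical per-architecture driver loop, mirroring how publish() in Example #10 consumes it:

# Hypothetical usage; "publisher", the publish type and date are placeholders.
published = []
for arch in config.arches:
    published.extend(list(publisher.publish_binary("desktop", arch, date)))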