Example #1
    def germinate_project(self, project):
        osextras.mkemptydir(self.output_dir(project))

        for arch in self.config.arches:
            logger.info("Germinating for %s/%s ..." %
                        (self.config.series, arch))
            self.germinate_arch(project, arch)
Example #2
def zsyncmake(infile, outfile, url):
    command = ["zsyncmake"]
    if infile.endswith(".gz"):
        command.append("-Z")
    command.extend(["-o", outfile, "-u", url, infile])
    if subprocess.call(command) != 0:
        logger.info("Trying again with block size 2048 ...")
        command[1:1] = ["-b", "2048"]
        subprocess.check_call(command)
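
The retry works by splicing extra options into the already-built argument list with slice assignment: command[1:1] = ["-b", "2048"] inserts them immediately after the executable name before re-running. A minimal standalone sketch of the same pattern, using a hypothetical run_with_fallback helper and an arbitrary command rather than zsyncmake:

import subprocess

def run_with_fallback(command, extra_args):
    # Hypothetical helper: run the command once; if it exits non-zero,
    # splice extra_args in right after argv[0] and run it again.
    if subprocess.call(command) != 0:
        retry = list(command)
        retry[1:1] = list(extra_args)
        subprocess.check_call(retry)

# Succeeds on the first attempt, so no retry happens.
run_with_fallback(["ls", "."], ["-l"])

Using check_call for the second attempt means a failed retry raises CalledProcessError rather than being silently ignored, which matches the wrapper above.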
Example #3
    def live_build_finished(arch, full_name, machine, status, text_status,
                            lp_build=None):
        timestamp = time.strftime("%F %T")
        logger.info("%s on %s finished at %s (%s)" % (
            full_name, machine, timestamp, text_status))
        tracker_set_rebuild_status(config, [0, 1, 2], 3, arch)
        if status == 0:
            successful.add(arch)
            if arch == "amd64" and "amd64+mac" in config.arches:
                successful.add("amd64+mac")
        else:
            live_build_notify_failure(config, arch, lp_build=lp_build)
Example #4
def _trigger_mirror(config, key, user, host, background=False):
    logger.info("%s:" % host)
    command = [
        "ssh",
        "-i",
        key,
        "-o",
        "StrictHostKeyChecking no",
        "-o",
        "BatchMode yes",
        "%s@%s" % (user, host),
        _trigger_command(config),
    ]
    if background:
        subprocess.Popen(command)
    else:
        subprocess.call(command)
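
The background flag switches between subprocess.Popen, which returns as soon as the child has been started, and subprocess.call, which blocks until it exits. A minimal sketch of that split, with a hypothetical run helper:

import subprocess

def run(command, background=False):
    # Hypothetical helper mirroring the background/foreground split above.
    if background:
        return subprocess.Popen(command)  # returns immediately; caller may wait() later
    return subprocess.call(command)       # blocks and returns the exit status

run(["echo", "fire and forget"], background=True)
status = run(["echo", "blocking"])

Nothing waits on the backgrounded process in _trigger_mirror either: backgrounded mirror triggers are fire-and-forget.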
Example #5
    def publish_source(self, date):
        for i in count(1):
            in_prefix = "%s-src-%d" % (self.config.series, i)
            out_prefix = "%s-src-%d" % (self.config.series, i)
            source_dir = os.path.join(self.image_output, "src")
            source_prefix = os.path.join(source_dir, in_prefix)
            target_dir = os.path.join(self.publish_base, date, "source")
            target_prefix = os.path.join(target_dir, out_prefix)
            if not os.path.exists("%s.raw" % source_prefix):
                break

            logger.info("Publishing source %d ..." % i)
            osextras.ensuredir(target_dir)
            shutil.move("%s.raw" % source_prefix, "%s.iso" % target_prefix)
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
            with ChecksumFileSet(
                self.config, target_dir, sign=False) as checksum_files:
                checksum_files.remove("%s.iso" % out_prefix)

            # Jigdo integration
            if os.path.exists("%s.jigdo" % source_prefix):
                logger.info("Publishing source %d jigdo ..." % i)
                shutil.move(
                    "%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
                shutil.move(
                    "%s.template" % source_prefix,
                    "%s.template" % target_prefix)
            else:
                logger.warning("No jigdo for source %d!" % i)
                osextras.unlink_force("%s.jigdo" % target_prefix)
                osextras.unlink_force("%s.template" % target_prefix)

            # zsync metafiles
            if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
                logger.info("Making source %d zsync metafile ..." % i)
                osextras.unlink_force("%s.iso.zsync" % target_prefix)
                zsyncmake(
                    "%s.iso" % target_prefix, "%s.iso.zsync" % target_prefix,
                    "%s.iso" % out_prefix)

            yield os.path.join(
                self.project, self.image_type, "%s-src" % self.config.series)
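
publish_source is a generator: it walks the numbered -src-1, -src-2, ... images until the next .raw file is missing and yields one tree path per published image, so nothing is published until a caller iterates it. A self-contained sketch of that loop shape, with hypothetical source_dir and handle stand-ins:

import os
from itertools import count

def publish_numbered(source_dir, handle):
    # Hypothetical sketch: process src-1.raw, src-2.raw, ... until the next
    # file is missing, yielding one result per image found.
    for i in count(1):
        raw = os.path.join(source_dir, "src-%d.raw" % i)
        if not os.path.exists(raw):
            break
        yield handle(raw)

# Nothing runs until the generator is iterated.
for published in publish_numbered("/tmp", lambda path: path):
    print(published)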
Example #6
def log_marker(message):
    logger.info("===== %s =====" % message)
    logger.info(time.strftime("%a %b %e %H:%M:%S UTC %Y", time.gmtime()))
Example #7
def run_live_builds(config):
    builds = {}
    lp_builds = []
    for arch in config.arches:
        if arch == "amd64+mac":
            # Use normal amd64 live image on amd64+mac.
            continue
        full_name = live_build_full_name(config, arch)
        timestamp = time.strftime("%F %T")
        lp, lp_livefs = get_lp_livefs(config, arch)
        if lp_livefs is None:
            machine = live_builder(config, arch)
        else:
            machine = "Launchpad"
        logger.info(
            "%s on %s starting at %s" % (full_name, machine, timestamp))
        tracker_set_rebuild_status(config, [0, 1], 2, arch)
        if lp_livefs is not None:
            lp_kwargs = live_build_lp_kwargs(config, lp, lp_livefs, arch)
            lp_build = lp_livefs.requestBuild(**lp_kwargs)
            logger.info("%s: %s" % (full_name, lp_build.web_link))
            lp_builds.append((lp_build, arch, full_name, machine, None))
        else:
            proc = subprocess.Popen(live_build_command(config, arch))
            builds[proc.pid] = (proc, arch, full_name, machine)

    successful = set()

    def live_build_finished(arch, full_name, machine, status, text_status,
                            lp_build=None):
        timestamp = time.strftime("%F %T")
        logger.info("%s on %s finished at %s (%s)" % (
            full_name, machine, timestamp, text_status))
        tracker_set_rebuild_status(config, [0, 1, 2], 3, arch)
        if status == 0:
            successful.add(arch)
            if arch == "amd64" and "amd64+mac" in config.arches:
                successful.add("amd64+mac")
        else:
            live_build_notify_failure(config, arch, lp_build=lp_build)

    while builds or lp_builds:
        # Check for non-Launchpad build results.
        if builds:
            pid, status = os.waitpid(0, os.WNOHANG)
            if pid and pid in builds:
                _, arch, full_name, machine = builds.pop(pid)
                live_build_finished(
                    arch, full_name, machine, status,
                    "success" if status == 0 else "failed")

        # Check for Launchpad build results.
        pending_lp_builds = []
        for lp_item in lp_builds:
            lp_build, arch, full_name, machine, log_timeout = lp_item
            lp_build.lp_refresh()
            if lp_build.buildstate in (
                    "Needs building", "Currently building", "Uploading build"):
                pending_lp_builds.append(lp_item)
            elif lp_build.buildstate == "Successfully built":
                live_build_finished(
                    arch, full_name, machine, 0, lp_build.buildstate,
                    lp_build=lp_build)
            elif (lp_build.build_log_url is None and
                  (log_timeout is None or time.time() < log_timeout)):
                # Wait up to five minutes for Launchpad to fetch the build
                # log from the slave.  We need a timeout since in rare cases
                # this might fail.
                if log_timeout is None:
                    log_timeout = time.time() + 300
                pending_lp_builds.append(
                    (lp_build, arch, full_name, machine, log_timeout))
            else:
                live_build_finished(
                    arch, full_name, machine, 1, lp_build.buildstate,
                    lp_build=lp_build)
        lp_builds = pending_lp_builds

        if lp_builds:
            # Wait a while before polling Launchpad again.  If a
            # non-Launchpad build completes in the meantime, it will
            # interrupt this sleep with SIGCHLD.
            time.sleep(15)

    if not successful:
        raise LiveBuildsFailed("No live filesystem builds succeeded.")
    return successful
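
The local-build half of this loop depends on os.waitpid(0, os.WNOHANG): it reaps any finished child without blocking and returns (0, 0) when none have exited yet, so the loop can move on to polling Launchpad. A Unix-only sketch of that non-blocking reaping pattern, using throwaway sleep processes in place of live builds:

import os
import subprocess
import time

# Hypothetical stand-ins for the non-Launchpad live builds.
procs = {}
for seconds in ("1", "2"):
    proc = subprocess.Popen(["sleep", seconds])
    procs[proc.pid] = proc

statuses = {}
while procs:
    pid, status = os.waitpid(0, os.WNOHANG)  # (0, 0) while no child has exited
    if pid and pid in procs:
        procs.pop(pid)
        statuses[pid] = status
    else:
        time.sleep(0.1)  # nothing finished yet; poll again shortly
print(statuses)

The value returned by waitpid is the raw wait status, so the status == 0 comparison in live_build_finished only treats a clean exit as success.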
Example #8
    def germinate_arch(self, project, arch):
        cpuarch = arch.split("+")[0]

        for dist in self.germinate_dists:
            for suffix in (
                    "binary-%s/Packages.gz" % cpuarch,
                    "source/Sources.gz",
                    "debian-installer/binary-%s/Packages.gz" % cpuarch,
            ):
                files = [
                    "dists/%s/%s/%s" % (dist, component, suffix)
                    for component in self.components
                ]
                if self.config["LOCAL"]:
                    files.append("%s/dists/%s/local/%s" %
                                 (self.config["LOCALDEBS"], dist, suffix))
                self.make_index(project, arch, files[0], files)

        arch_output_dir = os.path.join(self.output_dir(project), arch)
        osextras.mkemptydir(arch_output_dir)
        if (self.config["GERMINATE_HINTS"]
                and os.path.isfile(self.config["GERMINATE_HINTS"])):
            shutil.copy2(self.config["GERMINATE_HINTS"],
                         os.path.join(arch_output_dir, "hints"))
        command = [
            self.germinate_path,
            "--seed-source",
            ",".join(self.seed_sources(project)),
            "--mirror",
            "file://%s/" % self.output_dir(project),
            "--seed-dist",
            self.seed_dist(project),
            "--dist",
            ",".join(self.germinate_dists),
            "--arch",
            cpuarch,
            "--components",
            "main",
            "--no-rdepends",
        ]
        if self.use_bzr:
            command.append("--bzr")
        if self.config.image_type == "source":
            command.append("--always-follow-build-depends")
        proxy_check_call(self.config,
                         "germinate",
                         command,
                         cwd=arch_output_dir)
        output_structure = os.path.join(self.output_dir(project), "STRUCTURE")
        shutil.copy2(os.path.join(arch_output_dir, "structure"),
                     output_structure)

        if self.config.series == "breezy":
            # Unfortunately, we now need a second germinate run to figure
            # out the dependencies of language packs and the like.
            extras = []
            with open(os.path.join(arch_output_dir, "ship.acsets"),
                      "w") as ship_acsets:
                output = GerminateOutput(self.config, output_structure)
                for pkg in output.seed_packages(arch, "ship.seed"):
                    extras.append("desktop/%s" % pkg)
                    print(pkg, file=ship_acsets)
            if extras:
                logger.info(
                    "Re-germinating for %s/%s language pack dependencies ..." %
                    (self.config.series, arch))
                command.extend(["--seed-packages", ",".join(extras)])
                proxy_check_call(self.config,
                                 "germinate",
                                 command,
                                 cwd=arch_output_dir)
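
The germinate invocation is built as a plain argument list: fixed options first, then flags appended only when the configuration calls for them, and the same list is reused with --seed-packages appended for the breezy re-run. A small sketch of that conditional construction, with hypothetical parameter names and only the flags that appear above:

def build_germinate_command(tool, mirror, arch, use_bzr=False, source_image=False):
    # Hypothetical sketch: fixed options first, conditional flags appended.
    command = [
        tool,
        "--mirror", mirror,
        "--arch", arch,
        "--components", "main",
        "--no-rdepends",
    ]
    if use_bzr:
        command.append("--bzr")
    if source_image:
        command.append("--always-follow-build-depends")
    return command

print(build_germinate_command("germinate", "file:///srv/mirror/", "amd64", use_bzr=True))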
Example #9
def send_mail(subject, generator, recipients, body, dry_run=False):
    if dry_run:
        logger.info("Would send mail to: %s" % ", ".join(recipients))
        logger.info("")
        logger.info("Subject: %s" % subject)
        logger.info("X-Generated-By: %s" % generator)
        logger.info("")
        if isinstance(body, text_file_type):
            for line in body:
                logger.info(line.rstrip("\n"))
        else:
            for line in body.splitlines():
                logger.info(line)
        logger.info("")
    else:
        command = [
            "mail", "-s", subject, "-a", "X-Generated-By: %s" % generator]
        command.extend(recipients)
        if isinstance(body, text_file_type):
            mailer = subprocess.Popen(command, stdin=body)
        else:
            mailer = subprocess.Popen(command, stdin=subprocess.PIPE)
            if bytes is not str and isinstance(body, str):
                body = body.encode()
            mailer.stdin.write(body)
            mailer.stdin.close()
        mailer.wait()
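
When the body is an in-memory string rather than an open file, it has to be written to the child through a pipe, and on Python 3 it must be encoded to bytes first, which is what the bytes is not str check guards. A self-contained sketch of that stdin handling, with a hypothetical pipe_to_command helper:

import subprocess

def pipe_to_command(command, body):
    # Hypothetical helper: file-like bodies are handed to the child directly,
    # strings are encoded and written down a pipe.
    if hasattr(body, "read"):
        proc = subprocess.Popen(command, stdin=body)
    else:
        proc = subprocess.Popen(command, stdin=subprocess.PIPE)
        if isinstance(body, str):
            body = body.encode()  # pipes carry bytes, not text
        proc.stdin.write(body)
        proc.stdin.close()
    return proc.wait()

pipe_to_command(["cat"], "hello from a string\n")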
Example #10
    def publish_binary(self, publish_type, arch, date):
        in_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        out_prefix = "%s-%s-%s" % (self.config.series, publish_type, arch)
        source_dir = os.path.join(self.image_output, arch)
        source_prefix = os.path.join(source_dir, in_prefix)
        target_dir = os.path.join(self.publish_base, date)
        target_prefix = os.path.join(target_dir, out_prefix)

        if not os.path.exists("%s.raw" % source_prefix):
            logger.warning("No %s image for %s!" % (publish_type, arch))
            for name in osextras.listdir_force(target_dir):
                if name.startswith("%s." % out_prefix):
                    os.unlink(os.path.join(target_dir, name))
            return

        logger.info("Publishing %s ..." % arch)
        osextras.ensuredir(target_dir)
        extension = self.detect_image_extension(source_prefix)
        shutil.move(
            "%s.raw" % source_prefix, "%s.%s" % (target_prefix, extension))
        if os.path.exists("%s.list" % source_prefix):
            shutil.move("%s.list" % source_prefix, "%s.list" % target_prefix)
        self.checksum_dirs.append(source_dir)
        with ChecksumFileSet(
            self.config, target_dir, sign=False) as checksum_files:
            checksum_files.remove("%s.%s" % (out_prefix, extension))

        # Jigdo integration
        if os.path.exists("%s.jigdo" % source_prefix):
            logger.info("Publishing %s jigdo ..." % arch)
            shutil.move("%s.jigdo" % source_prefix, "%s.jigdo" % target_prefix)
            shutil.move(
                "%s.template" % source_prefix, "%s.template" % target_prefix)
            if self.jigdo_ports(arch):
                self.replace_jigdo_mirror(
                    "%s.jigdo" % target_prefix,
                    "http://archive.ubuntu.com/ubuntu",
                    "http://ports.ubuntu.com/ubuntu-ports")
        else:
            osextras.unlink_force("%s.jigdo" % target_prefix)
            osextras.unlink_force("%s.template" % target_prefix)

        # Live filesystem manifests
        if os.path.exists("%s.manifest" % source_prefix):
            logger.info("Publishing %s live manifest ..." % arch)
            shutil.move(
                "%s.manifest" % source_prefix, "%s.manifest" % target_prefix)
        else:
            osextras.unlink_force("%s.manifest" % target_prefix)

        if (self.config["CDIMAGE_SQUASHFS_BASE"] and
            os.path.exists("%s.squashfs" % source_prefix)):
            logger.info("Publishing %s squashfs ..." % arch)
            shutil.move(
                "%s.squashfs" % source_prefix, "%s.squashfs" % target_prefix)
        else:
            osextras.unlink_force("%s.squashfs" % target_prefix)

        # Flashable Android boot images
        if os.path.exists("%s.bootimg" % source_prefix):
            logger.info("Publishing %s abootimg bootloader images ..." % arch)
            shutil.move(
                "%s.bootimg" % source_prefix, "%s.bootimg" % target_prefix)

        # zsync metafiles
        if self.try_zsyncmake and osextras.find_on_path("zsyncmake"):
            logger.info("Making %s zsync metafile ..." % arch)
            osextras.unlink_force("%s.%s.zsync" % (target_prefix, extension))
            zsyncmake(
                "%s.%s" % (target_prefix, extension),
                "%s.%s.zsync" % (target_prefix, extension),
                "%s.%s" % (out_prefix, extension))

        size = os.stat("%s.%s" % (target_prefix, extension)).st_size
        if size > self.size_limit_extension(extension):
            with open("%s.OVERSIZED" % target_prefix, "a"):
                pass
        else:
            osextras.unlink_force("%s.OVERSIZED" % target_prefix)

        yield os.path.join(self.project, self.image_type_dir, in_prefix)
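
The size check at the end uses an empty marker file as a flag: open(..., "a") touches *.OVERSIZED when the image exceeds its limit, and the marker is removed otherwise so that a later, smaller build clears it. A small sketch of the same touch-or-clear pattern, approximating unlink_force with an existence check (names are hypothetical):

import os

def mark_oversized(image_path, size_limit, marker_path):
    # Hypothetical sketch of the OVERSIZED flag handling above.
    if os.stat(image_path).st_size > size_limit:
        with open(marker_path, "a"):
            pass  # touch the marker without modifying any existing content
    elif os.path.exists(marker_path):
        os.unlink(marker_path)

mark_oversized("/bin/sh", 1, "/tmp/example.OVERSIZED")  # /bin/sh is larger than 1 byte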