Example #1
 def set_packages_hash(self):
     # sort and deduplicate requested packages
     if "packages" in self.params:
         self.params["packages"] = sorted(list(set(
             self.params["packages"])))
         self.params["packages_hash"] = get_hash(
             " ".join(self.params["packages"]), 12)
     else:
         self.params["packages"] = ""
         self.params["packages_hash"] = ""
Example #2
    def __init__(self, params):
        self.config = Config()
        self.log = logging.getLogger(__name__)
        self.log.info("config initialized")
        self.database = Database(self.config)
        self.log.info("database initialized")
        self.params = params

        if "defaults_hash" not in self.params:
            self.params["defaults_hash"] = ""
            if "defaults" in self.params:
                if self.params["defaults"] != "":
                    self.params["defaults_hash"] = get_hash(
                        self.params["defaults"], 32)
        if not self.params["defaults_hash"]:
            self.params["defaults_hash"] = ""
Example #3
    def build(self):
        self.log.debug("create and parse manifest")

        # fail log path in case of errors
        fail_log_path = self.config.get_folder(
            "download_folder") + "/faillogs/faillog-{}.txt".format(
                self.params["request_hash"])

        self.image = Image(self.params)

        if self.params["packages_hash"]:
            packages_image = set(self.database.get_packages_image(self.params))
            self.log.debug("packages_image %s", packages_image)
            packages_requested = set(
                self.database.get_packages_hash(self.params["packages_hash"]))
            self.log.debug("packages_requested %s", packages_requested)
            packages_remove = packages_image - packages_requested
            self.log.debug("packages_remove %s", packages_remove)
            packages_requested.update(
                set(map(lambda x: "-" + x, packages_remove)))
            self.params["packages"] = " ".join(packages_requested)
            self.log.debug("packages param %s", self.params["packages"])
        else:
            self.log.debug("build package with default packages")

        # first determine the resulting manifest hash
        return_code, manifest_content, errors = self.run_meta("manifest")

        if return_code == 0:
            self.image.params["manifest_hash"] = get_hash(manifest_content, 15)

            manifest_pattern = r"(.+) - (.+)\n"
            manifest_packages = re.findall(manifest_pattern, manifest_content)
            self.database.add_manifest_packages(
                self.image.params["manifest_hash"], manifest_packages)
            self.log.info("successfully parsed manifest")
        else:
            self.log.error("couldn't determine manifest")
            print(manifest_content)
            print(errors)
            self.write_log(fail_log_path, stderr=errors)
            self.database.set_requests_status(self.params["request_hash"],
                                              "manifest_fail")
            return False

        # set directory where image is stored on server
        self.image.set_image_dir()
        self.log.debug("dir %s", self.image.params["dir"])

        # calculate hash based on resulted manifest
        self.image.params["image_hash"] = get_hash(
            " ".join(self.image.as_array("manifest_hash")), 15)

        # set log path in case of success
        success_log_path = self.image.params[
            "dir"] + "/buildlog-{}.txt".format(self.params["image_hash"])

        # set build_status in advance; it will be changed if something goes wrong
        self.build_status = "created"

        # check if image already exists
        if not self.image.created() or not self.database.image_exists(
                self.params["image_hash"]):
            self.log.info("build image")
            with tempfile.TemporaryDirectory() as build_dir:
                # now actually build the image with manifest hash as
                # EXTRA_IMAGE_NAME
                self.log.info("build image at %s", build_dir)
                self.params["worker"] = self.location
                self.params["BIN_DIR"] = build_dir
                self.params["j"] = str(os.cpu_count())
                self.params["EXTRA_IMAGE_NAME"] = self.params["manifest_hash"]
                # if uci defaults are added, append part of the defaults hash
                # to the image name
                if self.params["defaults_hash"]:
                    defaults_dir = build_dir + "/files/etc/uci-defaults/"
                    # create folder to store uci defaults
                    os.makedirs(defaults_dir)
                    # request defaults content from database
                    defaults_content = self.database.get_defaults(
                        self.params["defaults_hash"])
                    # TODO check if special encoding is required
                    with open(defaults_dir + "99-server-defaults",
                              "w") as defaults_file:
                        defaults_file.write(defaults_content)

                    # tell ImageBuilder to integrate files
                    self.params["FILES"] = build_dir + "/files/"
                    self.params["EXTRA_IMAGE_NAME"] += (
                        "-" + self.params["defaults_hash"][:6])

                # download is already performed for manifest creation
                self.params["NO_DOWNLOAD"] = "1"

                build_start = time.time()
                return_code, buildlog, errors = self.run_meta("image")
                self.image.params["build_seconds"] = int(time.time() -
                                                         build_start)

                if return_code == 0:
                    # create folder in advance
                    os.makedirs(self.image.params["dir"], exist_ok=True)

                    self.log.debug(os.listdir(build_dir))

                    for filename in os.listdir(build_dir):
                        if os.path.exists(self.image.params["dir"] + "/" +
                                          filename):
                            break
                        shutil.move(build_dir + "/" + filename,
                                    self.image.params["dir"])

                    # possible sysupgrade file names, ordered by likelihood
                    possible_sysupgrade_files = [
                        "*-squashfs-sysupgrade.bin",
                        "*-squashfs-sysupgrade.tar",
                        "*-squashfs-nand-sysupgrade.bin",
                        "*-squashfs.trx",
                        "*-squashfs.chk",
                        "*-squashfs.bin",
                        "*-squashfs-sdcard.img.gz",
                        "*-combined-squashfs*",
                        "*.img.gz",
                    ]

                    sysupgrade = None

                    for sysupgrade_file in possible_sysupgrade_files:
                        sysupgrade = glob.glob(self.image.params["dir"] + "/" +
                                               sysupgrade_file)
                        if sysupgrade:
                            break

                    if not sysupgrade:
                        self.log.debug("sysupgrade not found")
                        if buildlog.find("too big") != -1:
                            self.log.warning("created image was to big")
                            self.database.set_requests_status(
                                self.params["request_hash"], "imagesize_fail")
                            self.write_log(fail_log_path, buildlog, errors)
                            return False
                        else:
                            self.build_status = "no_sysupgrade"
                            self.image.params["sysupgrade"] = ""
                    else:
                        self.image.params["sysupgrade"] = os.path.basename(
                            sysupgrade[0])

                    self.write_log(success_log_path, buildlog)
                    self.database.insert_dict("images",
                                              self.image.get_params())
                    self.log.info("build successfull")
                else:
                    self.log.info("build failed")
                    self.database.set_requests_status(
                        self.params["request_hash"], "build_fail")
                    self.write_log(fail_log_path, buildlog, errors)
                    return False
        else:
            self.log.info("image already there")

        self.log.info(
            "link request %s to image %s",
            self.params["request_hash"],
            self.params["image_hash"],
        )
        self.database.done_build_job(
            self.params["request_hash"],
            self.image.params["image_hash"],
            self.build_status,
        )
        return True
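run_meta and write_log are used throughout build() but are not included in these excerpts. A minimal sketch of write_log that is consistent with the three call sites above; the real method may name the sections and format the file differently:

    def write_log(self, path, stdout=None, stderr=None):
        # hypothetical sketch: persist ImageBuilder output so a build can be
        # inspected later via the success or fail log path
        with open(path, "w") as log_file:
            if stdout:
                log_file.write("### STDOUT\n\n" + stdout + "\n\n")
            if stderr:
                log_file.write("### STDERR\n\n" + stderr + "\n")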
Example #4
    def _process_request(self):
        self.log.debug("request_json: %s", self.request_json)

        # if request_hash is available check the database directly
        if "request_hash" in self.request_json:
            self.request = self.database.check_request_hash(
                self.request_json["request_hash"])

            if not self.request:
                self.response_status = HTTPStatus.NOT_FOUND
                return self.respond()
            else:
                return self.return_status()

        # TODO check for profile or board

        # generic approach for
        # https://github.com/aparcar/attendedsysupgrade-server/issues/91
        self.request_json["board"] = self.request_json["board"].replace(
            ",", "_")

        self.request_json["profile"] = self.request_json[
            "board"]  # TODO fix this workaround

        request_hash = get_request_hash(self.request_json)
        request_database = self.database.check_request_hash(request_hash)

        # if found, return the status immediately
        if request_database:
            self.log.debug("found image in database: %s",
                           request_database["request_status"])
            self.request = request_database
            return self.return_status()
        else:
            self.request["request_hash"] = request_hash
            self.response_json["request_hash"] = self.request["request_hash"]

        # if not, perform various checks to see if the request is actually valid

        # validate distro and version
        if "distro" not in self.request_json:
            self.response_status = HTTPStatus.PRECONDITION_FAILED  # 412
            self.response_header["X-Missing-Param"] = "distro"
            return self.respond()
        else:
            bad_request = self.check_bad_distro()
            if bad_request:
                return bad_request

        if "version" not in self.request_json:
            self.request["version"] = self.config.get(
                self.request["distro"]).get("latest")
        else:
            bad_request = self.check_bad_version()
            if bad_request:
                return bad_request

        # check for valid target
        bad_target = self.check_bad_target()
        if bad_target:
            return bad_target

        # validate attached defaults
        if "defaults" in self.request_json:
            if self.request_json["defaults"]:
                # check if the uci file exceeds the max file size. this should
                # be done as the uci-defaults are at least temporarily stored in
                # the database to be passed to a worker
                if getsizeof(self.request_json["defaults"]) > self.config.get(
                        "max_defaults_size", 1024):
                    self.response_json[
                        "error"] = "attached defaults exceed max size"
                    self.response_status = (
                        420)  # this error code is the best I could find
                    return self.respond()
                else:
                    self.request["defaults_hash"] = get_hash(
                        self.request_json["defaults"], 32)
                    self.database.insert_defaults(
                        self.request["defaults_hash"],
                        self.request_json["defaults"])

        # add package_hash to database
        if "packages" in self.request_json:
            # check for existing packages
            bad_packages = self.check_bad_packages(
                self.request_json["packages"])
            if bad_packages:
                return bad_packages
            self.request["packages_hash"] = get_packages_hash(
                self.request_json["packages"])
            self.database.insert_packages_hash(self.request["packages_hash"],
                                               self.request["packages"])

        # now some heavy guesswork is done to figure out the profile;
        # eventually this could be simplified if upstream unifies the
        # profiles/boards
        if "board" in self.request_json:
            self.log.debug("board in request, search for %s",
                           self.request_json["board"])
            self.request["profile"] = self.database.check_profile(
                self.request["distro"],
                self.request["version"],
                self.request["target"],
                self.request_json["board"],
            )

        if not self.request["profile"]:
            if "model" in self.request_json:
                self.log.debug("model in request, search for %s",
                               self.request_json["model"])
                self.request["profile"] = self.database.check_model(
                    self.request["distro"],
                    self.request["version"],
                    self.request["target"],
                    self.request_json["model"],
                )
                self.log.debug("model search found profile %s",
                               self.request["profile"])

        if not self.request["profile"]:
            if self.database.check_profile(
                    self.request["distro"],
                    self.request["version"],
                    self.request["target"],
                    "Generic",
            ):
                self.request["profile"] = "Generic"
            elif self.database.check_profile(
                    self.request["distro"],
                    self.request["version"],
                    self.request["target"],
                    "generic",
            ):
                self.request["profile"] = "generic"
            else:
                self.response_json[
                    "error"] = "unknown device, please check model and board params"
                self.response_status = HTTPStatus.PRECONDITION_FAILED  # 412
                return self.respond()

        # all checks passed, eventually add to queue!
        self.log.debug("add build job %s", self.request)
        self.database.add_build_job(self.request)
        return self.return_queued()
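get_request_hash, which lets identical build requests share one database entry, is not shown either. A hypothetical sketch, reusing the get_hash helper sketched under Example #1 and assuming the hash covers the request fields that influence the resulting image; the real implementation may select and normalize the fields differently:

def get_request_hash(request):
    # hypothetical field list; anything that changes the produced image must be included
    fields = ["distro", "version", "target", "board", "packages", "defaults_hash"]
    return get_hash(" ".join(str(request.get(field, "")) for field in fields), 12)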
Example #5
    def _process_request(self):
        if "distro" not in self.request_json:
            self.response_status = HTTPStatus.PRECONDITION_FAILED  # 412
            self.response_header["X-Missing-Param"] = "distro"
            return self.respond()
        else:
            bad_request = self.check_bad_distro()
            if bad_request:
                return bad_request
            self.log.debug("passed distro check")

        if "target" not in self.request_json:
            self.response_status = HTTPStatus.PRECONDITION_FAILED  # 412
            self.response_header["X-Missing-Param"] = "target"
            return self.respond()

        if "version" not in self.request_json:
            self.response_json["version"] = self.config.get(self.request["distro"]).get(
                "latest"
            )
            return self.respond()
        else:
            bad_request = self.check_bad_version()
            if bad_request:
                return bad_request
            self.log.debug("passed version check")
            if self.config.version(self.request["distro"], self.request["version"]).get(
                "snapshots", False
            ):
                revision = self.database.get_revision(
                    self.request["distro"],
                    self.request["version"],
                    self.request_json["target"],
                )
                if self.request_json.get("revision") != revision:
                    self.response_json["revision"] = revision
                    self.response_json["version"] = self.request["version"]
            else:
                latest_version = self.config.get(self.request["distro"]).get("latest")
                if latest_version != self.request["version"]:
                    self.response_json["version"] = latest_version
                else:
                    self.response_status = HTTPStatus.NO_CONTENT  # 204

        # check if target/subtarget still exists in the new version
        bad_request = self.check_bad_target()
        if bad_request:
            return bad_request

        if "installed" not in self.request_json:
            return self.respond()
        else:
            bad_request = self.check_bad_packages(self.request_json["installed"].keys())
            if bad_request:
                return bad_request

        self.outdated_version = self.request["version"]
        self.request["version"] = self.request_json["version"]

        # check if packages exists in new version
        bad_request = self.check_bad_packages(self.request_json["installed"].keys())
        if bad_request:
            return bad_request

        # if a version jump happens make sure to check for package changes,
        # drops & renames
        if "version" in self.response_json:
            # this version jump may transform packages, e.g. kmod-ipv6 was
            # dropped in the 17.01 release as it became part of the kernel.
            # this function checks for these changes and tells the client
            # which packages to request in the build request
            self.response_json["packages"] = self.database.transform_packages(
                self.request["distro"],
                self.outdated_version,
                self.request["version"],
                " ".join(self.request_json["installed"].keys()),
            )
            self.response_status = HTTPStatus.OK  # 200
        else:
            self.response_json["packages"] = list(self.request_json["installed"].keys())
            self.response_status = HTTPStatus.NO_CONTENT  # 204

        manifest_content = ""
        for package, version in sorted(self.request_json["installed"].items()):
            manifest_content += "{} - {}\n".format(package, version)
        self.request["manifest_hash"] = get_hash(manifest_content, 15)

        self.request["manifest"] = self.request_json["installed"]

        if "version" in self.response_json or "upgrade_packages" in self.request_json:
            # TODO this results in double JSON handling: postgres already returns
            # valid JSON while the rest of the response is a dict that is only
            # encoded at the very end
            self.response_json["upgrades"] = json.loads(
                self.database.get_manifest_upgrades(self.request)
            )
            if self.response_json["upgrades"] != {}:
                self.response_status = HTTPStatus.OK  # 200

        # finally respond
        return self.respond()
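A short illustration of the manifest hashing at the end of the handler, using a made-up installed dict and the get_hash sketch from Example #1:

# hypothetical package/version pairs reported by the client as installed
installed = {"busybox": "1.30.1-5", "base-files": "204.2"}

manifest_content = ""
for package, version in sorted(installed.items()):
    manifest_content += "{} - {}\n".format(package, version)
# manifest_content == "base-files - 204.2\nbusybox - 1.30.1-5\n"

manifest_hash = get_hash(manifest_content, 15)

Sorting by package name makes the hash independent of the order in which the client lists its packages, so identical installations map to the same manifest_hash.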