def update_arch_packages(branch: dict, arch: str):
    """Update the package index of a specific architecture

    Args:
        branch(dict): Containing all branch information as defined in BRANCHES
        arch(str): Architecture within branch
    """
    current_app.logger.info(f"Update {branch['name']}/{arch}")
    r = get_redis()

    packages_path = branch["path_packages"].format(branch=branch["name"])

    if not is_modified(current_app.config["UPSTREAM_URL"] +
                       f"/{packages_path}/{arch}/feeds.conf"):
        current_app.logger.debug(f"{branch['name']}/{arch}: Skip package update")
        return

    packages = {}

    # first update extra repos in case they contain packages redundant to core
    for name, url in branch.get("extra_repos", {}).items():
        current_app.logger.debug(f"Update extra repo {name} at {url}")
        packages.update(parse_packages_file(f"{url}/Packages.manifest", name))

    # update default repositories afterwards so they overwrite redundancies
    for repo in branch["repos"]:
        repo_packages = get_packages_arch_repo(branch, arch, repo)
        current_app.logger.debug(
            f"{branch['name']}/{arch}/{repo}: Found {len(repo_packages)} packages")
        packages.update(repo_packages)

    if len(packages) == 0:
        current_app.logger.warning(f"{branch['name']}/{arch}: No packages found")
        return

    output_path = current_app.config["JSON_PATH"] / packages_path
    output_path.mkdir(exist_ok=True, parents=True)

    # full manifest containing all parsed package fields
    (output_path / f"{arch}-manifest.json").write_text(
        json.dumps(packages, sort_keys=True, separators=(",", ":")))

    # slim name -> version index
    package_index = {name: package["version"] for name, package in packages.items()}

    (output_path / f"{arch}-index.json").write_text(
        json.dumps(package_index, sort_keys=True, separators=(",", ":")))

    current_app.logger.info(f"{arch}: found {len(package_index)} packages")
    r.sadd(f"packages-{branch['name']}-{arch}", *package_index.keys())
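
# Illustrative sketch of the two artifacts update_arch_packages() writes; the
# package name, version, and fields below are invented assumptions, only the
# shape is taken from the code above. {arch}-manifest.json keeps every parsed
# field per source package, {arch}-index.json is the slim name -> version map.
_EXAMPLE_ARCH_MANIFEST = {
    "vim": {
        "package": "vim",
        "version": "8.2-1",
        "repository": "packages",
    },
}
_EXAMPLE_ARCH_INDEX = {
    "vim": "8.2-1",
}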
def update_target_packages(branch: dict, version: str, target: str):
    """Update the package index of a specific target

    Args:
        branch(dict): Containing all branch information as defined in BRANCHES
        version(str): Version within branch
        target(str): Target within version
    """
    current_app.logger.info(f"{version}/{target}: Update packages")
    version_path = branch["path"].format(version=version)
    r = get_redis()

    if not is_modified(
            current_app.config["UPSTREAM_URL"] +
            f"/{version_path}/targets/{target}/packages/Packages.manifest"):
        current_app.logger.debug(f"{version}/{target}: Skip package update")
        return

    packages = get_packages_target_base(branch, version, target)

    if len(packages) == 0:
        current_app.logger.warning(f"{version}/{target}: No packages found")
        return

    current_app.logger.debug(f"{version}/{target}: Found {len(packages)} packages")

    r.sadd(f"packages-{branch['name']}-{version}-{target}", *packages.keys())

    output_path = current_app.config["JSON_PATH"] / version_path / "targets" / target
    output_path.mkdir(exist_ok=True, parents=True)

    (output_path / "manifest.json").write_text(
        json.dumps(packages, sort_keys=True, separators=(",", ":")))

    package_index = {name: package["version"] for name, package in packages.items()}

    # base-files is installed on every target, so its architecture is the
    # architecture of the target as a whole
    (output_path / "index.json").write_text(
        json.dumps(
            {
                "architecture": packages["base-files"]["architecture"],
                "packages": package_index,
            },
            sort_keys=True,
            separators=(",", ":"),
        ))

    current_app.logger.info(f"{version}: found {len(package_index)} packages")

    r.sadd(f"packages-{branch['name']}-{version}", *package_index.keys())
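
# Illustrative sketch of the per-target index.json written above; the
# architecture and version strings are invented assumptions. Clients can use
# "architecture" to resolve the matching architecture-wide package index.
_EXAMPLE_TARGET_INDEX = {
    "architecture": "mips_24kc",
    "packages": {
        "base-files": "1448-r21420",
        "busybox": "1.33.2-1",
    },
}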
def parse_packages_file(url, repo):
    """Download and parse a Packages.manifest file

    Args:
        url(str): URL of the Packages.manifest file
        repo(str): Repository name stored alongside each parsed package

    Returns:
        dict: Parsed packages keyed by source package name
    """
    r = get_redis()
    req = requests.get(url)

    if req.status_code != 200:
        current_app.logger.warning(f"No Packages found at {url}")
        return {}

    packages = {}
    mapping = {}
    linebuffer = ""
    # stanzas are separated by blank lines; append a sentinel blank line so
    # the last stanza is flushed even without a trailing newline
    for line in req.text.splitlines() + [""]:
        if line == "":
            if not linebuffer:
                continue
            # each stanza is RFC 2822-style, so the stdlib email parser works
            package = email.parser.Parser().parsestr(linebuffer)
            source_name = package.get("SourceName")
            if source_name:
                packages[source_name] = {
                    name.lower().replace("-", "_"): val
                    for name, val in package.items()
                }
                packages[source_name]["repository"] = repo
                package_name = package.get("Package")
                if source_name != package_name:
                    # binary package name differs from source name (ABI version)
                    mapping[package_name] = source_name
            else:
                current_app.logger.warning(
                    f"Skip package stanza without SourceName: {package}")
            linebuffer = ""
        else:
            linebuffer += line + "\n"

    for package, source in mapping.items():
        if not r.hexists("mapping-abi", package):
            current_app.logger.info(f"{repo}: Add ABI mapping {package} -> {source}")
            r.hset("mapping-abi", package, source)

    return packages
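
# Minimal sketch (not used by the server) of the stanza format
# parse_packages_file() consumes and the key normalisation it applies; the
# field values are invented for illustration, not real index content.
def _example_stanza_parse():
    import email.parser

    stanza = ("Package: luci-app-example\n"
              "Version: 1.0-1\n"
              "SourceName: luci-app-example\n"
              "Architecture: all\n")
    package = email.parser.Parser().parsestr(stanza)
    # header names are lower-cased and dashes become underscores, mirroring
    # the mapping in parse_packages_file()
    return {name.lower().replace("-", "_"): val for name, val in package.items()}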
def update_branch(branch):
    """Update all versions and targets of a branch

    Args:
        branch(dict): Containing all branch information as defined in BRANCHES
    """
    r = get_redis()
    version_path = branch["path"].format(version=branch["versions"][0])
    targets = list(
        filter(
            lambda t: not t.startswith("."),
            requests.get(current_app.config["UPSTREAM_URL"] +
                         f"/{version_path}/targets?json-targets").json(),
        ))

    if not targets:
        current_app.logger.warning(f"No targets found for {branch['name']}")
        return

    r.sadd(f"targets-{branch['name']}", *targets)

    packages_path = branch["path_packages"].format(branch=branch["name"])
    output_path = current_app.config["JSON_PATH"] / packages_path
    output_path.mkdir(exist_ok=True, parents=True)

    for version in branch["versions"]:
        current_app.logger.info(f"Update {branch['name']}/{version}")
        version_path = branch["path"].format(version=version)
        version_path_abs = current_app.config["JSON_PATH"] / version_path
        version_path_abs.mkdir(exist_ok=True, parents=True)

        # link the branch-wide package directory into every version
        packages_symlink = version_path_abs / "packages"
        if not packages_symlink.exists():
            packages_symlink.symlink_to(output_path)

        for target in targets:
            update_target_packages(branch, version, target)

        for target in targets:
            update_target_profiles(branch, version, target)

        overview = {
            "branch": branch["name"],
            "release": version,
            "image_url": current_app.config["UPSTREAM_URL"] +
            f"/{version_path}/targets/{{target}}",
            "profiles": [],
        }

        for profile_file in (version_path_abs / "targets").rglob("*.json"):
            if profile_file.stem in ["index", "manifest", "overview"]:
                continue
            profile = json.loads(profile_file.read_text())
            overview["profiles"].append({
                "id": profile_file.stem,
                "target": profile["target"],
                "titles": profile["titles"],
            })

        (version_path_abs / "overview.json").write_text(
            json.dumps(overview, sort_keys=True, separators=(",", ":")))

    for arch in r.smembers(f"architectures-{branch['name']}"):
        update_arch_packages(branch, arch.decode("utf-8"))
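
# Hypothetical branch configuration showing the keys update_branch() and the
# helpers above read from a BRANCHES entry; every value here is an assumption
# for illustration, not the project's actual configuration.
_EXAMPLE_BRANCH = {
    "name": "21.02",
    "path": "releases/{version}",
    "path_packages": "releases/packages-{branch}",
    "versions": ["21.02.0"],
    "repos": ["base", "packages", "luci"],
    "extra_repos": {},
}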
def update_target_profiles(branch: dict, version: str, target: str):
    """Update available profiles of a specific version

    Args:
        branch(dict): Containing all branch information as defined in BRANCHES
        version(str): Version within branch
        target(str): Target within version
    """
    current_app.logger.info(f"{version}/{target}: Update profiles")
    r = get_redis()
    version_path = branch["path"].format(version=version)
    profiles_url = (current_app.config["UPSTREAM_URL"] +
                    f"/{version_path}/targets/{target}/profiles.json")

    req = requests.head(profiles_url)

    if req.status_code != 200:
        current_app.logger.warning(
            f"{version}/{target}: Could not download profiles.json")
        return

    if not is_modified(profiles_url):
        current_app.logger.debug(f"{version}/{target}: Skip profiles update")
        return

    metadata = requests.get(profiles_url).json()
    profiles = metadata.pop("profiles", {})

    r.sadd(f"architectures-{branch['name']}", metadata["arch_packages"])
    r.hset(f"architecture-{branch['name']}", target, metadata["arch_packages"])

    queue = Queue(connection=r)
    registry = FinishedJobRegistry(queue=queue)

    # drop stored builds of the previous revision since they are now outdated
    version_code = r.get(f"revision-{version}-{target}")
    if version_code:
        version_code = version_code.decode()
        for request_hash in r.smembers(f"builds-{version_code}-{target}"):
            current_app.logger.warning(
                f"{version_code}/{target}: Delete outdated build job")
            try:
                request_hash = request_hash.decode()
                registry.remove(request_hash, delete_job=True)
                rmtree(current_app.config["STORE_PATH"] / request_hash)
            except NoSuchJobError:
                current_app.logger.warning("Job was already deleted")
        r.delete(f"builds-{version_code}-{target}")

    r.set(f"revision-{version}-{target}", metadata["version_code"])

    current_app.logger.info(f"{version}/{target}: Found {len(profiles)} profiles")

    for profile, data in profiles.items():
        for supported in data.get("supported_devices", []):
            if not r.hexists(f"mapping-{branch['name']}-{version}-{target}",
                             supported):
                current_app.logger.info(
                    f"{version}/{target}: Add profile mapping {supported} -> {profile}"
                )
                r.hset(f"mapping-{branch['name']}-{version}-{target}", supported,
                       profile)

        r.sadd(f"profiles-{branch['name']}-{version}-{target}", profile)

        profile_path = (current_app.config["JSON_PATH"] / version_path / "targets" /
                        target / profile).with_suffix(".json")
        profile_path.parent.mkdir(exist_ok=True, parents=True)
        profile_path.write_text(
            json.dumps(
                {
                    **metadata,
                    **data,
                    "id": profile,
                    "build_at": datetime.utcfromtimestamp(
                        int(metadata.get("source_date_epoch", 0))).strftime(
                            "%Y-%m-%dT%H:%M:%S.%fZ"),
                },
                sort_keys=True,
                separators=(",", ":"),
            ))

        data["target"] = target
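
# Rough sketch of the upstream profiles.json consumed above, trimmed to the
# fields this function reads; all values are invented examples. "profiles" is
# popped off and iterated, everything else is treated as shared metadata and
# merged into each per-profile JSON file.
_EXAMPLE_PROFILES_JSON = {
    "arch_packages": "mips_24kc",
    "target": "ath79/generic",
    "version_code": "r16279-5cc0535800",
    "source_date_epoch": 1633560000,
    "profiles": {
        "tplink_tl-wdr4300-v1": {
            "supported_devices": ["tplink,tl-wdr4300-v1"],
            "titles": [{"vendor": "TP-Link", "model": "TL-WDR4300", "variant": "v1"}],
        },
    },
}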