def dict_to_docref(document):
    """Convert a serialized document dict back into the docref it describes.

    ``document["type"]`` is expected to be ``"git-<kind>"`` where kind is
    one of branches/branch/tree/commit/blob.

    Raises NotImplementedError for any other document type.
    """
    kind = trim(document["type"], prefix="git-")
    if kind == "branches":
        return BRANCHES_DOCREF
    elif kind == "branch":
        return BranchDocref(document["branch"])
    elif kind in ("tree", "commit", "blob"):
        # Reuse the already-computed kind instead of re-trimming the type
        # string a second time; also dropped the original's unused
        # ``id = document["_id"]`` local (it shadowed the builtin).
        return ShaDocRef(kind, document["sha"])
    else:
        raise NotImplementedError(document)
def id_to_docref(id):
    """Parse a document id of the form ``git-<kind>[-<name>]`` into a DocRef.

    The special id ``git-branches`` maps to the singleton BRANCHES_DOCREF;
    every other id must name a branch, tree, commit or blob.
    """
    remainder = trim(id, prefix="git-")
    if remainder == "branches":
        return BRANCHES_DOCREF
    kind, name = remainder.split("-", 1)
    assert kind in ("branch", "tree", "commit", "blob"), repr(id)
    return DocRef(id, kind, name)
def resolve_document_using_git(git, docref):
    """Populate and return the document dict for *docref* by querying git.

    *git* is the argv prefix for invoking git (e.g. ``["git", ...]``);
    *docref* selects what to resolve: the branch list, a single branch,
    or a commit/tree/blob by sha.

    Raises NotImplementedError for unknown docref kinds.

    NOTE(review): this definition is shadowed by a later definition of the
    same name in this file — confirm which one is intended to survive.
    """
    document = docref_to_dict(docref)
    kind = docref.kind
    # Format-string lookup against the referenced object, e.g. get("%an").
    # (The original bound a first, more elaborate lambda here that was
    # immediately overwritten — dead code, removed.  The unused local
    # ``id = docref.id`` is likewise removed.)
    get = lambda a: git_show(git, docref.name, a)
    if kind == "branches":
        branches = [trim(a, prefix=" remotes/origin/")
                    for a in read_lines(call(git + ["branch", "-a"]))]
        document["branches"] = []
        for branch in branches:
            document["branches"].append(docref_to_dict(BranchDocref(branch)))
    elif kind == "branch":
        sha = get1(
            read_lines(
                call(git + ["rev-parse", "remotes/origin/" + docref.name])))
        document["commit"] = docref_to_dict(ShaDocRef("commit", sha))
    elif kind == "commit":
        document.update(
            {"author": {"name": get("%an"),
                        "email": get("%ae"),
                        "date": get("%ai")},
             "committer": {"name": get("%cn"),
                           "email": get("%ce"),
                           "date": get("%ci")},
             "message": get("%B"),
             "tree": docref_to_dict(ShaDocRef("tree", get("%T"))),
             "parents": [],
             })
        # %P is the space-separated parent sha list; empty for root commits.
        for p in sorted(get("%P").split(" ")):
            if p == "":
                continue
            document["parents"].append(
                docref_to_dict(ShaDocRef("commit", p)))
    elif kind == "tree":
        document["children"] = []
        # ls-tree lines: "<mode> <kind> <sha>\t<basename>"
        for line in read_lines(call(git + ["ls-tree", docref.name])):
            child_mode, child_kind, rest = line.split(" ", 2)
            child_sha, child_basename = rest.split("\t", 1)
            ref = {"child": docref_to_dict(ShaDocRef(child_kind, child_sha)),
                   "basename": child_basename,
                   "mode": octal_to_symbolic_mode(child_mode)}
            document["children"].append(ref)
        # Deterministic ordering so documents hash/compare stably.
        document["children"].sort(key=lambda a: a["child"]["sha"])
    elif kind == "blob":
        blob = call(git + ["show", docref.name], do_crlf_fix=False)
        if is_text(blob):
            document["encoding"] = "raw"
            document["raw"] = blob
        else:
            document["encoding"] = "base64"
            document["base64"] = base64.b64encode(blob)
    else:
        raise NotImplementedError(kind)
    return document
def find_dependencies(document):
    """Yield the docrefs that *document* directly references.

    Blobs are leaves and yield nothing; unknown document types raise
    NotImplementedError.
    """
    kind = trim(document["type"], prefix="git-")
    if kind == "blob":
        return
    if kind == "branches":
        referenced = document["branches"]
    elif kind == "branch":
        referenced = [document["commit"]]
    elif kind == "commit":
        # Parents first, then the tree — same order as the original.
        referenced = list(document["parents"]) + [document["tree"]]
    elif kind == "tree":
        referenced = [entry["child"] for entry in document["children"]]
    else:
        raise NotImplementedError(document)
    for child in referenced:
        yield dict_to_docref(child)
def get_release_file(base_url):
    """Fetch a release file, falling back through mirrors and compressed
    variants.

    For each (src, dst) rewrite in PORTS_MIRRORS (plus the identity
    rewrite tried first), attempt the plain URL, then ``.bz2``, then
    ``.gz``, decompressing as needed.  Returns the raw file bytes.

    Raises Exception if no mirror yields the file.
    """
    for src, dst in [("", "")] + list(PORTS_MIRRORS):
        url = base_url
        if url.startswith(src):
            url = dst + trim(url, prefix=src)
        try:
            file_data = get(url).content
        except Exception:
            try:
                file_data_bz2 = get(url + ".bz2").content
            except Exception:
                try:
                    file_data_gz = get(url + ".gz").content
                except Exception:
                    # Nothing at this mirror; try the next rewrite.
                    continue
                # BUG FIX: the original ran gunzip on ``file_data_bz2``,
                # which is unbound in this path (the .bz2 fetch failed).
                file_data = call(["gunzip"], stdin_data=file_data_gz,
                                 do_crlf_fix=False)
            else:
                # BUG FIX: the .bz2 success path never decompressed.
                file_data = call(["bunzip2"], stdin_data=file_data_bz2,
                                 do_crlf_fix=False)
        # BUG FIX: the original fell off the end and returned None even
        # on success; callers hash the returned bytes.
        return file_data
    raise Exception("unable to fetch %r from any mirror" % (base_url,))
def ubuntu_to_hg(hg_path, username, do_development=False):
    """Mirror Ubuntu release metadata into a Mercurial repository.

    One hg named branch per supported release codename; the development
    release is only mirrored when *do_development* is true.  Branches for
    releases that drop out of support are closed.  Release signatures are
    verified against the Ubuntu keyring before any files are committed.
    """
    def hg(argv, **kwargs):
        # Run hg inside the repository, forcing ui.username on commits
        # when a username was supplied.
        kwargs.setdefault("cwd", hg_path)
        kwargs.setdefault("do_print", True)
        prefix = ["hg"]
        if username is not None and argv[0] in ("commit", "ci"):
            prefix.extend(["--config", "ui.username=%s" % (username,)])
        return call(prefix + argv, **kwargs)

    with with_ubuntu_keyring() as gpg:
        meta_release_data = get(
            join(BASE_URL, "meta-release-development")).content
        meta_release = parse_control_file(meta_release_data)
        # Sanity check: one record per dist.
        group_by(meta_release, lambda r: r["dist"])
        if not os.path.exists(hg_path):
            os.makedirs(hg_path)
            hg(["init"])
        branches = set([a.split()[0] for a in read_lines(hg(["branches"]))])
        ok_branches = set()
        seen_supported_non_lts = False
        for release in meta_release:
            branch = "ubuntu_codename_%s" % (release["dist"],)
            is_lts = "LTS" in release["version"]
            is_supported = release["supported"] == "1"
            if is_supported and not is_lts:
                seen_supported_non_lts = True
            # Releases after the newest supported non-LTS are "development".
            is_development = not is_supported and seen_supported_non_lts
            if not is_supported and not is_development:
                continue
            ok_branches.add(branch)
            if is_development and not do_development:
                continue
            done = set()
            if branch not in branches:
                # New branch: start from the null revision.
                hg(["update", "--clean", "--rev", "00"])
                hg(["branch", "--force", branch])
            else:
                hg(["update", "--clean", branch])
            # Remove untracked files so the working copy mirrors upstream.
            hg(["--config", "extensions.purge=", "purge", "--all"])
            release_gpg_path = os.path.join(hg_path, "Release.gpg")
            release_path = os.path.join(hg_path, "Release")
            old_sha1sums = {}
            release_gpg_data = get(release["release-file"] + ".gpg").content
            if os.path.exists(release_gpg_path):
                if release_gpg_data == read_file(release_gpg_path):
                    # Signature unchanged => release unchanged; skip.
                    continue
                release_data = read_file(release_path)
                old_sha1sums = get_release_sha1sums(release_data)
                old_sha1sums["Release"] = hashlib.sha1(
                    release_data).hexdigest()
                old_sha1sums["Release.gpg"] = hashlib.sha1(
                    release_gpg_data).hexdigest()
#                 for relpath in sorted(old_sha1sums):
#                     if posixpath.dirname(relpath) == "Index":
#                         index_data = read_file(
#                             os.path.join(hg_path, relpath))
#                         child_sha1sums = get_release_sha1sums(index_data)
#                         for relpath2 in sorted(child_sha1sums):
#                             relpath3 = posixpath.join(
#                                 posixpath.dirname(relpath), relpath2)
#                             old_sha1sums[relpath3] = child_sha1sums[relpath2]
            release_data = get(release["release-file"]).content
            with open(release_gpg_path, "wb") as fh:
                fh.write(release_gpg_data)
            # BUG FIX: the original added "Release" here and "Release.gpg"
            # below, i.e. the labels were swapped relative to the files
            # just written.
            done.add("Release.gpg")
            with open(release_path, "wb") as fh:
                fh.write(release_data)
            done.add("Release")
            # Verify the detached signature before trusting the sha1sums.
            gpg(["--verify", release_gpg_path, release_path])
            new_sha1sums = get_release_sha1sums(release_data)
            new_sha1sums["Release.gpg"] = hashlib.sha1(
                release_gpg_data).hexdigest()
            new_sha1sums["Release"] = hashlib.sha1(
                release_data).hexdigest()
#             for relpath in sorted(new_sha1sums):
#                 if posixpath.basename(relpath) == "Index":
#                     if new_sha1sums[relpath] == old_sha1sums.get(relpath):
#                         index_data = read_file(
#                             os.path.join(hg_path, relpath))
#                     else:
#                         index_data = get(
#                             posixpath.join(
#                                 posixpath.dirname(release["Release-File"]),
#                                 relpath)).content
#                         sha1sum = hashlib.sha1(index_data).hexdigest()
#                         if sha1sum != new_sha1sums[relpath]:
#                             raise Exception("sha1sum mismatch for %r: "
#                                             "got %s expecting %s"
#                                             % (url, sha1sum,
#                                                new_sha1sums[relpath]))
#                         index_path = os.path.join(hg_path, relpath)
#                         if not os.path.exists(os.path.dirname(index_path)):
#                             os.makedirs(os.path.dirname(index_path))
#                         with open(index_path, "wb") as fh:
#                             fh.write(index_data)
#                         done.add(relpath)
#                     child_sha1sums = get_release_sha1sums(index_data)
#                     for relpath2 in sorted(child_sha1sums):
#                         relpath3 = posixpath.join(
#                             posixpath.dirname(relpath), relpath2)
#                         new_sha1sums[relpath3] = child_sha1sums[relpath2]
            # Delete files that vanished from the release.
            for relpath in old_sha1sums:
                if relpath in new_sha1sums:
                    continue
                file_path = os.path.join(hg_path, relpath)
                call(["rm", "-rf", "--one-file-system", file_path])
            # Fetch new or changed files.
            for relpath in new_sha1sums:
                if relpath in old_sha1sums:
                    if new_sha1sums[relpath] == old_sha1sums[relpath]:
                        continue
                # Skip compressed duplicates of files we fetch uncompressed.
                if (relpath.endswith(".gz")
                        and trim(relpath, suffix=".gz") in new_sha1sums):
                    continue
                if (relpath.endswith(".bz2")
                        and trim(relpath, suffix=".bz2") in new_sha1sums):
                    continue
                if relpath in done:
                    continue
                file_path = os.path.join(hg_path, relpath)
                # BUG FIX: compute the URL once — the original's mismatch
                # message referenced an unbound name ``url`` and would
                # have raised NameError instead of the intended Exception.
                url = posixpath.join(
                    posixpath.dirname(release["release-file"]), relpath)
                file_data = get_release_file(url)
                sha1sum = hashlib.sha1(file_data).hexdigest()
                if sha1sum != new_sha1sums[relpath]:
                    raise Exception("sha1sum mismatch for %r: "
                                    "got %s expecting %s"
                                    % (url, sha1sum, new_sha1sums[relpath]))
                if not os.path.exists(os.path.dirname(file_path)):
                    os.makedirs(os.path.dirname(file_path))
                with open(file_path, "wb") as fh:
                    fh.write(file_data)
            hg(["addremove"])
            if len(read_lines(hg(["status"]))) > 0:
                hg(["commit", "-m", "Update from upstream"])
        # Close branches for releases no longer supported.
        for branch in branches:
            if branch == "default" or branch in ok_branches:
                continue
            hg(["update", "--clean", branch])
            hg(["commit", "--close-branch", "-m",
                "Closing unsupported release"])
def main(argv):
    """Render an apt sources.list from DEFAULT_CONFIG merged with an
    optional JSON argument, and optionally install it (plus a proxy
    config and a blanked /etc/apt/sources.list) via sudo.

    argv: positional args after option parsing; at most one, a JSON
    object overriding DEFAULT_CONFIG keys.
    """
    parser = optparse.OptionParser(__doc__)
    parser.add_option("--install", dest="do_install", default=False,
                      action="store_true")
    parser.add_option("--skip-proxy-install", dest="do_proxy_install",
                      default=True, action="store_false")
    parser.add_option("--skip-blank-sources-list-install", dest="do_blank",
                      default=True, action="store_false")
    parser.add_option("--proxy", dest="proxy",
                      default="http://ubuntu.devel.cmedltd.com:3142/")
    parser.add_option("--no-proxy", dest="proxy", action="store_const",
                      const=None)
    parser.add_option("--proxy-basename", dest="proxy_basename",
                      default="02-generated-proxy")
    parser.add_option("--basename", dest="basename", default="generated")
    options, args = parser.parse_args(argv)
    custom_json = json.dumps({})
    if len(args) > 0:
        custom_json = args.pop(0)
    if len(args) > 0:
        parser.error("Unexpected: %r" % (args,))
    # Round-trip DEFAULT_CONFIG through JSON so defaults and overrides are
    # the same (copied, JSON-compatible) shape before merging.
    default_json = json.dumps(DEFAULT_CONFIG)
    defaults = json.loads(default_json)
    custom = json.loads(custom_json)
    for key, value in defaults.items():
        custom.setdefault(key, value)
    if custom["distribution"] is None:
        # Fall back to the host's own codename.
        custom["distribution"] = trim(
            call(["lsb_release", "--short", "--codename"]), suffix="\r\n")
    output = render_to_sources_list(custom)
    if options.do_install:
        # Basenames are interpolated into filesystem paths below; reject
        # path separators and NULs.
        assert "\0" not in options.basename and "/" not in options.basename, \
            repr(options.basename)
        file_data = {
            "/etc/apt/sources.list.d/%s.list" % (options.basename,): output}
        if options.do_blank:
            file_data["/etc/apt/sources.list"] = BLANK_SOURCES
        if options.do_proxy_install:
            if options.proxy is None:
                proxy_data = "# No proxy\r\n"
            else:
                # The proxy value is embedded in a quoted apt.conf string.
                assert '"' not in options.proxy, repr(options.proxy)
                proxy_data = ('Acquire::HTTP { Proxy::"%s"; }\r\n'
                              % (options.proxy,))
            assert ("\0" not in options.proxy_basename
                    and "/" not in options.proxy_basename), \
                repr(options.proxy_basename)
            file_data["/etc/apt/apt.conf.d/%s" % (options.proxy_basename,)] = \
                proxy_data
        # Write all files as root in one sudo invocation: the file map is
        # passed as JSON on the child's stdin.
        child = subprocess.Popen(["sudo", "python", "-c", """\
assert __name__ == "__main__"
import sys
assert len(sys.argv) == 1
import json
file_data = json.loads(sys.stdin.read())
for file_path, data in sorted(file_data.items()):
    fh = open(file_path, "wb")
    try:
        fh.write(data)
    finally:
        fh.close()
"""], stdin=subprocess.PIPE)
        child.communicate(json.dumps(file_data))
        assert child.returncode == 0, child.returncode
    # Always echo the rendered sources.list to stdout.
    sys.stdout.write(output)
def resolve_document_using_git(git, docref):
    """Populate and return the document dict for *docref* by querying git.

    *git* is the argv prefix for invoking git; *docref* selects what to
    resolve: the branch list, a single branch, or a commit/tree/blob.

    Raises NotImplementedError for unknown docref kinds.

    NOTE(review): this redefines the earlier function of the same name,
    which therefore becomes dead code — confirm that is intended.
    """
    document = docref_to_dict(docref)
    kind = docref.kind
    # NOTE(review): ``id`` is never used below and shadows the builtin.
    id = docref.id
    # Format-string lookup against the referenced object, e.g. get("%an").
    get = lambda a: git_show(git, docref.name, a)
    if kind == "branches":
        branches = set()
        # Parse ``git branch -a`` output: lines are prefixed with spaces
        # or "* " for the checked-out branch; "a -> b" marks a symref.
        for line in read_lines(call(git + ["branch", "-a"])):
            if line.startswith(" "):
                line = trim(line, prefix=" ")
            elif line.startswith("* "):
                line = trim(line, prefix="* ")
                if line == "(no branch)":
                    continue
            else:
                raise NotImplementedError(line)
            if " -> " in line:
                ba, bb = line.split(" -> ", 1)
                branches.add(posixpath.basename(ba))
                branches.add(posixpath.basename(bb))
            else:
                branches.add(posixpath.basename(line))
        # Deterministic order; HEAD is a symref, not a real branch.
        branches = list(sorted(b for b in branches if b != "HEAD"))
        document["branches"] = []
        for branch in branches:
            document["branches"].append(docref_to_dict(BranchDocref(branch)))
    elif kind == "branch":
        sha = get1(
            read_lines(
                call(git + ["rev-parse", docref.name])))
        document["commit"] = docref_to_dict(ShaDocRef("commit", sha))
    elif kind == "commit":
        document.update(
            {"author": {"name": get("%an"),
                        "email": get("%ae"),
                        "date": get("%ai")},
             "committer": {"name": get("%cn"),
                           "email": get("%ce"),
                           "date": get("%ci")},
             "message": get("%B"),
             "tree": docref_to_dict(ShaDocRef("tree", get("%T"))),
             "parents": [],
             })
        # %P is the space-separated parent sha list; empty for root commits.
        for p in sorted(get("%P").split(" ")):
            if p == "":
                continue
            document["parents"].append(
                docref_to_dict(ShaDocRef("commit", p)))
    elif kind == "tree":
        document["children"] = []
        # ls-tree lines: "<mode> <kind> <sha>\t<basename>"
        for line in read_lines(call(git + ["ls-tree", docref.name])):
            child_mode, child_kind, rest = line.split(" ", 2)
            child_sha, child_basename = rest.split("\t", 1)
            ref = {"child": docref_to_dict(ShaDocRef(child_kind, child_sha)),
                   "basename": child_basename,
                   "mode": octal_to_symbolic_mode(child_mode)}
            document["children"].append(ref)
        # Deterministic ordering so documents hash/compare stably.
        document["children"].sort(key=lambda a: a["child"]["sha"])
    elif kind == "blob":
        blob = call(git + ["show", docref.name], do_crlf_fix=False)
        if is_text(blob):
            document["encoding"] = "raw"
            document["raw"] = blob
        else:
            # Binary content is base64-encoded for JSON safety.
            document["encoding"] = "base64"
            document["base64"] = base64.b64encode(blob)
    else:
        raise NotImplementedError(kind)
    return document