# Esempio n. 1
def fetch_store_path(path,
                     dest_file,
                     binary_cache=DEFAULT_BINARY_CACHE_URL,
                     tmp_dir=None):
    """Extract a single file from a Nix binary cache into *dest_file*.

    Fetches the .narinfo for *path*, downloads the referenced NAR from
    *binary_cache*, decompresses it (xz or bzip2) when needed, and uses
    ``nix cat-nar`` to pull the requested file out of the archive.

    Args:
        path: absolute ``/nix/store/...`` path to extract.
        dest_file: filename the extracted file is written to.
        binary_cache: base URL of the binary cache to download from.
        tmp_dir: parent directory for scratch space; defaults to the
            current working directory *at call time*. (The old default
            ``tmp_dir=os.getcwd()`` was frozen at import time — a bug.)

    Raises:
        Exception: if *path* is not under ``/nix/store/``.
        subprocess.CalledProcessError: if ``nix cat-nar`` exits non-zero.
    """
    if tmp_dir is None:
        # Resolve the cwd lazily so callers get the directory they are
        # actually in, not the one active when this module was imported.
        tmp_dir = os.getcwd()
    if not path.startswith("/nix/store/"):
        raise Exception("path not valid")
    ni = NarInfo(fetch_file_from_cache(nar_info_from_path(path)))

    with tempfile.TemporaryDirectory(dir=tmp_dir) as temp_dir:
        with ccd(temp_dir):
            nar_location = os.path.join(temp_dir,
                                        os.path.basename(ni.d['URL']))
            download_file_from_cache(ni.d['URL'], nar_location, binary_cache)
            assert (os.path.isfile(nar_location))
            if ni.d['Compression'] == 'xz' and nar_location.endswith("xz"):
                nar_extract_location = ".".join(nar_location.split(".")[:-1])
                with lzma.open(nar_location) as n:
                    with open(nar_extract_location, "wb") as ne:
                        ne.write(n.read())
            elif ni.d['Compression'] == 'bzip2' and nar_location.endswith(
                    "bz2"):
                nar_extract_location = ".".join(nar_location.split(".")[:-1])
                with bz2.open(nar_location) as n:
                    with open(nar_extract_location, "wb") as ne:
                        ne.write(n.read())
            else:
                # Unknown/absent compression: assume the NAR is usable as-is.
                nar_extract_location = nar_location
            # Path inside the NAR: drop the /nix/store/<hash-name> prefix,
            # keep a leading '/' (empty first component).
            path_in_nar = '/'.join([''] + path.split('/')[4:])
            # Write stdout straight to dest_file instead of a shell
            # redirect: no shell injection via odd filenames, and
            # check=True surfaces failures instead of silently producing
            # an empty/missing file.
            with open(dest_file, "wb") as out:
                subprocess.run(
                    ["nix", "cat-nar", nar_extract_location, path_in_nar],
                    stdout=out,
                    check=True)
            assert (os.path.isfile(dest_file))
def create_channel_release(channel, hydra, project, jobset, job, cache, outdir, tmpdir, target_cache=None):
    """Assemble a channel release directory from a Hydra evaluation.

    Creates ``<outdir>/<channel>/<release-name>/`` containing src-url,
    binary-cache-url, store-paths (plain and .xz), the channel tarball
    and ISO/OVA images (for nixos* channels), and finally git-revision.

    Args:
        channel: channel name; ``nixos*`` channels get images and a
            programs.sqlite index, others only a nixexprs tarball.
        hydra, project, jobset, job: Hydra coordinates of the release.
        cache: binary cache URL used for index generation and, unless
            *target_cache* is given, written to binary-cache-url.
        outdir: root output directory.
        tmpdir: existing directory for large temporary downloads.
        target_cache: optional override for the published cache URL.

    Returns:
        Absolute path of the release directory.
    """
    release_info = ReleaseInfo(fetch_release_info(hydra, project, jobset, job))
    k = Karkinos(hydra, release_info.eval_id)
    eval_info = EvalInfo(k.fetch_eval_info())
    store_paths = k.fetch_store_paths()
    files_cache = os.path.join(outdir, "nixos-files.sqlite")

    out_dir = os.path.abspath(os.path.join(outdir, channel, release_info.name))
    tmp_dir = os.path.abspath(tmpdir)
    assert(os.path.isdir(tmp_dir))

    # 'git-revision' is written last, so its presence marks a finished
    # release — skip work that was already completed.
    if os.path.isfile(os.path.join(out_dir, 'git-revision')):
        return out_dir

    os.makedirs(out_dir, exist_ok=True)
    with open(os.path.join(out_dir, "src-url"), "w") as f:
        f.write(k.eval_url)

    # Single write path; `is None` (identity) instead of `== None`.
    binary_cache_url = cache if target_cache is None else target_cache
    with open(os.path.join(out_dir, "binary-cache-url"), "w") as f:
        f.write(binary_cache_url)

    # Deduplicate once and reuse, so both files are guaranteed identical.
    store_paths_text = "\n".join(set(store_paths))
    with open(os.path.join(out_dir, 'store-paths'), 'w') as f:
        f.write(store_paths_text)

    with lzma.open(os.path.join(out_dir, 'store-paths.xz'), 'w') as f:
        f.write(store_paths_text.encode('utf-8'))

    if channel.startswith('nixos'):
        k.download_file('nixos.channel', out_dir, 'nixexprs.tar.xz', tmp_dir=tmp_dir)
        k.download_file('nixos.iso_minimal.x86_64-linux', out_dir, tmp_dir=tmp_dir)
        if not channel.endswith('-small'):
            k.download_file('nixos.iso_minimal.i686-linux', out_dir, tmp_dir=tmp_dir)
            k.download_file('nixos.iso_graphical.x86_64-linux', out_dir, tmp_dir=tmp_dir)
            k.download_file('nixos.ova.x86_64-linux', out_dir, tmp_dir=tmp_dir)
    else:
        k.download_file('tarball', out_dir, 'nixexprs.tar.gz', tmp_dir=tmp_dir)

    if channel.startswith('nixos'):
        nixexpr_tar = os.path.join(out_dir, 'nixexprs.tar.xz')
        with tarfile.open(nixexpr_tar, "r:xz") as nixexpr:
            # Generator + any() short-circuits; no throwaway list.
            contains_programs = any(
                'programs.sqlite' in s for s in nixexpr.getnames())

        if not contains_programs:
            with tempfile.TemporaryDirectory() as temp_dir:
                # Context manager: the archive is closed even on error.
                with tarfile.open(nixexpr_tar, 'r:xz') as nixexpr:
                    nixexpr.extractall(temp_dir)

                # The tarball contains a single top-level directory.
                expr_dir = os.path.join(temp_dir, os.listdir(temp_dir)[0])

                try:
                    # Argument list, no shell: paths with spaces or shell
                    # metacharacters are passed through verbatim.
                    subprocess.check_call([
                        'generate-programs-index',
                        files_cache,
                        os.path.join(expr_dir, 'programs.sqlite'),
                        cache,
                        os.path.join(out_dir, 'store-paths'),
                        os.path.join(expr_dir, 'nixpkgs'),
                    ])
                    # NOTE(review): raises FileNotFoundError if no journal
                    # was left behind — confirm generate-programs-index
                    # always leaves one.
                    os.remove(os.path.join(expr_dir, 'programs.sqlite-journal'))
                    # Repack the tarball with programs.sqlite included.
                    os.remove(nixexpr_tar)
                    with tarfile.open(nixexpr_tar, 'w:xz') as nixexpr:
                        with ccd(temp_dir):
                            nixexpr.add(os.listdir()[0])
                except subprocess.CalledProcessError:
                    print("Could not execute {}".format("generate-programs-index"))

    # Written last: acts as the completion marker checked above.
    with open(os.path.join(out_dir, "git-revision"), "w") as f:
        f.write(eval_info.git_rev)
    return out_dir
# Esempio n. 3
def mirror_tarballs(target_dir,
                    tmp_dir,
                    git_repo,
                    git_revision,
                    concurrent=DEFAULT_CONCURRENT_DOWNLOADS):
    """Mirror all fixed-output tarballs referenced by a nixpkgs revision.

    Checks out *git_revision* of *git_repo* under *tmp_dir*, evaluates the
    expressions in NIX_EXPRS to obtain the tarball list, then downloads
    every not-yet-mirrored entry into *target_dir* using *concurrent*
    worker threads.

    Args:
        target_dir: mirror root (also receives revisions/<rev>/log).
        tmp_dir: scratch directory holding the nixpkgs checkout.
        git_repo: clone URL for nixpkgs.
        git_revision: revision to evaluate.
        concurrent: number of download worker threads.

    Returns:
        A log string summarizing failed downloads (also written to
        revisions/<git_revision>/log), or a fatal-error message when no
        nix-instantiate invocation succeeds.
    """
    global failed_entries
    global download_queue
    create_mirror_dirs(target_dir, git_revision)
    download_queue = queue.Queue()
    threads = []
    repo_path = os.path.join(tmp_dir, "nixpkgs")
    os.makedirs(repo_path, exist_ok=True)
    with ccd(repo_path):
        # Reuse an existing clone when possible; fall back to cloning.
        # `except Exception` (not a bare except) so Ctrl-C still works.
        try:
            repo = Repository(os.path.join(repo_path, ".git"))
            repo.remotes["origin"].fetch()
        except Exception:
            repo = clone_repository(git_repo, repo_path)
        repo.reset(git_revision, GIT_RESET_HARD)
        with ccd(repo.workdir):
            env = os.environ.copy()
            env["NIX_PATH"] = "nixpkgs={}".format(repo.workdir)
            res = None
            # Try each expression until one evaluates; for/else replaces
            # the old manual `success` flag.
            for expr in NIX_EXPRS:
                res = subprocess.run(nix_instantiate_cmd(expr),
                                     shell=True,
                                     stdout=subprocess.PIPE,
                                     env=env)
                if res.returncode == 0:
                    break
                print("nix instantiate failed!")
            else:
                return "fatal: all nix instantiate processes failed!"
            output = json.loads(res.stdout.decode('utf-8').strip())
    for entry in output:
        url = entry['url']
        # Preserved original semantics: EXACTLY one scheme must match
        # (a url matching zero — or several overlapping — schemes is
        # treated as unsupported).
        scheme_matches = sum(
            1 for scheme in VALID_URL_SCHEMES if url.startswith(scheme))
        if scheme_matches != 1:
            append_failed_entry(entry)
            print("url {} is not in the supported url schemes.".format(url))
        elif (check_presence(target_dir, entry['hash'])
              or check_presence(target_dir, entry['name'])):
            print("url {} already mirrored".format(url))
        else:
            download_queue.put(entry)
    for _ in range(concurrent):
        t = threading.Thread(target=download_worker,
                             args=(
                                 target_dir,
                                 git_revision,
                                 repo.workdir,
                             ))
        threads.append(t)
        t.start()
    # Wait until every queued entry has been task_done()'d, then hand
    # each worker a None sentinel so it exits, and reap the threads.
    download_queue.join()
    for _ in range(concurrent):
        download_queue.put(None)
    for t in threads:
        t.join()
    lines = [
        "########################",
        "SUMMARY OF FAILED FILES:",
        "########################",
    ]
    for entry in failed_entries:
        lines.append("url:{}, name:{}".format(entry['url'], entry['name']))
    log = "\n".join(lines) + "\n"
    with open(os.path.join(target_dir, "revisions", git_revision, "log"),
              "w") as f:
        f.write(log)
    return log