Example 1
def post_package_indexing(tempdir: Path, channel_name, subdirs, files,
                          packages):
    with get_db_manager() as db:
        # The most recently created key is fetched, since we cannot
        # get `user_id` outside a request / API call.
        signing_key = (db.query(db_models.RepodataSigningKey).filter(
            db_models.RepodataSigningKey.channel_name == channel_name,
        ).order_by(desc('time_created')).first())

        if signing_key:
            for subdir in subdirs:
                repodata_folderpath = tempdir / channel_name / subdir

                # Sign the subdir's repodata in place; the signer writes
                # repodata_signed.json next to repodata.json.
                RepoSigner(repodata_folderpath, signing_key.private_key)

                with open(repodata_folderpath /
                          "repodata_signed.json") as f:
                    repodata_signed = f.read()

                add_temp_static_file(
                    repodata_signed,
                    channel_name,
                    subdir,
                    "repodata_signed.json",
                    tempdir,
                    files,
                )
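
For context, all of these functions implement the same pluggy hook, registered through quetz's plugin system. A minimal sketch of the wiring, assuming the `quetz.hookimpl` marker that quetz exposes to plugins (the plugin module itself is advertised through an entry point in the plugin's packaging metadata):

import quetz


@quetz.hookimpl
def post_package_indexing(tempdir, channel_name, subdirs, files, packages):
    # Body as in the example above; quetz invokes every registered
    # implementation after the per-subdir repodata has been written
    # (see Example 4 below).
    pass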
Example 2
def post_package_indexing(tempdir: Path, channel_name, subdirs, files,
                          packages):
    with get_db_manager() as db:

        query = (
            db.query(PackageVersion)
            .filter(
                PackageVersion.channel_name == channel_name,
                PackageVersion.package_name
                == f"{channel_name}-repodata-patches",
                PackageVersion.version_order == 0,  # newest patch package
            )
            .order_by(PackageVersion.version.desc())
        )
        patches_pkg = query.one_or_none()

    if patches_pkg:
        # The patch instructions ship in a regular package named
        # "<channel>-repodata-patches", stored in the noarch subdir.
        filename = patches_pkg.filename
        fs = pkgstore.serve_path(channel_name, "noarch/" + filename)
        package_format = patches_pkg.package_format

        # The patch package may come in either .tar.bz2 or .conda format.
        if package_format == PackageFormatEnum.tarbz2:
            extract_ = extract_from_tarfile
        else:
            extract_ = extract_from_conda

        with extract_(fs) as tar:

            for subdir in subdirs:
                packages[subdir] = {}
                path = f"{subdir}/patch_instructions.json"

                patch_instructions = _load_instructions(tar, path)

                with open(tempdir / channel_name / subdir /
                          "repodata.json") as f:
                    repodata_str = f.read()
                    repodata = json.loads(repodata_str)

                add_temp_static_file(
                    repodata_str,
                    channel_name,
                    subdir,
                    "repodata_from_packages.json",
                    tempdir,
                    files,
                )

                # Apply the patch instructions in place before collecting
                # and re-serializing the patched records.
                patch_repodata(repodata, patch_instructions)

                packages[subdir].update(repodata["packages"])
                packages[subdir].update(repodata["packages.conda"])

                patched_repodata_str = json.dumps(repodata)
                add_temp_static_file(
                    patched_repodata_str,
                    channel_name,
                    subdir,
                    "repodata.json",
                    tempdir,
                    files,
                )
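
The `_load_instructions` helper is not shown above. A minimal sketch of what it could look like, assuming `tar` follows the `tarfile.TarFile` API and that a subdir without instructions yields an empty patch set (the plugin's real helper may differ):

import json


def _load_instructions(tar, path):
    # Hypothetical sketch: read one subdir's patch_instructions.json
    # from the archive. tarfile raises KeyError for a missing member
    # and extractfile() returns None for non-regular entries.
    try:
        member = tar.extractfile(path)
    except KeyError:
        return {}
    if member is None:
        return {}
    return json.load(member)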
Example 3
def post_package_indexing(tempdir: Path, channel_name, subdirs, files,
                          packages):
    pins = {}  # no version pins are applied
    for subdir in subdirs:
        with open(tempdir / channel_name / subdir / "repodata.json") as f:
            repodata = json.load(f)

        # Reduce the full repodata to the newest entries per package.
        current_repodata = _build_current_repodata(subdir, repodata, pins)

        current_repodata_string = json.dumps(current_repodata,
                                             indent=2,
                                             sort_keys=True)

        add_temp_static_file(
            current_repodata_string,
            channel_name,
            subdir,
            "current_repodata.json",
            tempdir,
            files,
        )
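
`_build_current_repodata` is likewise left out; in conda tooling the authoritative logic for current_repodata.json lives in conda-build's index machinery. A deliberately simplified sketch of the idea, keeping only the newest version of each package; the naive version comparison and the ignored `pins` argument are simplifications, not the real behavior:

def _build_current_repodata(subdir, repodata, pins):
    # Simplified sketch, not the real helper: keep only the highest
    # version of each package name so solvers fetch a smaller index.
    def version_key(info):
        parts = info.get("version", "0").split(".")
        return tuple(int(p) for p in parts if p.isdigit())

    newest = {}
    for fname, info in repodata.get("packages", {}).items():
        name = info["name"]
        if name not in newest or version_key(info) > version_key(
                newest[name][1]):
            newest[name] = (fname, info)

    return {
        "info": repodata.get("info", {"subdir": subdir}),
        "packages": dict(newest.values()),
        "packages.conda": {},
    }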
Example 4
def update_indexes(dao, pkgstore, channel_name, subdirs=None):
    jinjaenv = _jinjaenv()
    channeldata = channel_data.export(dao, channel_name)

    if subdirs is None:
        subdirs = sorted(channeldata["subdirs"], key=_subdir_key)

    # Generate channeldata.json and its compressed version
    chandata_json = json.dumps(channeldata, indent=2, sort_keys=False)
    add_static_file(chandata_json, channel_name, None, "channeldata.json",
                    pkgstore)

    # Generate index.html for the "root" directory
    channel_index = jinjaenv.get_template("channeldata-index.html.j2").render(
        title=channel_name,
        packages=channeldata["packages"],
        subdirs=subdirs,
        current_time=datetime.now(timezone.utc),
    )

    add_static_file(channel_index, channel_name, None, "index.html", pkgstore)

    # NB. No rss.xml is being generated here
    files = {}
    packages = {}
    subdir_template = jinjaenv.get_template("subdir-index.html.j2")

    tempdir = tempfile.TemporaryDirectory()
    tempdir_path = Path(tempdir.name)

    pm = quetz.config.get_plugin_manager()

    for sdir in subdirs:
        logger.debug(
            f"creating indexes for subdir {sdir} of channel {channel_name}")
        raw_repodata = repo_data.export(dao, channel_name, sdir)

        pm.hook.post_index_creation(
            raw_repodata=raw_repodata,
            channel_name=channel_name,
            subdir=sdir,
        )

        files[sdir] = []
        packages[sdir] = raw_repodata["packages"]

        repodata = json.dumps(raw_repodata, indent=2, sort_keys=False)

        add_temp_static_file(repodata, channel_name, sdir, "repodata.json",
                             tempdir_path, files)

    pm.hook.post_package_indexing(
        tempdir=tempdir_path,
        channel_name=channel_name,
        subdirs=subdirs,
        files=files,
        packages=packages,
    )

    for sdir in subdirs:
        # Generate subdir index.html
        subdir_index_html = subdir_template.render(
            title=f"{channel_name}/{sdir}",
            packages=packages[sdir],
            current_time=datetime.now(timezone.utc),
            add_files=files[sdir],
        )
        add_static_file(subdir_index_html, channel_name, sdir, "index.html",
                        pkgstore)

    # Recursively walk through the tree and upload every generated file
    # under a temporary suffix, so clients never see a half-written index.
    tmp_suffix = uuid.uuid4().hex
    after_upload_move = []
    for path in tempdir_path.rglob('*.*'):
        rel_path = path.relative_to(tempdir_path)
        if len(rel_path.parts) == 2:
            channel_name, filename = rel_path.parts
            dest = f"{filename}{tmp_suffix}"
        elif len(rel_path.parts) == 3:
            channel_name, sdir, filename = rel_path.parts
            dest = f"{sdir}/{filename}{tmp_suffix}"
        else:
            raise NotImplementedError(
                "We can only handle channel_name/subdir/file.xyz OR "
                "channel_name/file.xyz")

        logger.debug(f"Uploading {path} -> {channel_name} {dest}")
        if type(pkgstore).__name__ == "S3Store":
            # Upload straight from disk so large files are not read
            # into memory first.
            with pkgstore._get_fs() as fs:
                bucket = pkgstore._bucket_map(channel_name)
                fs.put_file(path, f"{bucket}/{dest}")
        else:
            with open(path, 'rb') as to_upload:
                pkgstore.add_file(to_upload.read(), channel_name, dest)

        after_upload_move.append(dest)

    for f_to_move in after_upload_move:
        logger.debug("Moving to final destination: "
                     f"{f_to_move} -> {f_to_move[:-len(tmp_suffix)]}")
        pkgstore.move_file(channel_name, f_to_move,
                           f_to_move[:-len(tmp_suffix)])

    tempdir.cleanup()
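
Every example stages its output through `add_temp_static_file`, and `update_indexes` later uploads whatever landed in the temporary tree. A rough sketch of the contract the callers appear to rely on; this is an assumption, not quetz's exact implementation, which also records richer index metadata and compressed variants:

from pathlib import Path


def add_temp_static_file(contents, channel_name, subdir, fname, temp_dir,
                         file_index):
    # Assumed contract: write `contents` to
    # temp_dir/channel_name[/subdir]/fname and remember the file name
    # so the subdir index.html can link to it.
    if isinstance(contents, str):
        contents = contents.encode("utf-8")
    dest_dir = Path(temp_dir) / channel_name
    if subdir is not None:
        dest_dir = dest_dir / subdir
    dest_dir.mkdir(parents=True, exist_ok=True)
    (dest_dir / fname).write_bytes(contents)
    if subdir is not None:
        # Entry shape is an assumption; the index templates may expect
        # richer metadata (size, timestamp, checksums).
        file_index.setdefault(subdir, []).append(fname)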