Example #1
    def create_channel(
        self,
        data: rest_models.Channel,
        user_id: Optional[bytes],
        role: Optional[str],
        size_limit: Optional[int] = None,
    ):
        if '_' in data.name:
            raise errors.ValidationError(
                "_ should not be used in channel name")
        if not data.name.isascii():
            raise errors.ValidationError(
                "only ASCII characters should be used in channel name")

        channel = Channel(
            name=data.name,
            description=data.description,
            mirror_channel_url=data.mirror_channel_url,
            mirror_mode=data.mirror_mode,
            private=data.private,
            channel_metadata=json.dumps(data.metadata.__dict__),
            size_limit=size_limit,
        )

        self.db.add(channel)

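        # optionally register the creating user as a channel member with the given role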
        if role and user_id:
            member = ChannelMember(channel=channel, user_id=user_id, role=role)
            self.db.add(member)

        self.db.commit()

        return channel
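
A minimal usage sketch for create_channel, assuming dao is an instance of the class above and user_id identifies the authenticated user. The rest_models.Channel fields mirror the attributes the method reads; the concrete values, the ChannelMetadata class and the "owner" role string are placeholders, not taken from the example.

    # hypothetical call; values are illustrative only
    channel_data = rest_models.Channel(
        name="internal-builds",
        description="nightly CI packages",
        private=True,
        mirror_channel_url=None,
        mirror_mode=None,
        metadata=rest_models.ChannelMetadata(),  # assumed metadata model
    )
    channel = dao.create_channel(channel_data, user_id, "owner", size_limit=10**9)
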
Example #2
    def create_job(self, user_id, function_name, items_spec):

        paths = function_name.split(":")

        if len(paths) == 2:
            plugin_name, job_name = paths
            entry_points = list(
                pkg_resources.iter_entry_points('quetz.jobs', plugin_name))
            if not entry_points:
                raise errors.ValidationError(
                    f"invalid function {function_name}: "
                    f"plugin {plugin_name} not installed")
            job_module = entry_points[0].load()
            try:
                job_function = getattr(job_module, job_name)
            except AttributeError:
                raise errors.ValidationError(
                    f"invalid function '{job_name}' name in plugin '{plugin_name}'"
                )
        elif len(paths) == 1:
            raise errors.ValidationError(
                f"invalid function {function_name}: no such built-in function,"
                " please provide plugin name")
        else:
            raise errors.ValidationError(
                f"invalid function {function_name} - could not parse")

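        # serialize the resolved function so it can be stored as the job manifest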
        serialized = pickle.dumps(job_function)
        job = Job(
            owner_id=user_id,
            manifest=serialized,
            items_spec=items_spec,
        )
        self.db.add(job)
        self.db.commit()
        return job
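
A minimal usage sketch for create_job; "my_plugin" and "run_task" are placeholders for an entry point registered under the quetz.jobs group and a function defined in the module it loads, and items_spec is whatever item selector the job runner expects.

    # hypothetical call; "<plugin_name>:<function_name>" must resolve to an
    # installed quetz.jobs entry point and an attribute of the loaded module
    job = dao.create_job(user_id, "my_plugin:run_task", items_spec="*")
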
Example #3
def handle_package_files(
    channel,
    files,
    dao,
    auth,
    force,
    package=None,
    is_mirror_op=False,
):
    user_id = auth.assert_user()

    # quick fail if not allowed to upload
    # note: we're checking later that `parts[0] == conda_info.package_name`
    total_size = 0
    for file in files:
        parts = file.filename.rsplit("-", 2)
        if len(parts) != 3:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"package file name has wrong format {file.filename}",
            )
        else:
            package_name = parts[0]
        auth.assert_upload_file(channel.name, package_name)
        if force:
            auth.assert_overwrite_package_version(channel.name, package_name)

        # workaround for https://github.com/python/cpython/pull/3249
        if type(file.file) is SpooledTemporaryFile and not hasattr(
                file, "seekable"):
            file.file.seekable = file.file._file.seekable

        file.file.seek(0, os.SEEK_END)
        size = file.file.tell()
        total_size += size
        file.file.seek(0)

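    # reject the upload up front if it would exceed the channel's size limit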
    dao.assert_size_limits(channel.name, total_size)

    channel_proxylist = []
    if channel.mirror_mode:
        if not is_mirror_op:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Cannot upload packages to mirror channel",
            )
        else:
            channel_proxylist = json.loads(channel.channel_metadata).get(
                'proxylist', [])

    pkgstore.create_channel(channel.name)
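    # extract package metadata and upload each file concurrently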
    nthreads = config.general_package_unpack_threads
    with ThreadPoolExecutor(max_workers=nthreads) as executor:
        try:
            conda_infos = [
                ci for ci in executor.map(
                    _extract_and_upload_package,
                    files,
                    (channel.name, ) * len(files),
                    (channel_proxylist, ) * len(files),
                )
            ]
        except exceptions.PackageError as e:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                detail=e.detail)

    conda_infos = [ci for ci in conda_infos if ci is not None]

    for file, condainfo in zip(files, conda_infos):
        logger.debug(f"Handling {condainfo.info['name']} -> {file.filename}")
        package_type = "tar.bz2" if file.filename.endswith(
            ".tar.bz2") else "conda"
        UPLOAD_COUNT.labels(
            channel=channel.name,
            platform=condainfo.info["subdir"],
            package_name=condainfo.info["name"],
            version=condainfo.info["version"],
            package_type=package_type,
        ).inc()

        package_name = condainfo.info["name"]
        parts = file.filename.rsplit("-", 2)

        # check that the filename matches the package name
        # TODO also validate version and build string
        if parts[0] != condainfo.info["name"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="filename does not match package name",
            )
        if package and (parts[0] != package.name
                        or package_name != package.name):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=(
                    f"requested package endpoint '{package.name}'"
                    f"does not match the uploaded package name '{parts[0]}'"),
            )

        def _delete_file(condainfo, filename):
            dest = os.path.join(condainfo.info["subdir"], filename)
            pkgstore.delete_file(channel.name, dest)

        if not package and not dao.get_package(channel.name, package_name):

            try:
                if not channel_proxylist or package_name not in channel_proxylist:
                    pm.hook.validate_new_package(
                        channel_name=channel.name,
                        package_name=package_name,
                        file_handler=file.file,
                        condainfo=condainfo,
                    )
                    # validate uploaded package size and existence
                    try:
                        pkgsize, _, _ = pkgstore.get_filemetadata(
                            channel.name,
                            f"{condainfo.info['subdir']}/{file.filename}")
                        if pkgsize != condainfo.info['size']:
                            raise errors.ValidationError(
                                f"Uploaded package {file.filename} "
                                "file size is wrong! Deleting")
                    except FileNotFoundError:
                        raise errors.ValidationError(
                            f"Uploaded package {file.filename} "
                            "file did not upload correctly!")

                package_data = rest_models.Package(
                    name=package_name,
                    summary=str(condainfo.about.get("summary", "n/a")),
                    description=str(condainfo.about.get("description", "n/a")),
                )
            except pydantic.main.ValidationError as err:
                _delete_file(condainfo, file.filename)
                raise errors.ValidationError(
                    "Validation Error for package: " +
                    f"{channel.name}/{file.filename}: {str(err)}")
            except errors.ValidationError as err:
                _delete_file(condainfo, file.filename)
                logger.error(
                    f"Validation error in: {channel.name}/{file.filename}: {str(err)}"
                )
                raise err

            dao.create_package(
                channel.name,
                package_data,
                user_id,
                authorization.OWNER,
            )

        # Update channeldata info
        dao.update_package_channeldata(channel.name, package_name,
                                       condainfo.channeldata)

        try:
            version = dao.create_version(
                channel_name=channel.name,
                package_name=package_name,
                package_format=condainfo.package_format,
                platform=condainfo.info["subdir"],
                version=condainfo.info["version"],
                build_number=condainfo.info["build_number"],
                build_string=condainfo.info["build"],
                size=condainfo.info["size"],
                filename=file.filename,
                info=json.dumps(condainfo.info),
                uploader_id=user_id,
                upsert=force,
            )
        except IntegrityError:
            logger.error(
                f"duplicate package '{package_name}' in channel '{channel.name}'"
            )
            raise HTTPException(status_code=status.HTTP_409_CONFLICT,
                                detail="Duplicate")

        pm.hook.post_add_package_version(version=version, condainfo=condainfo)
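
A minimal sketch of how an upload endpoint might delegate to handle_package_files; the route path, the dependency helpers and the force query flag are assumptions rather than the project's actual wiring.

    from typing import List
    from fastapi import APIRouter, Depends, File, UploadFile

    router = APIRouter()

    @router.post("/api/channels/{channel_name}/files/")
    def upload_files(
        files: List[UploadFile] = File(...),
        channel=Depends(get_channel),  # hypothetical dependency resolving channel_name
        dao=Depends(get_dao),          # hypothetical dependency providing the DAO
        auth=Depends(get_rules),       # hypothetical dependency providing auth rules
        force: bool = False,
    ):
        handle_package_files(channel, files, dao, auth, force)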