Example #1
def reindex_packages_from_store(
    dao: Dao,
    config: Config,
    channel_name: str,
    user_id: bytes,
):
    """Reindex packages from files in the package store"""

    db = dao.db

    pkgstore = config.get_package_store()

    all_files = pkgstore.list_files(channel_name)
    pkg_files = [f for f in all_files if f.endswith(".tar.bz2")]

    channel = dao.get_channel(channel_name)

    if channel:
        for package in channel.packages:
            db.delete(package)
        db.commit()
    else:
        data = rest_models.Channel(name=channel_name,
                                   description="re-indexed from files",
                                   private=True)
        channel = dao.create_channel(data, user_id, authorization.OWNER)

    for fname in pkg_files:
        fid = pkgstore.serve_path(channel_name, fname)
        handle_file(channel_name, fname, fid, dao, user_id)
    update_indexes(dao, pkgstore, channel_name)
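
A hedged sketch of how the helper above might be invoked; `dao`, `config`, and `owner_id` are assumed to be provided by the surrounding application, and the channel name is illustrative:

# Hypothetical invocation; dao, config, and owner_id come from the application
# that embeds this helper, not from the example itself.
reindex_packages_from_store(
    dao,
    config,
    channel_name="my-channel",
    user_id=owner_id,
)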
Example #2
def test_channel_with_huge_size_limit(dao, user, db):
    channel_data = rest_models.Channel(name="my-channel",
                                       private=False,
                                       size_limit=1000000000000000000)
    channel = dao.create_channel(channel_data, user.id, "owner",
                                 1000000000000000000)
    del channel
Example #3
def channel(dao, db, user, channel_name):

    channel_data = rest_models.Channel(name=channel_name, private=False)
    channel = dao.create_channel(channel_data, user.id, "owner")
    yield channel

    db.delete(channel)
    db.commit()
Example #4
def channel(dao, db, user, channel_name):

    channel_data = rest_models.Channel(name=channel_name, private=False)
    channel = dao.create_channel(channel_data, user.id, "owner")
    yield channel

    try:
        db.delete(channel)
        db.commit()
    except ObjectDeletedError:
        pass
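
As a usage sketch, a test simply requests the fixture by name; pytest injects the created channel and runs the teardown (the `db.delete`/`db.commit` above) after the test finishes. The assertion assumes the ORM object exposes the same `private` flag passed at creation:

def test_channel_is_public(channel):
    # `channel` is the fixture defined above; the test name is illustrative.
    assert channel.private is False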
Example #5
def user_with_channel(dao, db):
    channel_data = rest_models.Channel(name="new-test-channel", private=False)

    user = dao.create_user_with_role("new-user")
    user_id = user.id
    channel = dao.create_channel(channel_data, user_id, "owner")
    db.commit()

    yield user_id
    db.delete(channel)
    db.delete(user)
    db.commit()
Example #6
def channel(dao, user, db):
    channel_data = rest_models.Channel(
        name="test_channel",
        private=False,
    )

    channel = dao.create_channel(channel_data, user.id, "owner")

    yield channel

    db.delete(channel)
    db.commit()
Example #7
def channel(dao, db, user, channel_role):
    channel_data = rest_models.Channel(
        name="test-channel",
        private=False,
    )

    if channel_role:
        c = dao.create_channel(channel_data,
                               user_id=user.id,
                               role=channel_role)
    else:
        c = dao.create_channel(channel_data)

    yield c
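
This fixture expects a `channel_role` value from elsewhere; a minimal sketch of how it might be supplied through a parametrized fixture (the role names are illustrative assumptions, not a documented list):

import pytest

@pytest.fixture(params=["owner", "maintainer", "member", None])
def channel_role(request):
    # Each test using the `channel` fixture above runs once per role,
    # plus once with no role (the branch that omits user_id/role).
    return request.param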
Example #8
def mirror_channel(dao, user, db):

    channel_data = rest_models.Channel(
        name="test_mirror_channel",
        private=False,
        mirror_channel_url="http://host",
        mirror_mode="mirror",
    )

    channel = dao.create_channel(channel_data, user.id, "owner")

    yield channel

    db.delete(channel)
    db.commit()
Example #9
def create_user_with_identity(
    dao: Dao,
    provider: str,
    profile: 'base.UserProfile',
    default_role: Optional[str],
    default_channels: Optional[List[str]],
) -> User:

    username = profile["login"]
    user = dao.create_user_with_profile(
        username=username,
        provider=provider,
        emails=profile.get("emails", []),
        identity_id=profile["id"],
        name=profile["name"],
        avatar_url=profile["avatar_url"],
        role=default_role,
        exist_ok=False,
    )

    if default_channels is not None:

        for channel_name in default_channels:

            i = 0

            while (dao.db.query(Channel).filter(
                    Channel.name == channel_name).one_or_none()):

                channel_name = f"{username}-{i}"

                i += 1

            channel_meta = rest_models.Channel(
                name=channel_name,
                description=f"{username}'s default channel",
                private=True,
            )

            dao.create_channel(channel_meta, user.id, OWNER)

    return user
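
A hedged example of calling the helper above with a hand-built profile; the provider name, role, and profile values are illustrative, and the dictionary keys simply mirror the ones the function reads:

# Illustrative call; all values are placeholders.
profile = {
    "id": "12345",
    "login": "octocat",
    "name": "Octo Cat",
    "avatar_url": "https://example.org/avatar.png",
    "emails": [],
}

user = create_user_with_identity(
    dao,
    provider="github",
    profile=profile,
    default_role="member",
    default_channels=["octocat"],
)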
Example #10
def test_create_channel_with_invalid_name(dao, user, db):
    with pytest.raises(errors.ValidationError):
        channel_data = rest_models.Channel(name="my_channel", private=False)
        channel = dao.create_channel(channel_data, user.id, "owner")
        del channel
Example #11
def reindex_packages_from_store(dao: Dao,
                                config: Config,
                                channel_name: str,
                                user_id,
                                sync: bool = True):
    """Reindex packages from files in the package store"""

    logger.debug(f"Re-indexing channel {channel_name}")

    channel = dao.get_channel(channel_name)
    pkg_db = []  # package files already registered in the database ("platform/filename")
    if channel:
        if not sync:
            for package in channel.packages:
                dao.db.delete(package)
            dao.db.commit()
        else:
            dao.cleanup_channel_db(channel_name)
            for package in channel.packages:
                for pv in package.package_versions:  # type: ignore
                    pkg_db.append(f"{pv.platform}/{pv.filename}")
            dao.db.commit()
    else:
        data = rest_models.Channel(name=channel_name,
                                   description="re-indexed from store",
                                   private=True)
        channel = dao.create_channel(data, user_id, authorization.OWNER)

    logger.debug(f"Reading package list for channel {channel_name}")
    user_id = uuid_to_bytes(user_id)
    pkgstore = config.get_package_store()
    all_files = pkgstore.list_files(channel_name)
    pkg_files = [f for f in all_files if f.endswith(".tar.bz2")]
    nthreads = config.general_package_unpack_threads

    logger.debug(
        f"Found {len(pkg_db)} packages for channel {channel_name} in database")
    logger.debug(
        f"Found {len(pkg_files)} packages for channel {channel_name} in pkgstore"
    )

    # Only import files present in the package store but not yet in the database
    pkg_files = list(set(pkg_files) - set(pkg_db))
    logger.debug(
        f"Importing {len(pkg_files)} packages for channel {channel_name}" +
        " from pkgstore")

    for pkg_group in chunks(pkg_files, nthreads * 8):
        tic = time.perf_counter()
        with ThreadPoolExecutor(max_workers=nthreads) as executor:
            results = []
            for fname in pkg_group:
                results.append(
                    executor.submit(handle_condainfo, pkgstore, channel_name,
                                    fname))
            for future in as_completed(results):
                condainfo = future.result()
                if condainfo:
                    handle_file(channel_name, condainfo, dao, user_id)

        toc = time.perf_counter()
        logger.debug(
            f"Imported files {pkg_group[0]} to {pkg_group[-1]} " +
            f"for channel {channel_name} in {toc - tic:0.4f} seconds " +
            f"using {nthreads} threads")

        try:
            update_indexes(dao, pkgstore, channel_name)
            dao.db.commit()
        except IntegrityError:
            dao.rollback()
            logger.error(f"Update index {channel_name} failed")
    dao.cleanup_channel_db(channel_name)
    dao.db.commit()
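
The `chunks` helper used above is not shown in the example; a minimal sketch of the slicing generator it presumably corresponds to:

def chunks(seq, size):
    # Yield successive slices of `seq` containing at most `size` items.
    for i in range(0, len(seq), size):
        yield seq[i:i + size]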