Example #1
import logging
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

from sqlalchemy.exc import IntegrityError  # assuming a SQLAlchemy-backed Dao

# Dao, Config, rest_models, authorization, and the helpers chunks,
# uuid_to_bytes, handle_condainfo, handle_file, and update_indexes
# are provided by the surrounding package.

logger = logging.getLogger(__name__)


def reindex_packages_from_store(dao: Dao,
                                config: Config,
                                channel_name: str,
                                user_id,
                                sync: bool = True):
    """Reindex packages from files in the package store.

    With sync=True (the default), packages already recorded in the
    database are kept and skipped; with sync=False, all existing
    package rows are deleted and every file is re-imported.
    """

    logger.debug(f"Re-indexing channel {channel_name}")

    channel = dao.get_channel(channel_name)
    pkg_db = []
    if channel:
        if not sync:
            # Full rebuild: drop every package row so all files are re-imported.
            for package in channel.packages:
                dao.db.delete(package)
            dao.db.commit()
        else:
            # Incremental sync: clean up stale rows, then record the
            # platform/filename pairs already in the database so the
            # corresponding files can be skipped below.
            dao.cleanup_channel_db(channel_name)
            for package in channel.packages:
                for pv in package.package_versions:  # type: ignore
                    pkg_db.append(f"{pv.platform}/{pv.filename}")
            dao.db.commit()
    else:
        # Channel does not exist yet: create it before re-indexing.
        data = rest_models.Channel(name=channel_name,
                                   description="re-indexed from store",
                                   private=True)
        channel = dao.create_channel(data, user_id, authorization.OWNER)

    logger.debug(f"Reading package list for channel {channel_name}")
    user_id = uuid_to_bytes(user_id)
    pkgstore = config.get_package_store()
    all_files = pkgstore.list_files(channel_name)
    pkg_files = [f for f in all_files if f.endswith(".tar.bz2")]
    nthreads = config.general_package_unpack_threads

    logger.debug(
        f"Found {len(pkg_db)} packages for channel {channel_name} in database")
    logger.debug(
        f"Found {len(pkg_files)} packages for channel {channel_name} in pkgstore"
    )

    # Import only files that are not already recorded in the database.
    pkg_files = list(set(pkg_files) - set(pkg_db))
    logger.debug(
        f"Importing {len(pkg_files)} packages for channel {channel_name}" +
        " from pkgstore")

    # Unpack package metadata in parallel, one chunk of files at a time,
    # then insert the results into the database sequentially.
    for pkg_group in chunks(pkg_files, nthreads * 8):
        tic = time.perf_counter()
        with ThreadPoolExecutor(max_workers=nthreads) as executor:
            results = []
            for fname in pkg_group:
                results.append(
                    executor.submit(handle_condainfo, pkgstore, channel_name,
                                    fname))
            for future in as_completed(results):
                condainfo = future.result()
                if condainfo:
                    handle_file(channel_name, condainfo, dao, user_id)

        toc = time.perf_counter()
        logger.debug(
            f"Imported files {pkg_group[0]} to {pkg_group[-1]} " +
            f"for channel {channel_name} in {toc - tic:0.4f} seconds " +
            f"using {nthreads} threads")

        # Refresh the channel indexes after each chunk; roll back if the
        # commit violates a database constraint.
        try:
            update_indexes(dao, pkgstore, channel_name)
            dao.db.commit()
        except IntegrityError:
            dao.rollback()
            logger.error(f"Update index {channel_name} failed")
    dao.cleanup_channel_db(channel_name)
    dao.db.commit()
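
For context, a minimal sketch of a call site. The get_db_session helper and the way Config and Dao are constructed below are assumptions for illustration; the real wiring lives in the surrounding application.

import uuid

config = Config()             # assumed: loads the application configuration
db = get_db_session(config)   # hypothetical SQLAlchemy session factory
dao = Dao(db)
owner = uuid.uuid4()          # user who will own a newly created channel

# Incremental sync (default): import only files missing from the database.
reindex_packages_from_store(dao, config, "my-channel", user_id=owner)

# Full rebuild: drop existing package rows and re-import every file.
reindex_packages_from_store(dao, config, "my-channel", user_id=owner,
                            sync=False)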
Example #2
def cleanup_channel_db(dao: Dao, channel_name: str, dry_run: bool):
    """Delegate channel-database cleanup to the Dao layer."""
    dao.cleanup_channel_db(channel_name, dry_run)
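
Assuming conventional dry-run semantics (report what would be removed without deleting it), the wrapper might be exercised like this, with dao wired up as in the sketch above:

# Preview the cleanup without modifying the database (assumed semantics).
cleanup_channel_db(dao, "my-channel", dry_run=True)

# Perform the cleanup for real.
cleanup_channel_db(dao, "my-channel", dry_run=False)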