def post_channel(new_channel: rest_models.Channel, dao: Dao = Depends(get_dao), auth: authorization.Rules = Depends(get_rules)):
    """Create a new channel owned by the authenticated user.

    Responds with 409 CONFLICT when a channel with the requested name
    already exists.
    """
    owner_id = auth.assert_user()

    # Reject duplicates before attempting creation.
    if dao.get_channel(new_channel.name):
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=f'Channel {new_channel.name} exists')

    dao.create_channel(new_channel, owner_id, authorization.OWNER)
def reindex_packages_from_store(
    dao: Dao,
    config: Config,
    channel_name: str,
    user_id: bytes,
):
    """Reindex packages from files in the package store"""
    db = dao.db

    # Reset the channel: drop every indexed package when it already
    # exists, otherwise create a fresh private channel to index into.
    channel = dao.get_channel(channel_name)
    if channel:
        for pkg in channel.packages:
            db.delete(pkg)
        db.commit()
    else:
        meta = rest_models.Channel(
            name=channel_name, description="re-indexed from files", private=True
        )
        channel = dao.create_channel(meta, user_id, authorization.OWNER)

    # Re-ingest every conda tarball present in the package store.
    pkgstore = config.get_package_store()
    tarballs = [
        fname
        for fname in pkgstore.list_files(channel_name)
        if fname.endswith(".tar.bz2")
    ]
    for fname in tarballs:
        fid = pkgstore.serve_path(channel_name, fname)
        handle_file(channel_name, fname, fid, dao, user_id)

    update_indexes(dao, pkgstore, channel_name)
def package_version(db, user, channel_name, package_name, dao: Dao):
    """Fixture: one package version inside a freshly created channel.

    Creates channel -> package -> version, yields the version, then
    removes all three rows on teardown.
    """
    channel = dao.create_channel(
        Channel(name=channel_name, private=False), user.id, "owner"
    )
    package = dao.create_package(
        channel_name, Package(name=package_name), user.id, "owner"
    )
    version = dao.create_version(
        channel_name,
        package_name,
        "tarbz2",
        "linux-64",
        "0.1",
        0,
        "",
        "",
        "{}",
        user.id,
    )

    yield version

    # Teardown: delete in reverse creation order.
    for row in (version, package, channel):
        db.delete(row)
    db.commit()
def public_channel(dao: Dao, user, channel_role, channel_name, db):
    """Fixture: a public channel, removed again on teardown."""
    chan = dao.create_channel(
        Channel(name=channel_name, private=False), user.id, channel_role
    )

    yield chan

    db.delete(chan)
    db.commit()
def public_channel(dao: Dao, user, channel_role):
    """Fixture: a public channel named "public-channel" (no teardown)."""
    meta = Channel(name="public-channel", private=False)
    return dao.create_channel(meta, user.id, channel_role)
def create_user_with_identity(
    dao: Dao,
    provider: str,
    profile: 'base.UserProfile',
    default_role: Optional[str],
    default_channels: Optional[List[str]],
) -> User:
    """Create a user from an identity-provider profile.

    Also creates the user's configured default channels, falling back
    to a "<username>-<n>" name when the requested channel name is
    already taken.
    """
    username = profile["login"]
    user = dao.create_user_with_profile(
        username=username,
        provider=provider,
        emails=profile.get("emails", []),
        identity_id=profile["id"],
        name=profile["name"],
        avatar_url=profile["avatar_url"],
        role=default_role,
        exist_ok=False,
    )

    def _name_taken(name: str) -> bool:
        # True when a channel with this exact name already exists.
        return bool(
            dao.db.query(Channel).filter(Channel.name == name).one_or_none()
        )

    for wanted_name in (default_channels or []):
        # Probe "<username>-0", "<username>-1", ... until a free name
        # is found.
        name = wanted_name
        suffix = 0
        while _name_taken(name):
            name = f"{username}-{suffix}"
            suffix += 1
        dao.create_channel(
            rest_models.Channel(
                name=name,
                description=f"{username}'s default channel",
                private=True,
            ),
            user.id,
            OWNER,
        )

    return user
def test_package_version(db, dao: Dao, user, channel_name, package_name):
    """Versions returned by get_package_versions must already be sorted
    descending by (version, build number)."""
    dao.create_channel(
        Channel(name=channel_name, private=False), user.id, "owner"
    )
    package = dao.create_package(
        channel_name, Package(name=package_name), user.id, "owner"
    )

    # (version, build number) pairs deliberately inserted out of order.
    specs = [
        ("0.1.0", 0),
        ("1.0.0", 0),
        ("0.0.1", 0),
        ("0.0.2", 0),
        ("0.0.3", 0),
        ("1.0.0", 1),
        ("1.0.0", 2),
        ("0.1.0", 5),
        ("0.1.0", 2),
    ]
    for version, build_number in specs:
        dao.create_version(
            channel_name,
            package_name,
            "tarbz2",
            "linux-64",
            version,
            build_number,
            "",
            f"{package_name}-{version}-{build_number}.tar.bz2",
            "{}",
            user.id,
            size=0,
        )

    rows = dao.get_package_versions(package)
    ordering = [
        (VersionOrder(row[0].version), row[0].build_number) for row in rows
    ]
    assert ordering == sorted(ordering, reverse=True)
def post_channel(
    new_channel: rest_models.Channel,
    background_tasks: BackgroundTasks,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
    task: Task = Depends(get_tasks_worker),
    remote_session: requests.Session = Depends(get_remote_session),
):
    """Create a channel (regular, mirror or proxy) and run its
    configured channel actions.

    Responds with 409 CONFLICT when the channel name is taken.
    """
    user_id = auth.assert_user()

    if dao.get_channel(new_channel.name):
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Channel {new_channel.name} exists",
        )

    # The required permission depends on the kind of channel requested.
    mirror_url = new_channel.mirror_channel_url
    is_mirror = bool(mirror_url) and new_channel.mirror_mode == "mirror"
    is_proxy = bool(mirror_url) and new_channel.mirror_mode == "proxy"
    if not mirror_url:
        auth.assert_create_channel()
    if is_mirror:
        auth.assert_create_mirror_channel()
    if is_proxy:
        auth.assert_create_proxy_channel()

    # Default actions: a mirror channel synchronizes immediately.
    actions = new_channel.metadata.actions
    if actions is None:
        actions = [ChannelActionEnum.synchronize] if is_mirror else []

    channel = dao.create_channel(new_channel, user_id, authorization.OWNER)

    for action in actions:
        task.execute_channel_action(action, channel)
def post_channel(
    request: Request,
    new_channel: rest_models.Channel,
    background_tasks: BackgroundTasks,
    mirror_api_key: Optional[str] = None,
    register_mirror: bool = False,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
    task: Task = Depends(get_tasks_worker),
    config=Depends(get_config),
    session: requests.Session = Depends(get_remote_session),
):
    """Create a channel (regular, mirror or proxy).

    Validates permissions and metadata, applies the size-limit quota,
    creates the channel in the DB and the package store, optionally
    registers this server as a mirror with the upstream channel, and
    finally schedules the channel's configured actions.

    Raises:
        HTTPException 409: a channel with this name already exists.
        HTTPException 422: both includelist and excludelist were given.
    """
    user_id = auth.assert_user()

    existing_channel = dao.get_channel(new_channel.name)
    if existing_channel:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Channel {new_channel.name} exists",
        )

    # Permission check depends on the kind of channel being created.
    if not new_channel.mirror_channel_url:
        auth.assert_create_channel()
    is_mirror = new_channel.mirror_channel_url and new_channel.mirror_mode == "mirror"
    is_proxy = new_channel.mirror_channel_url and new_channel.mirror_mode == "proxy"
    if is_mirror:
        auth.assert_create_mirror_channel()
    if is_proxy:
        auth.assert_create_proxy_channel()

    # Default actions: a mirror channel synchronizes its repodata.
    if new_channel.actions is None:
        if is_mirror:
            actions = [ChannelActionEnum.synchronize_repodata]
        else:
            actions = []
    else:
        actions = new_channel.actions

    # includelist and excludelist are mutually exclusive filters.
    includelist = new_channel.metadata.includelist
    excludelist = new_channel.metadata.excludelist
    if includelist is not None and excludelist is not None:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="Cannot use both `includelist` and `excludelist` together.",
        )

    # Setting an explicit size limit requires extra permission;
    # otherwise fall back to the server-wide quota (if configured).
    user_attrs = new_channel.dict(exclude_unset=True)
    if "size_limit" in user_attrs:
        auth.assert_set_channel_size_limit()
        size_limit = new_channel.size_limit
    else:
        if config.configured_section("quotas"):
            size_limit = config.quotas_channel_quota
        else:
            size_limit = None

    channel = dao.create_channel(new_channel, user_id, authorization.OWNER, size_limit)
    pkgstore.create_channel(new_channel.name)
    indexing.update_indexes(dao, pkgstore, new_channel.name)

    # register mirror
    if is_mirror and register_mirror:
        mirror_url = str(new_channel.mirror_channel_url)
        # NOTE(review): replaces only the first occurrence of "get" in
        # the whole URL — verify upstream URLs never contain "get"
        # elsewhere (e.g. in the host name).
        mirror_url = mirror_url.replace("get", "api/channels")
        headers = {"x-api-key": mirror_api_key} if mirror_api_key else {}
        api_endpoint = str(
            request.url.replace(query=None)) + '/' + new_channel.name
        # (removed a stray no-op `request.url` expression statement here)
        response = session.post(
            mirror_url + '/mirrors',
            json={
                "url": api_endpoint.replace("api/channels", "get"),
                "api_endpoint": api_endpoint,
                "metrics_endpoint": api_endpoint.replace("api", "metrics"),
            },
            headers=headers,
        )
        # Registration is best-effort: log and continue on failure.
        if response.status_code != 201:
            logger.warning(
                f"could not register mirror due to error {response.text}")

    for action in actions:
        task.execute_channel_action(
            action,
            channel,
        )
def reindex_packages_from_store(dao: Dao,
                                config: Config,
                                channel_name: str,
                                user_id,
                                sync: bool = True):
    """Reindex packages from files in the package store.

    In sync mode (default) the channel DB is cleaned up and packages
    already present in the DB are skipped; with ``sync=False`` all DB
    package rows are deleted first and everything is re-imported.
    Missing channels are created as private channels owned by
    ``user_id``. Package archives are unpacked concurrently with a
    thread pool sized from the server config.
    """
    logger.debug(f"Re-indexing channel {channel_name}")
    channel = dao.get_channel(channel_name)
    # "<platform>/<filename>" keys of package versions already in the DB
    pkg_db = []
    if channel:
        if not sync:
            # Full re-import: drop every package row for this channel.
            for package in channel.packages:
                dao.db.delete(package)
            dao.db.commit()
        else:
            # Incremental: clean up stale rows, then record what the DB
            # already has so those files can be skipped below.
            dao.cleanup_channel_db(channel_name)
            for package in channel.packages:
                for pv in package.package_versions:  # type: ignore
                    pkg_db.append(f"{pv.platform}/{pv.filename}")
            dao.db.commit()
    else:
        # Channel not in the DB yet: create it before importing.
        data = rest_models.Channel(name=channel_name,
                                   description="re-indexed from store",
                                   private=True)
        channel = dao.create_channel(data, user_id, authorization.OWNER)

    logger.debug(f"Reading package list for channel {channel_name}")
    user_id = uuid_to_bytes(user_id)
    pkgstore = config.get_package_store()
    all_files = pkgstore.list_files(channel_name)
    pkg_files = [f for f in all_files if f.endswith(".tar.bz2")]
    nthreads = config.general_package_unpack_threads
    logger.debug(
        f"Found {len(pkg_db)} packages for channel {channel_name} in database")
    logger.debug(
        f"Found {len(pkg_files)} packages for channel {channel_name} in pkgstore"
    )
    # Only import files the DB does not already know about.
    pkg_files = list(set(pkg_files) - set(pkg_db))
    logger.debug(
        f"Importing {len(pkg_files)} packages for channel {channel_name}" +
        " from pkgstore")
    # Process in chunks so each thread pool batch stays bounded.
    for pkg_group in chunks(pkg_files, nthreads * 8):
        tic = time.perf_counter()
        with ThreadPoolExecutor(max_workers=nthreads) as executor:
            # Unpack conda metadata concurrently; DB writes via
            # handle_file stay on this thread as results complete.
            results = []
            for fname in pkg_group:
                results.append(
                    executor.submit(handle_condainfo, pkgstore, channel_name,
                                    fname))
            for future in as_completed(results):
                condainfo = future.result()
                if condainfo:
                    handle_file(channel_name, condainfo, dao, user_id)
        toc = time.perf_counter()
        logger.debug(
            f"Imported files {pkg_group[0]} to {pkg_group[-1]} " +
            f"for channel {channel_name} in {toc - tic:0.4f} seconds " +
            f"using {nthreads} threads")
    try:
        update_indexes(dao, pkgstore, channel_name)
        dao.db.commit()
    except IntegrityError:
        # On index failure, roll back and purge inconsistent rows.
        # NOTE(review): `dao.rollback()` (not `dao.db.rollback()`) —
        # presumably a Dao-level wrapper; confirm it exists.
        dao.rollback()
        logger.error(f"Update index {channel_name} failed")
        dao.cleanup_channel_db(channel_name)
        dao.db.commit()