def get_api_keys(dao: Dao = Depends(get_dao), auth: authorization.Rules = Depends(get_rules)):
    """Get API keys for current user.

    Returns one ``rest_models.ApiKey`` per distinct key, each carrying the
    channel/package roles attached to it.
    """
    user_id = auth.assert_user()

    api_key_list = dao.get_package_api_keys(user_id)
    api_channel_key_list = dao.get_channel_api_keys(user_id)

    from itertools import groupby

    # bug fix: groupby only merges *adjacent* items with equal keys, so the
    # combined (member, api_key) list must be sorted by the grouping key
    # first; otherwise the same API key could appear in several groups.
    member_key_pairs = sorted(
        [*api_key_list, *api_channel_key_list],
        key=lambda member_api_key: member_api_key[1].key,
    )

    return [
        rest_models.ApiKey(
            key=api_key.key,
            description=api_key.description,
            roles=[
                rest_models.CPRole(
                    channel=member.channel_name,
                    # package-level members carry package_name,
                    # channel-level members do not
                    package=member.package_name
                    if hasattr(member, "package_name")
                    else None,
                    role=member.role,
                )
                for member, api_key in member_key_list
            ],
        )
        for api_key, member_key_list in groupby(
            member_key_pairs,
            lambda member_api_key: member_api_key[1],
        )
    ]
def package_version(db, user, channel_name, package_name, dao: Dao):
    """Fixture: create a channel, a package and one version; remove them
    again after the test is done."""
    channel = dao.create_channel(
        Channel(name=channel_name, private=False), user.id, "owner"
    )
    package = dao.create_package(
        channel_name, Package(name=package_name), user.id, "owner"
    )
    version = dao.create_version(
        channel_name,
        package_name,
        "tarbz2",
        "linux-64",
        "0.1",
        0,
        "",
        "",
        "{}",
        user.id,
    )

    yield version

    # tear down in reverse creation order
    for row in (version, package, channel):
        db.delete(row)
    db.commit()
def create_packages_from_channeldata(
    channel_name: str, user_id: bytes, channeldata: dict, dao: Dao
):
    """Create package records in a channel from a channeldata.json dict,
    refreshing metadata of packages that already exist."""
    for package_name, metadata in channeldata.get("packages", {}).items():
        description = metadata.get("description", "")
        summary = metadata.get("summary", "")
        try:
            package = dao.create_package(
                channel_name,
                rest_models.Package(
                    name=package_name,
                    summary=summary,
                    description=description,
                ),
                user_id,
                role=authorization.OWNER,
            )
        except DBError:
            # package already exists so skip it so we retrieve and update it
            package = dao.get_package(channel_name, package_name)
            package.description = description
            package.summary = summary

        package.url = metadata.get("home", "")
        package.platforms = ":".join(metadata.get("subdirs", []))
        package.channeldata = json.dumps(metadata)
        dao.db.commit()
def delete_package_version(
    platform: str,
    filename: str,
    channel_name: str,
    package_name: str,
    dao: Dao = Depends(get_dao),
    db=Depends(get_db),
    auth: authorization.Rules = Depends(get_rules),
):
    """Delete a single package version: the database row and the stored file.

    Raises HTTP 404 when no matching version exists.
    """
    version = dao.get_package_version_by_filename(
        channel_name, package_name, filename, platform
    )

    # bug fix: check existence *before* dereferencing version.package —
    # the original order raised AttributeError (HTTP 500) for a missing
    # version instead of the intended 404
    if not version:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"package version {platform}/{filename} not found",
        )

    auth.assert_package_delete(version.package)

    db.delete(version)
    db.commit()

    # remove the backing file and refresh the channel size accounting
    path = os.path.join(platform, filename)
    pkgstore.delete_file(channel_name, path)

    dao.update_channel_size(channel_name)
def create_version_from_metadata(
    channel_name: str,
    user_id: bytes,
    package_file_name: str,
    package_data: dict,
    dao: Dao,
):
    """Create a package version from metadata extracted from a package file,
    creating the package record first if it does not exist yet.

    Returns the created version object.
    """
    package_name = package_data["name"]
    package = dao.get_package(channel_name, package_name)
    if not package:
        package_info = rest_models.Package(
            name=package_name,
            summary=package_data.get("summary", ""),
            description=package_data.get("description", ""),
        )
        dao.create_package(channel_name, package_info, user_id, "owner")

    # bug fix: the format identifier for .conda packages is "conda" — the
    # original stored a stray leading dot (".conda"), inconsistent with the
    # dot-less "tarbz2" identifier used for .tar.bz2 files
    pkg_format = "tarbz2" if package_file_name.endswith(".tar.bz2") else "conda"

    version = dao.create_version(
        channel_name,
        package_name,
        pkg_format,
        get_subdir_compat(package_data),
        package_data["version"],
        int(package_data["build_number"]),
        package_data["build"],
        package_file_name,
        json.dumps(package_data),
        user_id,
        package_data["size"],
    )
    return version
def check_for_signed_tos(db, user_id, user_role):
    """Return True when the user may proceed under the terms of service.

    Owners are exempt; when no ToS document exists there is nothing to sign.
    Raises HTTP 404 for an unknown user and HTTP 403 when the most recent
    ToS has not been signed.
    """
    dao = Dao(db)
    user = dao.get_user(user_id)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"user with id {user_id} not found",
        )

    if user_role == OWNER:
        return True

    # most recently created terms-of-service document, if any
    current_tos = (
        db.query(TermsOfService)
        .order_by(TermsOfService.time_created.desc())
        .first()
    )
    if not current_tos:
        return True

    signature = (
        db.query(TermsOfServiceSignatures)
        .filter(TermsOfServiceSignatures.user_id == user_id)
        .filter(TermsOfServiceSignatures.tos_id == current_tos.id)
        .one_or_none()
    )
    if signature:
        return True

    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail=f"terms of service is not signed for {user.username}",
    )
def _check_timestamp(channel: Channel, dao: Dao):
    """Yield a predicate for deciding whether a mirrored package is already
    up to date, by comparing its ``time_modified`` metadata against the last
    synchronisation timestamp stored in the quetz database; on exit, persist
    the newest timestamp seen.

    NOTE(review): this is a generator apparently intended to be used as a
    context manager (decorator not visible in this chunk) — confirm.
    """
    last_synchronization = channel.timestamp_mirror_sync
    # newest "time_modified" observed across all packages checked so far
    last_timestamp = 0

    def _func(package_name, metadata):
        # packages without a modification time cannot be compared
        if "time_modified" not in metadata:
            return None
        # use nonlocal to be able to modified last_timestamp in the
        # outer scope
        nonlocal last_timestamp
        time_modified = metadata["time_modified"]
        last_timestamp = max(time_modified, last_timestamp)
        # if channel was never synchronised we can't determine
        # whether the package is up-to-date from the timestamp
        is_uptodate = (
            time_modified <= last_synchronization if last_synchronization else None
        )
        if is_uptodate is not None:
            logger.debug(f"comparing synchronisation timestamps of {package_name}")
        return is_uptodate

    yield _func

    # after we are done, we need to update the last_synchronisation
    # in the db
    # NOTE(review): if timestamp_mirror_sync can be None, max(None, int)
    # raises TypeError — confirm the column has a non-null (0) default
    sync_timestamp = max(last_synchronization, last_timestamp)
    dao.update_channel(channel.name, {"timestamp_mirror_sync": sync_timestamp})
def reindex_packages_from_store(
    dao: Dao,
    config: Config,
    channel_name: str,
    user_id: bytes,
):
    """Reindex packages from files in the package store."""
    db = dao.db
    pkgstore = config.get_package_store()

    tarbz2_files = [
        fname
        for fname in pkgstore.list_files(channel_name)
        if fname.endswith(".tar.bz2")
    ]

    channel = dao.get_channel(channel_name)
    if channel:
        # drop stale package rows before re-ingesting the files
        for package in channel.packages:
            db.delete(package)
        db.commit()
    else:
        channel = dao.create_channel(
            rest_models.Channel(
                name=channel_name,
                description="re-indexed from files",
                private=True,
            ),
            user_id,
            authorization.OWNER,
        )

    for fname in tarbz2_files:
        fid = pkgstore.serve_path(channel_name, fname)
        handle_file(channel_name, fname, fid, dao, user_id)

    update_indexes(dao, pkgstore, channel_name)
def test_get_package_version_metrics_intervals(dao: Dao, channel, db, package_version, interval):
    """One download should show up as a single bucket; with fill_zeros the
    series must span the whole requested window, zero-padded."""
    now = datetime.datetime(2020, 10, 1, 10, 1, 10)
    dao.incr_download_count(
        channel.name,
        package_version.filename,
        package_version.platform,
        timestamp=now,
    )

    bucket = round_timestamp(now, interval)

    metrics = dao.get_package_version_metrics(package_version.id, interval, "download")
    assert [(m.timestamp, m.count) for m in metrics] == [(bucket, 1)]

    end = bucket.replace(year=2021)
    metrics = dao.get_package_version_metrics(
        package_version.id,
        interval,
        "download",
        start=bucket,
        end=end,
        fill_zeros=True,
    )
    series = [(m.timestamp, m.count) for m in metrics]
    assert series[0] == (bucket, 1)
    assert series[-1] == (end, 0)
def delete_channel_mirror(
    channel_name: str,
    mirror_id: str,
    channel: db_models.Channel = Depends(get_channel_or_fail),
    auth: authorization.Rules = Depends(get_rules),
    dao: Dao = Depends(get_dao),
):
    """Remove a registered mirror from a channel.

    The caller must be authorized to unregister mirrors on this channel;
    the channel dependency fails the request early if the channel is unknown.
    """
    auth.assert_unregister_mirror(channel_name)
    dao.delete_channel_mirror(channel_name, mirror_id)
def delete_user(
    username: str,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
):
    """Delete a user account by username.

    Raises HTTP 404 when the user does not exist.
    """
    user = dao.get_user_by_username(username)

    # bug fix: guard against an unknown username — the original dereferenced
    # user.id immediately, turning a missing user into an AttributeError
    # (HTTP 500); matches the existing pattern in set_user_role
    if not user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"User {username} not found",
        )

    auth.assert_delete_user(user.id)
    dao.delete_user(user.id)
def post_api_key(api_key: rest_models.BaseApiKey,
                 dao: Dao = Depends(get_dao),
                 auth: authorization.Rules = Depends(get_rules)):
    """Create an API key with the requested roles for the current user."""
    # the caller may only grant roles they themselves are allowed to grant
    auth.assert_create_api_key_roles(api_key.roles)
    user_id = auth.assert_user()

    new_key = secrets.token_urlsafe(32)
    dao.create_api_key(user_id, api_key, new_key)
def post_api_key(
    api_key: rest_models.BaseApiKey,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
):
    """Create an API key for the current user and return its representation.

    A key either inherits the user's own role (``roles=None`` in the
    response) or carries explicit channel/package roles.
    """
    auth.assert_create_api_key_roles(api_key.roles)
    user_id = auth.assert_user()
    key = generate_random_key(32)

    dao.create_api_key(user_id, api_key, key)

    # re-read the stored key together with its role memberships so the
    # response reflects what was actually persisted
    user_role_keys, custom_role_keys = dao.get_api_keys_with_members(
        user_id, key)

    if len(user_role_keys) > 0:
        # user-role key: no explicit role list in the response
        key = user_role_keys[0]
        return rest_models.ApiKey(
            key=key.key,
            description=key.description,
            time_created=key.time_created,
            expire_at=key.expire_at,
            roles=None,
        )
    else:
        # custom-role key: tuples of (key, package_member, channel_member)
        key = custom_role_keys[0][0]
        package_member = custom_role_keys[0][1]
        channel_member = custom_role_keys[0][2]
        roles = []
        if package_member:
            roles.append(
                CPRole(
                    channel=package_member.channel_name,
                    package=package_member.package_name,
                    role=package_member.role,
                ))
        if channel_member:
            # channel-wide role: not bound to a specific package
            roles.append(
                CPRole(
                    channel=channel_member.channel_name,
                    package=None,
                    role=channel_member.role,
                ))
        return rest_models.ApiKey(
            key=key.key,
            description=key.description,
            time_created=key.time_created,
            expire_at=key.expire_at,
            roles=roles,
        )
def delete_channel(
    channel: db_models.Channel = Depends(get_channel_allow_proxy),
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
):
    """Delete a channel together with every file stored for it."""
    auth.assert_delete_channel(channel)
    dao.delete_channel(channel.name)

    # remove the stored package files one by one
    for stored_file in pkgstore.list_files(channel.name):
        pkgstore.delete_file(channel.name, destination=stored_file)
def post_channel(new_channel: rest_models.Channel,
                 dao: Dao = Depends(get_dao),
                 auth: authorization.Rules = Depends(get_rules)):
    """Create a new channel owned by the current user.

    Raises HTTP 409 when a channel with that name already exists.
    """
    user_id = auth.assert_user()

    if dao.get_channel(new_channel.name):
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f'Channel {new_channel.name} exists')

    dao.create_channel(new_channel, user_id, authorization.OWNER)
def post_package(new_package: rest_models.Package,
                 channel: db_models.Channel = Depends(get_channel_or_fail),
                 auth: authorization.Rules = Depends(get_rules),
                 dao: Dao = Depends(get_dao)):
    """Create a new package inside a channel, owned by the current user.

    Raises HTTP 409 when the package already exists in the channel.
    """
    user_id = auth.assert_user()

    if dao.get_package(channel.name, new_package.name):
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f'Package {channel.name}/{new_package.name} exists')

    dao.create_package(channel.name, new_package, user_id, authorization.OWNER)
def list_user_channels(username: str, dao: Dao, auth: authorization.Rules,
                       skip: int, limit: int):
    """Return the channels (with roles) of a user, paginated by skip/limit.

    Raises HTTP 404 for an unknown user or one without a profile.
    """
    user = dao.get_user_by_username(username)
    # a user row without a profile is treated the same as a missing user
    if not user or not user.profile:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail=f"User {username} not found")

    auth.assert_read_user_data(user.id)
    return dao.get_user_channels_with_role(skip, limit, user.id)
def delete_channel(
    channel: db_models.Channel = Depends(get_channel_allow_proxy),
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
):
    """Delete a channel record and its backing package store directory."""
    auth.assert_delete_channel(channel)

    dao.delete_channel(channel.name)
    try:
        pkgstore.remove_channel(channel.name)
    except FileNotFoundError:
        # best effort: the store directory may never have been created
        logger.warning(
            f"trying to remove non-existent package store for channel {channel.name}"
        )
def post_package_member(
        new_member: rest_models.PostMember,
        package: db_models.Package = Depends(get_package_or_fail),
        dao: Dao = Depends(get_dao),
        auth: authorization.Rules = Depends(get_rules)):
    """Grant a user a role on a package.

    Raises HTTP 409 when the user is already a member of the package.
    """
    channel_name = package.channel.name

    auth.assert_add_package_member(channel_name, package.name, new_member.role)

    existing_member = dao.get_package_member(channel_name, package.name,
                                             new_member.username)
    if existing_member:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f'Member {new_member.username} in {package.channel.name}/{package.name} exists')

    dao.create_package_member(channel_name, package.name, new_member)
def _init_db(db: Session, config: Config):
    """Initialize the database and add users from config."""
    if not config.configured_section("users"):
        return

    dao = Dao(db)
    # config lists → server role granted to each listed username
    for users, role in (
        (config.users_admins, "owner"),
        (config.users_maintainers, "maintainer"),
        (config.users_members, "member"),
    ):
        for username in users:
            logger.info(f"create user {username} with role {role}")
            dao.create_user_with_role(username, role)
def get_jobs(
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
    status: List[JobStatus] = Query([JobStatus.pending, JobStatus.running]),
    skip: int = 0,
    limit: int = PAGINATION_LIMIT,
):
    """List jobs filtered by status; non-elevated users only see their own."""
    # if this is merged https://github.com/tiangolo/fastapi/issues/2077
    # we will be able to use non-exploded list, i.e., ?state=running,pending
    user_id = auth.assert_user()

    query_args = dict(states=status, skip=skip, limit=limit)
    if not auth.is_user_elevated(user_id):
        # restrict to jobs owned by the requesting user
        query_args["owner_id"] = user_id
    return dao.get_jobs(**query_args)
def post_channel_mirror(
    request: Request,
    mirror: rest_models.ChannelMirrorBase,
    channel_name: str,
    channel: db_models.Channel = Depends(get_channel_or_fail),
    auth: authorization.Rules = Depends(get_rules),
    dao: Dao = Depends(get_dao),
    remote_session: requests.Session = Depends(get_remote_session),
):
    """Register a mirror for a channel after validating that the remote
    server is reachable and is itself a quetz mirror server.

    Raises HTTP 422 for unreachable/non-quetz remotes and HTTP 404 when
    the remote reports no mirror channel url.
    """
    auth.assert_register_mirror(channel_name)

    logger.debug(f"registering mirror {mirror.url}")

    # derive default endpoints from the mirror url when not given explicitly
    if not mirror.api_endpoint:
        mirror.api_endpoint = mirror.url.replace("get", "api/channels")
    if not mirror.metrics_endpoint:
        mirror.metrics_endpoint = mirror.url.replace("get", "metrics/channels")

    # check api response
    response = remote_session.get(mirror.api_endpoint)
    if response.status_code != 200:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"could not connect to remote repository {mirror.url}",
        )

    try:
        mirrored_server = response.json()["mirror_channel_url"]
    except KeyError:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="mirror server is not quetz server",
        )

    if not mirrored_server:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"{mirror.url} is not a mirror server",
        )

    dao.create_channel_mirror(
        channel_name, mirror.url, mirror.api_endpoint, mirror.metrics_endpoint
    )

    logger.info(f"successfully registered mirror {mirror.url}")
def get_packages(channel: db_models.Channel = Depends(get_channel_or_fail),
                 dao: Dao = Depends(get_dao),
                 q: str = None):
    """
    Retrieve all packages in a channel, optionally matching a query `q`.
    """
    # skip=0, limit=-1: no pagination — return every matching package
    return dao.get_packages(channel.name, 0, -1, q)
def search(
    query: str,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
):
    """Search packages matching `query`, scoped to what the current user
    is allowed to see."""
    # get_user() rather than assert_user() — presumably returns None for
    # anonymous requests so public results can still be searched; confirm
    user_id = auth.get_user()
    return dao.search_packages(query, user_id)
def set_user_role(
    username: str,
    role: rest_models.UserRole,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
):
    """Assign a server-wide role to a user.

    Raises HTTP 404 when the user does not exist.
    """
    if not dao.get_user_by_username(username):
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail=f"User {username} not found")

    # the caller may only hand out roles they are allowed to assign
    auth.assert_assign_user_role(role.role)
    dao.set_user_role(username, role=role.role)
def package_version(db, user, channel_name, package_name, public_package,
                    dao: Dao, config: Config):
    """Fixture: upload a test package file into the store and create a
    matching version row; delete the row afterwards."""
    pkgstore = config.get_package_store()

    filename = Path("test-package-0.1-0.tar.bz2")
    with open(filename, 'rb') as fid:
        # str / Path works via Path.__rtruediv__
        pkgstore.add_file(fid.read(), channel_name, 'linux-64' / filename)

    version = dao.create_version(
        channel_name,
        package_name,
        "tarbz2",
        "linux-64",
        "0.1",
        0,
        "",
        str(filename),
        "{}",
        user.id,
    )

    yield version

    db.delete(version)
    db.commit()
def test_package_version(db, dao: Dao, channel_name, package_name,
                         package_with_versions):
    """Versions must be returned ordered newest-first (version, build)."""
    rows = dao.get_package_versions(package_with_versions)
    ordering = [
        (VersionOrder(row[0].version), row[0].build_number) for row in rows
    ]
    # the list as returned must already be in descending order
    assert ordering == sorted(ordering, reverse=True)
def post_file_to_channel(
    background_tasks: BackgroundTasks,
    files: List[UploadFile] = File(...),
    force: Optional[bool] = Form(None),
    channel: db_models.Channel = Depends(
        ChannelChecker(allow_proxy=False, allow_mirror=False)),
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
):
    """Upload package files to a channel and schedule an index refresh.

    Proxy and mirror channels are rejected by the ChannelChecker dependency.
    `force` presumably allows overwriting existing files — confirm in
    handle_package_files.
    """
    handle_package_files(channel, files, dao, auth, force)
    dao.update_channel_size(channel.name)

    # Background task to update indexes
    background_tasks.add_task(indexing.update_indexes, dao, pkgstore,
                              channel.name)
def channel_mirror(public_channel, dao: Dao):
    """Fixture: register a mirror (url, api and metrics endpoints) on the
    public test channel and return the created mirror record."""
    mirror_url = "http://mirror_server/get/my-mirror"
    api_endpoint = "http://mirror_server/api/my-mirror"
    metrics_endpoint = "http://mirror_server/metrics/channels/my-mirror"
    return dao.create_channel_mirror(
        public_channel.name, mirror_url, api_endpoint, metrics_endpoint
    )
def get_package_members(
    package: db_models.Package = Depends(get_package_or_fail),
    dao: Dao = Depends(get_dao),
):
    """List the members (users and their roles) of a package."""
    return dao.get_package_members(package.channel.name, package.name)