def configure(self, config: Config):
    """Enable this authenticator from the [google] config section.

    Reads the OAuth client id/secret when the section is present and,
    additionally, the e-mail collection flag from [users].
    """
    if not config.configured_section("google"):
        self.is_enabled = False
    else:
        self.client_id = config.google_client_id
        self.client_secret = config.google_client_secret
        self.is_enabled = True
        # e-mail collection is governed by the separate [users] section
        if config.configured_section("users"):
            self.collect_emails = config.users_collect_emails
    # base class sets default_channel and default role
    super().configure(config)
def test_config_is_singleton(config):
    """Config() with no path returns the cached fixture instance;
    clearing the registry makes subsequent constructions fresh."""
    assert Config() is config

    # forget all cached instances
    Config._instances = {}

    fresh = Config()
    assert fresh is not config
    # an explicit path resolves to the same (new) cached instance
    assert Config("config.toml") is fresh
def package_version(db, user, channel_name, package_name, public_package,
                    dao: Dao, config: Config):
    """Fixture: store a test tarball in the package store and register a
    matching version row; delete the row on teardown."""
    pkgstore = config.get_package_store()
    archive = Path("test-package-0.1-0.tar.bz2")
    with archive.open('rb') as fh:
        # str / Path works via Path.__rtruediv__ -> 'linux-64/<name>'
        pkgstore.add_file(fh.read(), channel_name, 'linux-64' / archive)

    version = dao.create_version(
        channel_name,
        package_name,
        "tarbz2",
        "linux-64",
        "0.1",
        0,
        "",
        str(archive),
        "{}",
        user.id,
    )

    yield version

    db.delete(version)
    db.commit()
def test_validate_package_names_files_endpoint(auth_client, public_channel,
                                               mocker, package_name, msg,
                                               config: Config):
    """Uploading an archive renamed to *package_name* either fails validation
    (when *msg* is set) or is stored and served."""
    pkgstore = config.get_package_store()
    package_filename = "test-package-0.1-0.tar.bz2"

    with open(package_filename, "rb") as fid:
        condainfo = CondaInfo(fid, package_filename)
        condainfo._parse_conda()
        # patch the parsed metadata to carry the (possibly invalid) name
        condainfo.info['name'] = package_name
        condainfo.channeldata['packagename'] = package_name

    mocked_cls = mocker.patch("quetz.main.CondaInfo")
    mocked_cls.return_value = condainfo

    with open(package_filename, "rb") as fid:
        response = auth_client.post(
            f"/api/channels/{public_channel.name}/files/",
            files={"files": (f"{package_name}-0.1-0.tar.bz2", fid)},
        )

    served = f'linux-64/{package_name}-0.1-0.tar.bz2'
    if msg:
        assert response.status_code == 422
        assert msg in response.json()["detail"]
        # nothing must have been written to the store
        with pytest.raises(FileNotFoundError):
            pkgstore.serve_path(public_channel.name, served)
    else:
        assert response.status_code == 201
        assert pkgstore.serve_path(public_channel.name, served)
def package_version(
    db,
    user,
    public_channel,
    channel_name,
    package_name,
    public_package,
    dao: Dao,
    config: Config,
):
    """Fixture: create one uploaded package version (DB row + stored file)
    and remove the row on teardown."""
    pkgstore = config.get_package_store()
    fname = "test-package-0.1-0.tar.bz2"
    version = add_package_version(
        fname, "0.1", channel_name, user, dao, package_name
    )

    archive = Path(fname)
    with archive.open("rb") as fh:
        pkgstore.add_file(fh.read(), channel_name, "linux-64" / archive)

    dao.update_channel_size(channel_name)
    db.refresh(public_channel)

    yield version

    db.delete(version)
    db.commit()
def reindex_packages_from_store(
    dao: Dao,
    config: Config,
    channel_name: str,
    user_id: bytes,
):
    """Reindex packages from files in the package store"""
    db = dao.db
    pkgstore = config.get_package_store()
    tarballs = [
        fname
        for fname in pkgstore.list_files(channel_name)
        if fname.endswith(".tar.bz2")
    ]

    channel = dao.get_channel(channel_name)
    if channel is None:
        # no channel row yet: create a fresh private one owned by user_id
        data = rest_models.Channel(
            name=channel_name,
            description="re-indexed from files",
            private=True,
        )
        channel = dao.create_channel(data, user_id, authorization.OWNER)
    else:
        # drop existing package rows so they are rebuilt from the files
        for package in channel.packages:
            db.delete(package)
        db.commit()

    for fname in tarballs:
        fid = pkgstore.serve_path(channel_name, fname)
        handle_file(channel_name, fname, fid, dao, user_id)

    update_indexes(dao, pkgstore, channel_name)
def get_tasks_worker(
    background_tasks: BackgroundTasks,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
    session: requests.Session = Depends(get_remote_session),
    config: Config = Depends(get_config),
) -> Task:
    """Build a Task around the worker backend selected by the [worker]
    config section (thread / subprocess / redis; default: thread)."""
    worker_type = (
        config.worker_type if config.configured_section("worker") else "thread"
    )

    if worker_type == "thread":
        backend = ThreadingWorker(background_tasks, dao, auth, session, config)
    elif worker_type == "subprocess":
        backend = SubprocessWorker(auth.API_key, auth.session, config)
    elif worker_type == "redis":
        # redis backend is optional: rq may not be installed
        if not rq_available:
            raise ValueError("redis and rq not installed on machine")
        backend = RQManager(
            config.worker_redis_ip,
            config.worker_redis_port,
            config.worker_redis_db,
            auth.API_key,
            auth.session,
            config,
        )
    else:
        raise ValueError("wrong configuration in worker.type")

    return Task(auth, backend)
def test_delete_channel_with_packages(
    db, auth_client, private_channel, private_package_version, config: Config
):
    """Deleting a channel must remove its DB rows (channel, package,
    version) and every file in its package store."""
    pkg_store = config.get_package_store()
    pkg_store.add_file("test-file", private_channel.name, "test_file.txt")
    pkg_store.add_file("second", private_channel.name, "subdir/second_file.txt")

    response = auth_client.delete(f"/api/channels/{private_channel.name}")
    assert response.status_code == 200

    def _lookup(model, **criteria):
        # one row or None; raises if the delete left duplicates behind
        return db.query(model).filter_by(**criteria).one_or_none()

    assert _lookup(db_models.Channel, name=private_channel.name) is None
    assert _lookup(
        db_models.PackageVersion,
        package_name=private_package_version.package_name,
    ) is None
    assert _lookup(
        db_models.Package, name=private_package_version.package_name
    ) is None
    assert not pkg_store.list_files(private_channel.name)
def _is_deployment(base_dir: Path): config_file = base_dir.joinpath("config.toml") if (base_dir.exists() and config_file.exists() and base_dir.joinpath("channels").exists()): config = Config(str(config_file.resolve())) with working_directory(base_dir): return database_exists(config.sqlalchemy_database_url) return False
def test_config_without_file_path_set(config_str):
    """Config() must raise without QUETZ_CONFIG_FILE or an explicit path,
    but still work when a path is given directly."""
    # the env variable should not be defined for this test to work
    assert not os.environ.get("QUETZ_CONFIG_FILE")
    # Config must not have been instantiated earlier in this process
    assert not Config._instances

    with pytest.raises(ValueError, match="Environment"):
        Config()

    # an explicit path works even when QUETZ_CONFIG_FILE is unset
    with tempfile.NamedTemporaryFile("w", delete=False) as fid:
        fid.write(config_str)
        fid.flush()
        config = Config(fid.name)
    assert config.configured_section("users")
def get_db_manager():
    """Yield a DB session bound to the configured URL; always close it."""
    db = get_session(Config().sqlalchemy_database_url)
    try:
        yield db
    finally:
        db.close()
def get_db_manager():
    """Yield a DB session (honouring the sqlalchemy echo setting);
    close it when the caller is done."""
    config = Config()
    db = get_session(
        config.sqlalchemy_database_url,
        echo=config.sqlalchemy_echo_sql,
    )
    try:
        yield db
    finally:
        db.close()
def configure(self, config: Config):
    """Register the [pamauthenticator] section and copy its settings
    onto this authenticator when the section is present."""
    config.register(self._make_config())

    if not config.configured_section("pamauthenticator"):
        self.is_enabled = False
    else:
        self.provider = config.pamauthenticator_provider
        self.service = config.pamauthenticator_service
        self.encoding = config.pamauthenticator_encoding
        self.check_account = config.pamauthenticator_check_account
        self.admin_groups = config.pamauthenticator_admin_groups
        self.maintainer_groups = config.pamauthenticator_maintainer_groups
        self.member_groups = config.pamauthenticator_member_groups
        self.is_enabled = True

    # base class applies default role / default channels
    super().configure(config)
def make_package_version(
    db,
    user,
    public_channel,
    channel_name,
    package_name,
    public_package,
    dao: Dao,
    config: Config,
):
    """Fixture factory: create package versions on demand; every version
    created through the factory is deleted on teardown."""
    pkgstore = config.get_package_store()
    created = []

    def _make_package_version(filename, version_number, platform="linux-64"):
        path = Path(filename)
        with path.open("rb") as fh:
            pkgstore.add_file(fh.read(), channel_name, platform / path)
        version = dao.create_version(
            channel_name,
            package_name,
            "tarbz2",
            platform,
            version_number,
            0,
            "",
            str(path),
            "{}",
            user.id,
            size=11,
        )
        dao.update_package_channeldata(
            channel_name,
            package_name,
            {'name': package_name, 'subdirs': [platform]},
        )
        dao.update_channel_size(channel_name)
        created.append(version)
        return version

    yield _make_package_version

    for version in created:
        db.delete(version)
    db.commit()
def _get_config(path: Union[Path, str]) -> Config:
    """get config path"""
    config_file = Path(path) / 'config.toml'
    if not config_file.exists():
        typer.echo(f'Could not find config at {config_file}')
        raise typer.Abort()

    config = Config(str(config_file.resolve()))
    # export the resolved path so spawned processes load the same config
    env_key = _env_prefix + _env_config_file
    if not os.environ.get(env_key):
        os.environ[env_key] = str(config_file.resolve())
    return config
def get_url():
    """Resolve the database URL for alembic: prefer the alembic ini value,
    fall back to the quetz config passed via -x quetzConfig=..."""
    db_path = config.get_main_option("sqlalchemy.url")
    if db_path:
        return db_path

    config_path = context.get_x_argument(as_dictionary=True).get('quetzConfig')
    deployment_path = os.path.split(config_path)[0]
    db_path = Config(config_path).sqlalchemy_database_url
    # rewrite a deployment-relative sqlite path into an absolute one
    abs_path = os.path.abspath(deployment_path)
    return db_path.replace("sqlite:///.", f"sqlite:///{abs_path}")
def configure(self, config: Config):
    """Register and read the [dictauthenticator] section.

    Users are configured as a list of "username:password" strings; the
    parsed pairs populate ``self.passwords``.
    """
    config.register([
        ConfigSection(
            "dictauthenticator",
            [
                ConfigEntry("users", list, default=list),
            ],
        )
    ])

    if config.configured_section("dictauthenticator"):
        # each entry is "username:password"
        self.passwords = dict(
            user_pass.split(":")
            for user_pass in config.dictauthenticator_users)
        self.is_enabled = True
    else:
        self.passwords = {}
        # explicitly disable, consistent with the other authenticators'
        # configure() implementations (previously left unset in this branch)
        self.is_enabled = False

    # call the config of base class to configure default roles and
    # channels
    super().configure(config)
def init_db(
    path: str = typer.Argument(None, help="The path of the deployment"),
):
    """init database and fill users from config file [users] sections"""
    logger.info("Initializing database")

    # _get_config() already returns a Config instance; the previous code
    # re-wrapped that instance in Config(...), passing a Config object
    # where a file path is expected and defeating the singleton lookup.
    config = _get_config(path)

    os.chdir(path)
    db = get_session(config.sqlalchemy_database_url)
    _init_db(db, config)
def _init_db(db: Session, config: Config):
    """Initialize the database and add users from config."""
    if not config.configured_section("users"):
        return

    dao = Dao(db)
    for users, role in (
        (config.users_admins, "owner"),
        (config.users_maintainers, "maintainer"),
        (config.users_members, "member"),
    ):
        for username in users:
            logger.info(f"create user {username} with role {role}")
            dao.create_user_with_role(username, role)
def make_migrations(
    path: str = typer.Argument(None, help="The path of the deployment"),
    message: str = typer.Option(None, help="revision message"),
    plugin: str = typer.Option("quetz", help="head or heads or plugin name"),
    initialize: bool = typer.Option(False, help="initialize migrations"),
):
    """make database migrations for quetz or a plugin"""
    logger.info("Initializing database")

    # _get_config() already returns a Config instance; the previous code
    # re-wrapped it in Config(...), passing a Config object where a file
    # path is expected.
    config = _get_config(path)

    os.chdir(path)
    _make_migrations(config.sqlalchemy_database_url, message, plugin, initialize)
def post_role_file(file: Union[str, bytes], channel_name: str, builder: Callable):
    """Validate an uploaded role file and store it in the channel's package store.

    Returns the role parsed by ``assert_role``; the raw upload bytes are
    persisted verbatim under ``file.filename``.
    """
    # SpooledTemporaryFile may not expose ``seekable`` before rollover;
    # borrow it from the underlying file object so the seeks below work.
    if type(file.file) is SpooledTemporaryFile and not hasattr(
            file, "seekable"):
        file.file.seekable = file.file._file.seekable

    # force the upload into a defined state: jump to the end, then rewind
    file.file.seek(0, os.SEEK_END)
    file.file.seek(0)

    role = None
    with file.file as f:
        # first pass: parse and validate the JSON role document
        role = assert_role(json.load(f), builder)
        # second pass: rewind and persist the raw bytes unchanged
        file.file.seek(0)
        Config().get_package_store().add_file(f.read(), channel_name,
                                              file.filename)

    return role
def get_tasks_worker(
    background_tasks: BackgroundTasks,
    dao: Dao = Depends(get_dao),
    auth: authorization.Rules = Depends(get_rules),
    session: requests.Session = Depends(get_remote_session),
    config: Config = Depends(get_config),
) -> Task:
    """Return a Task wrapping the configured worker backend.

    Only the threading backend is supported here; any other [worker]
    type raises.
    """
    worker_type = (
        config.worker_type if config.configured_section("worker") else "thread"
    )
    if worker_type != "thread":
        raise ValueError("wrong configuration in worker.type")

    backend = ThreadingWorker(background_tasks, dao, auth, session, config)
    return Task(auth, backend)
def config(config_str, config_dir):
    """Fixture: write a config file, chdir into its directory, copy the
    static test data alongside it and yield a freshly-loaded Config."""
    config_path = os.path.join(config_dir, "config.toml")
    with open(config_path, "w") as fid:
        fid.write(config_str)

    old_dir = os.path.abspath(os.curdir)
    os.chdir(config_dir)
    os.environ["QUETZ_CONFIG_FILE"] = config_path

    # copy the bundled test fixtures next to the config file
    data_dir = os.path.join(os.path.dirname(quetz.__file__), "tests", "data")
    for filename in os.listdir(data_dir):
        src = os.path.join(data_dir, filename)
        if os.path.isfile(src):
            shutil.copy(src, os.path.join(config_dir, filename))

    # drop any cached singleton so this config file is actually loaded
    Config._instances = {}
    yield Config()

    os.chdir(old_dir)
def package_version(
    db,
    user,
    public_channel,
    channel_name,
    package_name,
    public_package,
    dao: Dao,
    config: Config,
):
    """Fixture: upload the bundled xtensor-io tarball, register a version
    row and refresh channel state; teardown is best-effort."""
    pkgstore = config.get_package_store()
    archive = (
        Path(__file__).parent / "data" / "xtensor-io-0.10.3-hb585cf6_0.tar.bz2"
    )
    with archive.open("rb") as fh:
        pkgstore.add_file(fh.read(), channel_name, "linux-64" / archive)

    version = dao.create_version(
        channel_name,
        package_name,
        "tarbz2",
        "linux-64",
        "0.1",
        0,
        "",
        str(archive),
        "{}",
        user.id,
        size=11,
    )
    dao.update_channel_size(channel_name)
    db.refresh(public_channel)

    yield version

    # the test itself may already have removed the row
    try:
        db.delete(version)
        db.commit()
    except Exception:
        pass
def test_update_indexes(config: Config, local_channel, dao):
    """update_indexes must emit channeldata, index pages and repodata,
    each also in .bz2 and .gz compressed form."""
    pkgstore = config.get_package_store()
    update_indexes(dao, pkgstore, local_channel.name)

    base_files = [
        'channeldata.json',
        'index.html',
        'noarch/index.html',
        'noarch/repodata.json',
    ]
    # every base file plus its two compressed variants
    expected_files = (
        base_files
        + [name + '.bz2' for name in base_files]
        + [name + '.gz' for name in base_files]
    )

    files = pkgstore.list_files(local_channel.name)
    assert sorted(files) == sorted(expected_files)
def get_test_config():
    """Build a Config backed by a shared in-memory sqlite session
    (created lazily on first call)."""
    global _db
    if _db is None:
        _db = get_session('sqlite:///:memory:')

    settings = {
        'github': {'client_id': '', 'client_secret': ''},
        'sqlalchemy': {'database_url': ''},
        'session': {'secret': 'abcdefg', 'https_only': False},
    }
    return Config(config=settings, db_session=_db)
def make_package_version(
    db,
    user,
    public_channel,
    channel_name,
    package_name,
    public_package,
    dao: Dao,
    config: Config,
):
    """Fixture factory: register a package version (file + DB row) in the
    public channel. No teardown — rows persist after the test."""
    pkgstore = config.get_package_store()

    def _make_package_version(filename, version_number, platform="linux-64"):
        path = Path(filename)
        with path.open("rb") as fh:
            pkgstore.add_file(fh.read(), channel_name, platform / path)
        version = dao.create_version(
            channel_name,
            package_name,
            "tarbz2",
            platform,
            version_number,
            0,
            "",
            str(path),
            "{}",
            user.id,
            size=11,
        )
        dao.update_channel_size(channel_name)
        return version

    yield _make_package_version
def _set_user_roles(db: Session, config: Config):
    """Assign roles from the [users] config section to existing users.

    Entries must be qualified as 'PROVIDER:USERNAME'. A user keeps a
    previously-assigned non-default role; missing users are only warned
    about.
    """
    if not config.configured_section("users"):
        return

    role_map = [
        (config.users_admins, "owner"),
        (config.users_maintainers, "maintainer"),
        (config.users_members, "member"),
    ]
    default_role = config.users_default_role

    for users, role in role_map:
        for username in users:
            try:
                provider, username = username.split(":")
            except ValueError:
                # fix: the original implicitly-concatenated literals were
                # missing separating spaces ("users inthe format",
                # "one of'google'"); the old comment also wrongly claimed
                # a github fallback while the code raises.
                raise ValueError(
                    "could not parse the users setting, please provide users "
                    "in the format 'PROVIDER:USERNAME' where PROVIDER is one "
                    "of 'google', 'github', 'dummy', etc.")

            logger.info(f"create user {username} with role {role}")
            user = (db.query(User).join(Identity).filter(
                Identity.provider == provider).filter(
                    User.username == username).one_or_none())

            if not user:
                logger.warning(f"could not find user '{username}' "
                               f"with identity from provider '{provider}'")
            elif user.role is not None and user.role != default_role:
                # never downgrade/overwrite an explicitly assigned role
                logger.warning(
                    f"user has already role {user.role} not assigning a new role"
                )
            else:
                user.role = role

    db.commit()
def test_config_with_path(config_dir, config_base):
    """Config instances are cached per file path: different paths give
    different instances, the same path returns the cached one."""
    one_path = os.path.join(config_dir, "one_config.toml")
    other_path = os.path.join(config_dir, "other_config.toml")
    for path, admin in ((one_path, "one"), (other_path, "other")):
        with open(path, 'w') as fid:
            fid.write("\n".join([config_base, f"[users]\nadmins=['{admin}']"]))

    Config._instances = {}

    c_one = Config(one_path)
    assert c_one.configured_section("users")
    assert c_one.users_admins == ["one"]

    c_other = Config(other_path)
    assert c_other.configured_section("users")
    assert c_other.users_admins == ["other"]

    # requesting the first path again returns the cached instance
    assert Config(one_path) is c_one
def get_config():
    """Return the application-wide Config singleton."""
    return Config()