def test_google_login_http_headers_are_less_than_4k_for_user_with_many_projects(
    app, client, monkeypatch, db_session
):
    """
    Test that when the current user has access to a large number of projects,
    the HTTP headers of the response from a GET to /login/google/login are
    less than 4k bytes in size.
    """
    monkeypatch.setitem(config, "MOCK_GOOGLE_AUTH", True)

    test_session_jwt = create_session_token(
        app.keypairs[0],
        config.get("SESSION_TIMEOUT"),
        context={
            "redirect": "https://localhost/user/oauth2/authorize?client_id=7f7kAS4MJraUuo77d7RWHr4mZ6bvGtuzup7hw46I&response_type=id_token&redirect_uri=https://webapp.example/fence&scope=openid+user+data+google_credentials&nonce=randomvalue"
        },
    )
    client.set_cookie(
        "localhost",
        config["SESSION_COOKIE_NAME"],
        test_session_jwt,
        httponly=True,
        samesite="Lax",
    )

    user_projects = {
        "test": {
            f"project{x}": {
                "read",
                "read-storage",
                "update",
                "upload",
                "create",
                "write-storage",
                "delete",
            }
            for x in range(20)
        }
    }
    user_info = {
        "test": {
            "tags": {},
        }
    }

    dbGaP = os.environ.get("dbGaP") or config.get("dbGaP")
    syncer = UserSyncer(dbGaP, config["DB"], {})
    syncer.sync_to_db_and_storage_backend(user_projects, user_info, db_session)

    resp = client.get("/login/google/login")
    assert len(str(resp.headers)) < 4096
    assert resp.status_code == 302
def syncer(db_session, request, rsa_private_key, kid):
    """
    Build a UserSyncer for tests: create a test storage provider, projects,
    and users (each with a manually added visa) in the db, and replace the
    arborist client with a MagicMock.
    """
    if request.param == "google":
        backend = "google"
    else:
        backend = "cleversafe"
    backend_name = "test-" + backend

    storage_credentials = {str(backend_name): {"backend": backend}}
    provider = [{"name": backend_name, "backend": backend}]

    users = [
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
        {"username": "******", "is_admin": False, "email": "*****@*****.**"},
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
        {"username": "******", "is_admin": False, "email": "*****@*****.**"},
        {"username": "******", "is_admin": False, "email": "*****@*****.**"},
    ]

    projects = [
        {
            "auth_id": "TCGA-PCAWG",
            "storage_accesses": [{"buckets": ["test-bucket"], "name": backend_name}],
        },
        {
            "auth_id": "phs000178",
            "name": "TCGA",
            "storage_accesses": [{"buckets": ["test-bucket2"], "name": backend_name}],
        },
        {
            "auth_id": "phs000179",
            "name": "BLAH",
            "storage_accesses": [{"buckets": ["test-bucket3"], "name": backend_name}],
        },
    ]

    project_mapping = {
        "phs000178": [
            {"name": "TCGA", "auth_id": "phs000178"},
            {"name": "TCGA-PCAWG", "auth_id": "TCGA-PCAWG"},
        ],
        "phs000179": [{"name": "BLAH", "auth_id": "phs000179"}],
        "phstest": [{"name": "Test", "auth_id": "Test"}],
    }

    # Load the test config once instead of re-opening the file for each key.
    config_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        "test-fence-config.yaml",
    )
    with open(config_path) as f:
        test_config = yaml_load(f)
    dbGap = test_config.get("dbGaP")
    test_db = test_config.get("DB")

    syncer_obj = UserSyncer(
        dbGaP=dbGap,
        DB=test_db,
        db_session=db_session,
        project_mapping=project_mapping,
        storage_credentials=storage_credentials,
        is_sync_from_dbgap_server=False,
        sync_from_local_csv_dir=LOCAL_CSV_DIR,
        sync_from_local_yaml_file=LOCAL_YAML_DIR,
    )
    syncer_obj.arborist_client = MagicMock(ArboristClient)

    def mocked_update(parent_path, resource, **kwargs):
        # pretend arborist accepted the update: tag the resource, flatten its
        # subresources to their names, and echo it back
        resource["tag"] = "123456"
        resource["subresources"] = [
            subresource.get("name", subresource.get("path", "").lstrip("/"))
            for subresource in resource.get("subresources", [])
            if subresource.get("name", subresource.get("path", "").lstrip("/"))
        ]
        response = {"updated": resource}
        return response

    def mocked_get(path, **kwargs):
        return None

    syncer_obj.arborist_client.update_resource = MagicMock(side_effect=mocked_update)
    syncer_obj.arborist_client.get_resource = MagicMock(side_effect=mocked_get)
    syncer_obj.arborist_client.get_policy.side_effect = lambda _: None
    syncer_obj.arborist_client._user_url = "/user"

    for element in provider:
        udm.create_provider(db_session, element["name"], backend=element["backend"])

    test_projects = []
    for project in projects:
        p = udm.create_project_with_dict(db_session, project)
        test_projects.append(p)
        for sa in project["storage_accesses"]:
            for bucket in sa["buckets"]:
                syncer_obj.storage_manager.create_bucket(
                    sa["name"], db_session, bucket, p
                )

    for user in users:
        user = User(**user)
        db_session.add(user)
        add_visa_manually(db_session, user, rsa_private_key, kid)
    db_session.commit()

    return syncer_obj
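# Note: the syncer fixture above reads ``request.param``, so it is presumably
# registered as a parametrized pytest fixture. The decorator is not shown in
# these snippets; a minimal sketch of the assumed registration (parameter
# values inferred from the google/cleversafe branch at the top of the fixture):
#
#     @pytest.fixture(params=["google", "cleversafe"])
#     def syncer(db_session, request, rsa_private_key, kid):
#         ...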
def init_syncer(
    dbGaP,
    STORAGE_CREDENTIALS,
    DB,
    projects=None,
    is_sync_from_dbgap_server=False,
    sync_from_local_csv_dir=None,
    sync_from_local_yaml_file=None,
    arborist=None,
    folder=None,
):
    """
    Sync ACL files from dbGaP to the auth db and storage backends.

    Imports from config are done here because dbGaP is an optional
    requirement for fence, so it might not be specified in the config.

    Args:
        projects: path to a project_mapping yaml file which contains the
            mapping from dbGaP phsids to projects in the fence database

    Returns:
        UserSyncer: configured syncer, or None if a provided path does not
        exist

    Examples:
        The expected yaml structure should look like:

        .. code-block:: yaml

            phs000178:
              - name: TCGA
                auth_id: phs000178
              - name: TCGA-PCAWG
                auth_id: TCGA-PCAWG
            phs000235:
              - name: CGCI
                auth_id: phs000235
    """
    try:
        cirrus_config.update(**config["CIRRUS_CFG"])
    except AttributeError:
        # no cirrus config, continue anyway. Google APIs will probably fail.
        # this is okay if users don't need access to Google buckets
        pass

    if projects is not None and not os.path.exists(projects):
        logger.error("====={} is not found!!!=======".format(projects))
        return
    if sync_from_local_csv_dir and not os.path.exists(sync_from_local_csv_dir):
        logger.error("====={} is not found!!!=======".format(sync_from_local_csv_dir))
        return
    if sync_from_local_yaml_file and not os.path.exists(sync_from_local_yaml_file):
        logger.error(
            "====={} is not found!!!=======".format(sync_from_local_yaml_file)
        )
        return

    project_mapping = None
    if projects:
        try:
            with open(projects, "r") as f:
                project_mapping = safe_load(f)
        except IOError:
            pass

    return UserSyncer(
        dbGaP,
        DB,
        project_mapping=project_mapping,
        storage_credentials=STORAGE_CREDENTIALS,
        is_sync_from_dbgap_server=is_sync_from_dbgap_server,
        sync_from_local_csv_dir=sync_from_local_csv_dir,
        sync_from_local_yaml_file=sync_from_local_yaml_file,
        arborist=arborist,
        folder=folder,
    )
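# A minimal, illustrative sketch of how init_syncer might be driven by a sync
# job. The config keys, the project_mapping path, and the sync() call are
# assumptions for illustration, not taken from this file.
def _example_run_usersync():
    syncer = init_syncer(
        dbGaP=config["dbGaP"],
        STORAGE_CREDENTIALS=config["STORAGE_CREDENTIALS"],
        DB=config["DB"],
        projects=None,  # or a path to a project_mapping yaml file
    )
    if syncer:
        # assumption: UserSyncer exposes a sync() entry point
        syncer.sync()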
def syncer(db_session, request):
    """
    Build a UserSyncer for tests: create a test storage provider, projects,
    and users in the db, and replace the arborist client with a MagicMock.
    """
    if request.param == "google":
        backend = "google"
    else:
        backend = "cleversafe"
    backend_name = "test-" + backend

    storage_credentials = {str(backend_name): {"backend": backend}}
    provider = [{"name": backend_name, "backend": backend}]

    users = [
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
        {"username": "******", "is_admin": False, "email": "*****@*****.**"},
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
        {"username": "******", "is_admin": True, "email": "*****@*****.**"},
    ]

    projects = [
        {
            "auth_id": "TCGA-PCAWG",
            "storage_accesses": [{"buckets": ["test-bucket"], "name": backend_name}],
        },
        {
            "auth_id": "phs000178",
            "name": "TCGA",
            "storage_accesses": [{"buckets": ["test-bucket2"], "name": backend_name}],
        },
        {
            "auth_id": "phs000179",
            "name": "BLAH",
            "storage_accesses": [{"buckets": ["test-bucket3"], "name": backend_name}],
        },
    ]

    project_mapping = {
        "phs000178": [
            {"name": "TCGA", "auth_id": "phs000178"},
            {"name": "TCGA-PCAWG", "auth_id": "TCGA-PCAWG"},
        ],
        "phs000179": [{"name": "BLAH", "auth_id": "phs000179"}],
        "phstest": [{"name": "Test", "auth_id": "Test"}],
    }

    dbGap = {}
    config_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        "test-fence-config.yaml",
    )
    with open(config_path) as f:
        test_db = yaml_load(f).get("DB")

    syncer_obj = UserSyncer(
        dbGaP=dbGap,
        DB=test_db,
        db_session=db_session,
        project_mapping=project_mapping,
        storage_credentials=storage_credentials,
        is_sync_from_dbgap_server=False,
        sync_from_local_csv_dir=LOCAL_CSV_DIR,
        sync_from_local_yaml_file=LOCAL_YAML_DIR,
    )
    syncer_obj.arborist_client = MagicMock(ArboristClient)
    syncer_obj.arborist_client.get_policy.side_effect = lambda _: None

    for element in provider:
        udm.create_provider(db_session, element["name"], backend=element["backend"])

    test_projects = []
    for project in projects:
        p = udm.create_project_with_dict(db_session, project)
        test_projects.append(p)
        for sa in project["storage_accesses"]:
            for bucket in sa["buckets"]:
                syncer_obj.storage_manager.create_bucket(
                    sa["name"], db_session, bucket, p
                )

    for user in users:
        db_session.add(User(**user))
    db_session.commit()

    return syncer_obj