def create_providers(data, db_session):
    """
    Populate the db with the CloudProviders, Users, Projects, StorageAccesses,
    Buckets and AccessPrivileges described by ``data``.

    :param data: dict with "providers", "users" and "projects" entries
    :param db_session: sqlalchemy session (rows are added/flushed, not committed)
    """
    s = db_session
    providers = data["providers"]
    for provider in providers:
        prov = CloudProvider()
        prov.name = provider["name"]
        prov.backend = provider["backend"]
        prov.service = provider["service"]
        s.add(prov)
    # fix: this was `s.flush` -- a bare attribute access that never executed.
    # Flush so the providers get ids and are visible to the queries below.
    s.flush()
    for name, user in list(data["users"].items()):
        new_user = User()
        new_user.username = name
        new_user.email = user["email"]
        new_user.is_admin = user["is_admin"]
        s.add(new_user)
        # fix: flush before reading `new_user.id`; without it the id is None
        # and the AccessPrivilege rows created below get user_id=None
        s.flush()
        user["id"] = new_user.id
    for project in data["projects"]:
        new_project = Project()
        new_project.name = project["name"]
        s.add(new_project)
        # fix: flush so `new_project.id` is populated before it is used by
        # the StorageAccess/ProjectToBucket/AccessPrivilege rows below
        s.flush()
        for storage in project["storage_access"]:
            provider = s.query(CloudProvider).filter_by(name=storage).first()
            if provider:
                new_storage_access = StorageAccess(
                    provider_id=provider.id, project_id=new_project.id
                )
                s.add(new_storage_access)
        for bucket in project["buckets"]:
            new_bucket = Bucket()
            new_bucket.name = bucket["name"]
            provider = (
                s.query(CloudProvider).filter_by(name=bucket["provider"]).first()
            )
            new_bucket.provider_id = provider.id
            s.add(new_bucket)
            s.flush()
            project_to_bucket = ProjectToBucket()
            project_to_bucket.bucket_id = new_bucket.id
            project_to_bucket.project_id = new_project.id
            s.add(project_to_bucket)
            s.flush()
        for user in project["users"]:
            access = AccessPrivilege()
            access.user_id = data["users"][user["name"]]["id"]
            access.project_id = new_project.id
            s.add(access)
def create_bucket(self, provider, session, bucketname, project):
    """
    this should be exposed via admin endpoint
    create a bucket owned by a project and store in the database

    :param project: Project object
    :param provider: storage provider
    :param session: sqlalchemy session
    :param bucketname: name of the bucket
    """
    # Resolve the provider name to its row; .one() raises if it's missing.
    provider = (
        session.query(CloudProvider).filter(CloudProvider.name == provider).one()
    )

    # Reuse an existing bucket row when one already has this name.
    bucket = session.query(Bucket).filter(Bucket.name == bucketname).first()
    if bucket is None:
        bucket = session.merge(Bucket(name=bucketname, provider=provider))

    # Link the bucket to the project unless that link already exists.
    link_exists = (
        session.query(ProjectToBucket)
        .filter(
            ProjectToBucket.bucket_id == bucket.id,
            ProjectToBucket.project_id == project.id,
        )
        .first()
    )
    if not link_exists:
        session.add(ProjectToBucket(bucket=bucket, project=project))

    # Finally make sure the bucket exists in the actual storage backend.
    self.clients[provider.name].get_or_create_bucket(bucketname)
def test_create_projects(db_session):
    """
    Test "fence-create create" projects creation: projects are created
    even when some related rows (project, StorageAccess, Bucket) already
    exist in the db.
    """
    # setup
    project_1_id = "123"
    project_1_name = "my-project-1"
    project_2_id = "456"
    project_2_name = "my-project-2"
    provider_id = "789"
    bucket_name = "my-bucket-2"
    cp = CloudProvider(
        id=provider_id,
        name=provider_id,
        endpoint="https://test.com",
        backend="test_backend",
        description="description",
        service="service",
    )
    db_session.add(cp)
    # only pre-create project 1
    p = Project(id=project_1_id, name=project_1_name)
    db_session.add(p)
    # only pre-create a StorageAccess for project 1
    sa = StorageAccess(project_id=project_1_id, provider_id=provider_id)
    db_session.add(sa)
    # only pre-create a Bucket for project 2
    b = Bucket(name=bucket_name, provider_id=provider_id)
    db_session.add(b)
    # test "fence-create create" projects creation
    data = {
        "projects": [
            {
                "id": project_1_id,
                "auth_id": "phs-project-1",
                "name": project_1_name,
                "storage_accesses": [{"name": provider_id, "buckets": ["my-bucket-1"]}],
            },
            {
                "id": project_2_id,
                "auth_id": "phs-project-2",
                "name": project_2_name,
                "storage_accesses": [{"name": provider_id, "buckets": [bucket_name]}],
            },
        ]
    }
    create_projects(db_session, data)
    # both projects should exist afterwards, whether or not they pre-existed
    projects_in_db = db_session.query(Project).all()
    assert projects_in_db, "no projects were created"
    assert len(projects_in_db) == len(data["projects"])
    project_names = {p.name for p in projects_in_db}
    assert project_1_name in project_names
    assert project_2_name in project_names
def test_patch_service_account_valid_limit(
    client,
    app,
    db_session,
    encoded_jwt_service_accounts_access,
    register_user_service_account,
    user_can_manage_service_account_mock,
    valid_user_service_account_mock,
    revoke_user_service_account_from_google_mock,
    add_user_service_account_to_google_mock,
):
    """
    Test that patching with new project_access returns 204 when
    SERVICE_ACCOUNT_LIMIT number of projects is registered.
    """
    encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
    service_account = register_user_service_account["service_account"]
    project_access = []
    n_projects = config["SERVICE_ACCOUNT_LIMIT"]
    # create exactly the limit's worth of projects, each with its own bucket
    # and Google bucket access group, and request access to all of them
    for i in range(n_projects):
        project = Project(id=i, auth_id="auth_id_{}".format(i))
        bucket = Bucket(id=i)
        db_session.add(project)
        db_session.add(bucket)
        db_session.commit()
        project_to_bucket = ProjectToBucket(project_id=i, bucket_id=i)
        db_session.add(project_to_bucket)
        db_session.commit()
        gbag = GoogleBucketAccessGroup(id=i, bucket_id=i, email="*****@*****.**")
        db_session.add(gbag)
        db_session.commit()
        project_access.append("auth_id_{}".format(i))
    response = client.patch(
        "/google/service_accounts/{}".format(quote(service_account.email)),
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        content_type="application/json",
        data=json.dumps({"project_access": project_access}),
    )
    # at (not over) the limit the patch should succeed
    assert response.status_code == 204
    # one access-group grant per project should now exist for this account
    service_account_accesses = (
        db_session.query(ServiceAccountToGoogleBucketAccessGroup).filter_by(
            service_account_id=service_account.id
        )
    ).all()
    assert len(service_account_accesses) == config["SERVICE_ACCOUNT_LIMIT"]
def _setup_service_account_to_google_bucket_access_group(db_session):
    """
    Setup some testing data.
    """
    # one storage provider for everything created below
    cloud_provider = CloudProvider(
        name="test_provider",
        endpoint="https://test.com",
        backend="test_backend",
        description="description",
        service="service",
    )
    db_session.add(cloud_provider)

    # two registered user service accounts
    db_session.add(
        UserServiceAccount(
            google_unique_id="test_id1",
            email="*****@*****.**",
            google_project_id="efewf444",
        )
    )
    db_session.add(
        UserServiceAccount(
            google_unique_id="test_id2",
            email="*****@*****.**",
            google_project_id="edfwf444",
        )
    )
    db_session.commit()

    # a bucket on the provider (committed first so the provider id is set)
    bucket1 = Bucket(name="test_bucket1", provider_id=cloud_provider.id)
    db_session.add(bucket1)
    db_session.commit()

    # two access groups on the bucket with different privilege sets
    read_write_group = GoogleBucketAccessGroup(
        bucket_id=bucket1.id,
        email="*****@*****.**",
        privileges=["read-storage", "write-storage"],
    )
    read_only_group = GoogleBucketAccessGroup(
        bucket_id=bucket1.id,
        email="*****@*****.**",
        privileges=["read-storage"],
    )
    db_session.add(read_write_group)
    db_session.add(read_only_group)
    db_session.commit()
def create_bucket_on_project(current_session, project_name, bucket_name, provider_name):
    """
    Create a bucket and assign it to a project
    """
    # both the project and the provider must already exist
    project = (
        current_session.query(Project).filter(Project.name == project_name).first()
    )
    if project is None:
        raise NotFound("".join(["Project ", project_name, " not found"]))
    provider = (
        current_session.query(CloudProvider)
        .filter(CloudProvider.name == provider_name)
        .first()
    )
    if provider is None:
        raise NotFound("".join(["Provider ", provider_name, " not found"]))

    # the bucket name must be new for this storage system
    existing_bucket = (
        current_session.query(Bucket)
        .filter(Bucket.name == bucket_name, Bucket.provider_id == provider.id)
        .first()
    )
    if existing_bucket:
        raise UserError("Error, name already in use for that storage system")

    # create the bucket and give the project ownership of it
    bucket = Bucket(name=bucket_name, provider_id=provider.id)
    current_session.add(bucket)
    current_session.flush()
    current_session.add(
        ProjectToBucket(
            project_id=project.id, bucket_id=bucket.id, privilege=["owner"]
        )
    )

    # collect every (user, privilege) pair granted on this project so the
    # caller can update their storage access
    grants = current_session.query(AccessPrivilege).filter(
        AccessPrivilege.project_id == project.id
    )
    users_to_update = [
        (
            current_session.query(User).filter(User.id == grant.user_id).first(),
            grant.privilege,
        )
        for grant in grants
    ]

    return {
        "result": "success",
        "provider": provider,
        "bucket": bucket,
        "users_to_update": users_to_update,
    }
def load_google_specific_user_data(db_session, test_user_d):
    """
    Add Google-specific user data to Fence db.

    :param db_session: sqlalchemy session
    :param test_user_d: dict of ids/emails describing the Google records
        (proxy group, service account + key, bucket, access group, google
        account) to create for the test user
    """
    # fix: the body referenced an undefined name `userd_dict`; it now uses
    # the `test_user_d` parameter that is actually passed in
    gpg = GoogleProxyGroup(id=test_user_d["gpg_id"], email=test_user_d["gpg_email"])
    gsak = GoogleServiceAccountKey(
        id=test_user_d["gsak_id"],
        key_id=test_user_d["gsak_key_id"],
        service_account_id=test_user_d["gsa_id"],
    )
    gsa = GoogleServiceAccount(
        id=test_user_d["gsa_id"],
        google_unique_id="d_gui",
        user_id=test_user_d["user_id"],
        google_project_id="d_gpid",
        email=test_user_d["gsa_email"],
    )
    bkt = Bucket(id=test_user_d["bucket_id"])
    gbag = GoogleBucketAccessGroup(
        id=test_user_d["gbag_id"],
        bucket_id=test_user_d["bucket_id"],
        email=test_user_d["gbag_email"],
    )
    gpg_gbag = GoogleProxyGroupToGoogleBucketAccessGroup(
        id=test_user_d["gpg_to_gbag_id"],
        proxy_group_id=test_user_d["gpg_id"],
        access_group_id=test_user_d["gbag_id"],
    )
    uga = UserGoogleAccount(
        id=test_user_d["uga_id"],
        email=test_user_d["uga_email"],
        user_id=test_user_d["user_id"],
    )
    uga_pg = UserGoogleAccountToProxyGroup(
        user_google_account_id=test_user_d["uga_id"],
        proxy_group_id=test_user_d["gpg_id"],
    )
    db_session.add_all([gpg, gsak, gsa, bkt, gbag, gpg_gbag, uga, uga_pg])
    # link the user's record to their new proxy group
    user = (
        db_session.query(User)
        .filter_by(username=test_user_d["user_username"])
        .first()
    )
    user.google_proxy_group_id = test_user_d["gpg_id"]
    db_session.commit()
def test_register_service_account_already_exists(
    app,
    db_session,
    client,
    encoded_jwt_service_accounts_access,
    cloud_manager,
    valid_google_project_patcher,
    valid_service_account_patcher,
):
    """
    Test that registering the same service account twice returns a 400 with
    a 409 error on the service_account_email field on the second request,
    and that no duplicate db entries are created.
    """
    project = Project(id=1, auth_id="some_auth_id")
    bucket = Bucket(id=1)
    db_session.add(project)
    db_session.add(bucket)
    db_session.commit()
    project_to_bucket = ProjectToBucket(project_id=1, bucket_id=1)
    db_session.add(project_to_bucket)
    db_session.commit()
    gbag = GoogleBucketAccessGroup(id=1, bucket_id=1, email="*****@*****.**")
    db_session.add(gbag)
    db_session.commit()
    encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
    project_access = ["some_auth_id"]
    valid_service_account = {
        "service_account_email": "*****@*****.**",
        "google_project_id": "project-id",
        "project_access": project_access,
    }
    # stub out the Google API calls made during registration
    (cloud_manager.return_value.__enter__.return_value.get_service_account.
     return_value) = {
        "uniqueId": "sa_unique_id",
        "email": "*****@*****.**"
    }
    (cloud_manager.return_value.__enter__.return_value.add_member_to_group.
     return_value) = {
        "email": "*****@*****.**"
    }
    # first registration succeeds
    response = client.post(
        "/google/service_accounts",
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        data=json.dumps(valid_service_account),
        content_type="application/json",
    )
    assert response.status_code == 200
    # second registration of the same account is rejected
    response = client.post(
        "/google/service_accounts",
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        data=json.dumps(valid_service_account),
        content_type="application/json",
    )
    assert response.status_code == 400
    assert response.json["errors"]["service_account_email"]["status"] == 409
    # only the first registration's rows should exist
    assert len(db_session.query(UserServiceAccount).all()) == 1
    assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 1
    assert len(
        db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 1
def test_valid_service_account_registration_multiple_service_accounts(
    app,
    db_session,
    client,
    encoded_jwt_service_accounts_access,
    cloud_manager,
    valid_google_project_patcher,
    valid_service_account_patcher,
):
    """
    Test that a valid service account registration request returns 200 and
    succesfully creates entries in database when the Google project has
    both another valid service account in the project and a Google-managed
    system service account.
    """
    proj_patcher = valid_google_project_patcher
    project = Project(id=1, auth_id="some_auth_id")
    bucket = Bucket(id=1)
    db_session.add(project)
    db_session.add(bucket)
    db_session.commit()
    project_to_bucket = ProjectToBucket(project_id=1, bucket_id=1)
    db_session.add(project_to_bucket)
    db_session.commit()
    gbag = GoogleBucketAccessGroup(id=1, bucket_id=1, email="*****@*****.**")
    db_session.add(gbag)
    db_session.commit()
    google_project_id = "project-id"
    encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
    project_access = ["some_auth_id"]
    # the Google project contains a user service account and a
    # Google-managed (compute system) service account
    proj_patcher["get_service_account_ids_from_google_members"].return_value = [
        "test-{}@test.com".format(google_project_id),
        "{}@compute-system.iam.gserviceaccount.com".format(google_project_id),
    ]
    valid_service_account = {
        "service_account_email": "*****@*****.**",
        "google_project_id": google_project_id,
        "project_access": project_access,
    }
    # stub out the Google API calls made during registration
    (cloud_manager.return_value.__enter__.return_value.get_service_account.
     return_value) = {
        "uniqueId": "sa_unique_id",
        "email": "*****@*****.**"
    }
    (cloud_manager.return_value.__enter__.return_value.add_member_to_group.
     return_value) = {
        "email": "*****@*****.**"
    }
    # sanity check: no rows before the request
    assert len(db_session.query(UserServiceAccount).all()) == 0
    assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 0
    assert len(
        db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 0
    response = client.post(
        "/google/service_accounts",
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        data=json.dumps(valid_service_account),
        content_type="application/json",
    )
    assert response.status_code == 200
    # registration should have created one row in each table
    assert len(db_session.query(UserServiceAccount).all()) == 1
    assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 1
    assert len(
        db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 1
def test_service_account_registration_expires_in(
    app,
    db_session,
    client,
    encoded_jwt_service_accounts_access,
    cloud_manager,
    valid_google_project_patcher,
    valid_service_account_patcher,
):
    """
    Test that a service account registration with a valid expires_in is
    successful, and that a registration with an invalid expires_in is not.
    """
    project = Project(id=1, auth_id="some_auth_id")
    bucket = Bucket(id=1)
    db_session.add(project)
    db_session.add(bucket)
    db_session.commit()
    project_to_bucket = ProjectToBucket(project_id=1, bucket_id=1)
    db_session.add(project_to_bucket)
    db_session.commit()
    gbag = GoogleBucketAccessGroup(id=1, bucket_id=1, email="*****@*****.**")
    db_session.add(gbag)
    db_session.commit()
    encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
    project_access = ["some_auth_id"]
    valid_service_account = {
        "service_account_email": "*****@*****.**",
        "google_project_id": "project-id",
        "project_access": project_access,
    }
    # stub out the Google API calls made during registration
    (cloud_manager.return_value.__enter__.return_value.get_service_account.
     return_value) = {
        "uniqueId": "sa_unique_id",
        "email": "*****@*****.**"
    }
    (cloud_manager.return_value.__enter__.return_value.add_member_to_group.
     return_value) = {
        "email": "*****@*****.**"
    }
    # sanity check: no rows before the request
    assert len(db_session.query(UserServiceAccount).all()) == 0
    assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 0
    assert len(
        db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 0
    # valid expires_in: should succeed
    requested_exp = 60
    response = client.post(
        "/google/service_accounts?expires_in={}".format(requested_exp),
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        data=json.dumps(valid_service_account),
        content_type="application/json",
    )
    assert response.status_code == 200  # check if success
    assert len(db_session.query(UserServiceAccount).all()) == 1
    assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 1
    sa_to_bucket_entries = db_session.query(
        ServiceAccountToGoogleBucketAccessGroup).all()
    assert len(sa_to_bucket_entries) == 1
    # make sure the access was granted for the requested time
    # (allow up to 2 sec for runtime)
    diff = sa_to_bucket_entries[0].expires - int(time.time())
    assert requested_exp - 2 <= diff <= requested_exp
    # invalid expires_in: should fail
    requested_exp = "abc"  # expires_in must be int >0
    response = client.post(
        "/google/service_accounts?expires_in={}".format(requested_exp),
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        data=json.dumps(valid_service_account),
        content_type="application/json",
    )
    assert response.status_code == 400  # check if failure
def test_invalid_project_limit_service_account_registration(
    app,
    db_session,
    client,
    encoded_jwt_service_accounts_access,
    cloud_manager,
    valid_google_project_patcher,
):
    """
    Test that we get a 400 when there are SERVICE_ACCOUNT_LIMIT + 1 number
    of projects and the database isn't updated.
    """
    proj_patcher = valid_google_project_patcher
    project_access = []
    # create one more project than the configured limit allows
    n_projects = config["SERVICE_ACCOUNT_LIMIT"] + 1
    for i in range(n_projects):
        project = Project(id=i, auth_id="auth_id_{}".format(i))
        bucket = Bucket(id=i)
        db_session.add(project)
        db_session.add(bucket)
        db_session.commit()
        project_to_bucket = ProjectToBucket(project_id=i, bucket_id=i)
        db_session.add(project_to_bucket)
        db_session.commit()
        gbag = GoogleBucketAccessGroup(id=i, bucket_id=i, email="*****@*****.**")
        db_session.add(gbag)
        db_session.commit()
        project_access.append("auth_id_{}".format(i))
    google_project_id = "project-id"
    encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
    proj_patcher["get_service_account_ids_from_google_members"].return_value = [
        "test-{}@test.com".format(google_project_id),
        "{}@compute-system.iam.gserviceaccount.com".format(google_project_id),
    ]
    valid_service_account = {
        "service_account_email": "*****@*****.**",
        "google_project_id": google_project_id,
        "project_access": project_access,
    }
    # stub out the Google API calls made during registration
    (cloud_manager.return_value.__enter__.return_value.get_service_account.
     return_value) = {
        "uniqueId": "sa_unique_id",
        "email": "*****@*****.**"
    }
    (cloud_manager.return_value.__enter__.return_value.add_member_to_group.
     return_value) = {
        "email": "*****@*****.**"
    }
    # sanity check: no rows before the request
    assert len(db_session.query(UserServiceAccount).all()) == 0
    assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 0
    assert len(
        db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 0
    response = client.post(
        "/google/service_accounts",
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        data=json.dumps(valid_service_account),
        content_type="application/json",
    )
    # over the limit: the request is rejected and nothing is persisted
    assert response.status_code == 400
    assert len(db_session.query(UserServiceAccount).all()) == 0
    assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 0
    assert len(
        db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 0
def setup_test_data(db_session):
    """
    Populate the db with a provider, proxy groups, user service accounts,
    buckets, one bucket access group, and the links between them.

    :param db_session: sqlalchemy session
    """
    cp = CloudProvider(name="test", endpoint="http://test.endpt")
    # fix: the provider was never added to the session, so `cp.id` was
    # still None when the buckets below referenced it
    db_session.add(cp)
    proxy_group_list = [
        {
            "id": "group1",
            "email": "*****@*****.**"
        },
        {
            "id": "group2",
            "email": "*****@*****.**"
        },
    ]
    user_account_list = [
        {
            "google_unique_id": "test_id1",
            "email": "*****@*****.**",
            "google_project_id": "test",
        },
        {
            "google_unique_id": "test_id2",
            "email": "*****@*****.**",
            "google_project_id": "test",
        },
    ]
    proxy_groups = []
    for group in proxy_group_list:
        proxy_groups.append(GoogleProxyGroup(**group))
        db_session.add(proxy_groups[-1])
    user_service_accounts = []
    for user in user_account_list:
        user_service_accounts.append(UserServiceAccount(**user))
        db_session.add(user_service_accounts[-1])
    db_session.commit()
    # buckets on the provider (created after commit so cp.id is populated)
    bucket1 = Bucket(name="bucket1", provider_id=cp.id)
    bucket2 = Bucket(name="bucket2", provider_id=cp.id)
    bucket3 = Bucket(name="bucket3", provider_id=cp.id)
    db_session.add(bucket1)
    db_session.add(bucket2)
    db_session.add(bucket3)
    db_session.commit()
    access_grp1 = GoogleBucketAccessGroup(bucket_id=bucket1.id, email="*****@*****.**")
    db_session.add(access_grp1)
    db_session.commit()
    # grant the first proxy group and the first service account access to
    # the first bucket's access group
    db_session.add(
        GoogleProxyGroupToGoogleBucketAccessGroup(
            proxy_group_id=proxy_groups[0].id, access_group_id=access_grp1.id))
    db_session.add(
        ServiceAccountToGoogleBucketAccessGroup(
            service_account_id=user_service_accounts[0].id,
            access_group_id=access_grp1.id,
        ))
    db_session.commit()
def invalid_service_account_not_exist(db_session):
    """
    Register a service account (with get-or-create of its provider, bucket,
    project and access group), patch the google monitor's validity check so
    this one account is reported invalid, and yield the created records.

    Yields a dict with "service_account", "projects" and
    "bucket_access_groups"; the patch is stopped on teardown.
    """
    invalid_service_account = "*****@*****.**"
    user = UserServiceAccount(
        google_unique_id="invalid_test_id",
        email=invalid_service_account,
        google_project_id="test",
    )
    db_session.add(user)
    db_session.commit()
    # get-or-create the supporting rows so the fixture is reusable
    cp = db_session.query(CloudProvider).filter_by(name="test").first()
    if not cp:
        cp = CloudProvider(name="test", endpoint="http://test.endpt")
        db_session.add(cp)
        db_session.commit()
    bucket1 = db_session.query(Bucket).filter_by(name="bucket1").first()
    if not bucket1:
        bucket1 = Bucket(name="bucket1", provider_id=cp.id)
        db_session.add(bucket1)
        db_session.commit()
    project1 = db_session.query(Project).filter_by(name="test_1").first()
    if not project1:
        project1 = Project(name="test_1", auth_id="test_auth_1")
        db_session.add(project1)
        db_session.commit()
    access_grp1 = (
        db_session.query(GoogleBucketAccessGroup)
        .filter_by(email="*****@*****.**")
        .first()
    )
    if not access_grp1:
        access_grp1 = GoogleBucketAccessGroup(
            bucket_id=bucket1.id, email="*****@*****.**"
        )
        db_session.add(access_grp1)
        db_session.commit()
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project1.id, service_account_id=user.id
        )
    )
    db_session.commit()
    # expiration set to 0 for testing that it gets set
    current_time = 0
    service_account_grp1 = ServiceAccountToGoogleBucketAccessGroup(
        service_account_id=user.id, access_group_id=access_grp1.id, expires=current_time
    )
    db_session.add(service_account_grp1)
    db_session.commit()

    def mock_is_valid(sa_email, *args, **kwargs):
        # only this fixture's account is reported invalid; any other
        # account is treated as valid
        if sa_email == invalid_service_account:
            validity = GoogleServiceAccountValidity("account_id", "project_id")
            # set overall validity to False
            # set policy_accessible to False so the SA is removed from the DB
            validity["policy_accessible"] = False
            validity._valid = False
            return validity
        return True

    patcher = patch(
        "fence.scripting.google_monitor._is_valid_service_account", mock_is_valid
    )
    patcher.start()
    yield {
        "service_account": user,
        "projects": [project1],
        "bucket_access_groups": [access_grp1],
    }
    patcher.stop()
def register_user_service_account(db_session):
    """
    Register a new (randomly named) user service account with access to two
    projects, each owning a bucket with a Google bucket access group.

    The supporting rows (provider, buckets, projects, access groups,
    project-to-bucket links) are get-or-created so the fixture can be used
    repeatedly; only the service account itself is new each call.

    Returns a dict with "service_account", "projects", "buckets" and
    "bucket_access_groups".
    """
    cp = db_session.query(CloudProvider).filter_by(name="test").first()
    if not cp:
        cp = CloudProvider(name="test", endpoint="http://test.endpt")
        db_session.add(cp)
        db_session.commit()
    bucket1 = db_session.query(Bucket).filter_by(name="bucket1").first()
    if not bucket1:
        bucket1 = Bucket(name="bucket1", provider_id=cp.id)
        db_session.add(bucket1)
        db_session.commit()
    bucket2 = db_session.query(Bucket).filter_by(name="bucket2").first()
    if not bucket2:
        bucket2 = Bucket(name="bucket2", provider_id=cp.id)
        db_session.add(bucket2)
        db_session.commit()
    project1 = db_session.query(Project).filter_by(name="test_1").first()
    if not project1:
        project1 = Project(name="test_1", auth_id="test_auth_1")
        db_session.add(project1)
        db_session.commit()
    project2 = db_session.query(Project).filter_by(name="test_2").first()
    if not project2:
        project2 = Project(name="test_2", auth_id="test_auth_2")
        db_session.add(project2)
        db_session.commit()
    access_grp1 = (
        db_session.query(GoogleBucketAccessGroup)
        .filter_by(email="*****@*****.**")
        .first()
    )
    if not access_grp1:
        access_grp1 = GoogleBucketAccessGroup(
            bucket_id=bucket1.id, email="*****@*****.**"
        )
        db_session.add(access_grp1)
        db_session.commit()
    access_grp2 = (
        db_session.query(GoogleBucketAccessGroup)
        .filter_by(email="*****@*****.**")
        .first()
    )
    if not access_grp2:
        access_grp2 = GoogleBucketAccessGroup(
            bucket_id=bucket2.id, email="*****@*****.**"
        )
        db_session.add(access_grp2)
        db_session.commit()
    project_to_bucket1 = (
        db_session.query(ProjectToBucket).filter_by(project_id=project1.id).first()
    )
    if not project_to_bucket1:
        project_to_bucket1 = ProjectToBucket(
            project_id=project1.id, bucket_id=bucket1.id
        )
        db_session.add(project_to_bucket1)
        db_session.commit()
    project_to_bucket2 = (
        db_session.query(ProjectToBucket).filter_by(project_id=project2.id).first()
    )
    if not project_to_bucket2:
        project_to_bucket2 = ProjectToBucket(
            project_id=project2.id, bucket_id=bucket2.id
        )
        db_session.add(project_to_bucket2)
        db_session.commit()
    # new service account each time this is called
    random_string = "".join(
        random.choice(string.ascii_uppercase + string.digits) for _ in range(6)
    )
    user = UserServiceAccount(
        google_unique_id="{}".format(random_string),
        email="{}@test.iam.gserviceaccount.com".format(random_string),
        google_project_id="test",
    )
    db_session.add(user)
    db_session.commit()
    # grant the new account access to both projects
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project1.id, service_account_id=user.id
        )
    )
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project2.id, service_account_id=user.id
        )
    )
    # expiration set to 0 for testing that it gets set
    current_time = 0
    service_account_grp1 = ServiceAccountToGoogleBucketAccessGroup(
        service_account_id=user.id, access_group_id=access_grp1.id, expires=current_time
    )
    service_account_grp2 = ServiceAccountToGoogleBucketAccessGroup(
        service_account_id=user.id, access_group_id=access_grp2.id, expires=current_time
    )
    db_session.add(service_account_grp1)
    db_session.add(service_account_grp2)
    db_session.commit()
    return {
        "service_account": user,
        "projects": [project1, project2],
        "buckets": [bucket1, bucket2],
        "bucket_access_groups": [access_grp1, access_grp2],
    }
def setup_data(db_session):
    """
    Populate the db with a provider, four user service accounts, three
    buckets, three projects (each linked to one bucket), access privileges
    for the service accounts, and three bucket access groups -- with the
    first service account granted membership in two of them.
    """
    cp = CloudProvider(name="test", endpoint="http://test.endpt")
    # NOTE(review): all four accounts share google_unique_id="test_id" --
    # presumably intentional for these tests; confirm uniqueness isn't
    # enforced by the model
    user = UserServiceAccount(
        google_unique_id="test_id", email="*****@*****.**", google_project_id="test"
    )
    user_1 = UserServiceAccount(
        google_unique_id="test_id", email="*****@*****.**", google_project_id="test"
    )
    user_2 = UserServiceAccount(
        google_unique_id="test_id", email="*****@*****.**", google_project_id="test"
    )
    user_3 = UserServiceAccount(
        google_unique_id="test_id", email="*****@*****.**", google_project_id="test"
    )
    db_session.add(user)
    db_session.add(user_1)
    db_session.add(user_2)
    db_session.add(user_3)
    db_session.add(cp)
    db_session.commit()
    # one bucket per project, all on the same provider
    bucket = Bucket(name="bucket1", provider_id=cp.id)
    bucket2 = Bucket(name="bucket2", provider_id=cp.id)
    bucket3 = Bucket(name="bucket3", provider_id=cp.id)
    db_session.add(bucket)
    db_session.add(bucket2)
    db_session.add(bucket3)
    db_session.commit()
    project1 = Project(name="test_1", auth_id="test_auth_1")
    project2 = Project(name="test_2", auth_id="test_auth_2")
    project3 = Project(name="test_3", auth_id="test_auth_3")
    db_session.add(project1)
    db_session.add(project2)
    db_session.add(project3)
    db_session.commit()
    db_session.add(ProjectToBucket(project_id=project1.id, bucket_id=bucket.id))
    db_session.add(ProjectToBucket(project_id=project2.id, bucket_id=bucket2.id))
    db_session.add(ProjectToBucket(project_id=project3.id, bucket_id=bucket3.id))
    # `user` gets projects 1 and 2; the other accounts only project 1
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project1.id, service_account_id=user.id
        )
    )
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project2.id, service_account_id=user.id
        )
    )
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project1.id, service_account_id=user_1.id
        )
    )
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project1.id, service_account_id=user_2.id
        )
    )
    db_session.add(
        ServiceAccountAccessPrivilege(
            project_id=project1.id, service_account_id=user_3.id
        )
    )
    # one access group per bucket
    access_grp = GoogleBucketAccessGroup(
        bucket_id=bucket.id, email="*****@*****.**"
    )
    access_grp2 = GoogleBucketAccessGroup(
        bucket_id=bucket2.id, email="*****@*****.**"
    )
    access_grp3 = GoogleBucketAccessGroup(
        bucket_id=bucket3.id, email="*****@*****.**"
    )
    db_session.add(access_grp)
    db_session.add(access_grp2)
    db_session.add(access_grp3)
    db_session.commit()
    # only `user` is granted access-group membership (groups 1 and 2)
    service_account_grp1 = ServiceAccountToGoogleBucketAccessGroup(
        service_account_id=user.id, access_group_id=access_grp.id
    )
    service_account_grp2 = ServiceAccountToGoogleBucketAccessGroup(
        service_account_id=user.id, access_group_id=access_grp2.id
    )
    db_session.add(service_account_grp1)
    db_session.add(service_account_grp2)
    db_session.commit()
def _setup_google_access(db_session, access_1_expires=None, access_2_expires=None):
    """
    Setup some testing data.

    Args:
        access_1_expires (str, optional): expiration for the Proxy Group ->
            Google Bucket Access Group for user 1, defaults to None
        access_2_expires (str, optional): expiration for the Proxy Group ->
            Google Bucket Access Group for user 2, defaults to None

    Returns:
        dict: {"google_proxy_group_ids": {"1": ..., "2": ...}}
    """
    cloud_provider = CloudProvider(
        name="test_provider",
        endpoint="https://test.com",
        backend="test_backend",
        description="description",
        service="service",
    )
    db_session.add(cloud_provider)
    db_session.add(
        UserServiceAccount(
            google_unique_id="test_id1",
            email="*****@*****.**",
            google_project_id="efewf444",
        ))
    db_session.add(
        UserServiceAccount(
            google_unique_id="test_id2",
            email="*****@*****.**",
            google_project_id="edfwf444",
        ))
    db_session.commit()
    # one bucket on the provider
    bucket1 = Bucket(name="test_bucket1", provider_id=cloud_provider.id)
    db_session.add(bucket1)
    db_session.commit()
    # one proxy group per user
    gpg1 = GoogleProxyGroup(id=1, email="*****@*****.**")
    gpg2 = GoogleProxyGroup(id=2, email="*****@*****.**")
    db_session.add(gpg1)
    db_session.add(gpg2)
    db_session.commit()
    # two access groups on the bucket: read/write and read-only
    gbag1 = GoogleBucketAccessGroup(
        bucket_id=bucket1.id,
        email="*****@*****.**",
        privileges=["read-storage", "write-storage"],
    )
    gbag2 = GoogleBucketAccessGroup(
        bucket_id=bucket1.id,
        email="*****@*****.**",
        privileges=["read-storage"],
    )
    db_session.add(gbag1)
    db_session.add(gbag2)
    db_session.commit()
    # link each proxy group to one access group, with the caller-supplied
    # expirations
    db_session.add(
        GoogleProxyGroupToGoogleBucketAccessGroup(proxy_group_id=gpg1.id,
                                                  access_group_id=gbag1.id,
                                                  expires=access_1_expires))
    db_session.add(
        GoogleProxyGroupToGoogleBucketAccessGroup(proxy_group_id=gpg2.id,
                                                  access_group_id=gbag2.id,
                                                  expires=access_2_expires))
    db_session.commit()
    return {"google_proxy_group_ids": {"1": gpg1.id, "2": gpg2.id}}