def test_bootstrap_ip_whitelist_assignment_new(self):
  """Bootstrapping a brand new IP whitelist assignment creates it at rev 1."""
  # Use plain decimal literals: a leading zero ('01') is an octal literal in
  # Python 2 and a syntax error in Python 3.
  self.mock_now(datetime.datetime(2014, 1, 1))
  ret = model.bootstrap_ip_whitelist_assignment(
      model.Identity(model.IDENTITY_USER, '*****@*****.**'),
      'some ip whitelist', 'some comment')
  self.assertTrue(ret)
  # The entity is created with full revision bookkeeping (rev 1, no prev rev).
  self.assertEqual(
      {
        'assignments': [
          {
            'comment': 'some comment',
            'created_by': model.get_service_self_identity(),
            'created_ts': datetime.datetime(2014, 1, 1),
            'identity': model.Identity(model.IDENTITY_USER, '*****@*****.**'),
            'ip_whitelist': 'some ip whitelist',
          },
        ],
        'auth_db_rev': 1,
        'auth_db_prev_rev': None,
        'modified_by': model.get_service_self_identity(),
        'modified_ts': datetime.datetime(2014, 1, 1),
      },
      model.ip_whitelist_assignments_key().get().to_dict())
def test_group_bootstrap_non_empty(self):
  """Bootstrapping a group with members stores them plus revision metadata."""
  ident1 = model.Identity(model.IDENTITY_USER, '*****@*****.**')
  ident2 = model.Identity(model.IDENTITY_USER, '*****@*****.**')
  # Use plain decimal literals: a leading zero ('01') is an octal literal in
  # Python 2 and a syntax error in Python 3.
  mocked_now = datetime.datetime(2014, 1, 1)
  self.mock_now(mocked_now)
  added = model.bootstrap_group(
      'some-group', [ident1, ident2], 'Blah description')
  self.assertTrue(added)
  ent = model.group_key('some-group').get()
  self.assertEqual(
      {
        'auth_db_rev': 1,
        'auth_db_prev_rev': None,
        'created_by': model.get_service_self_identity(),
        'created_ts': mocked_now,
        'description': 'Blah description',
        'globs': [],
        'members': [ident1, ident2],
        'modified_by': model.get_service_self_identity(),
        'modified_ts': mocked_now,
        'nested': [],
        'owners': u'administrators',
      },
      ent.to_dict())
def test_group_bootstrap_non_empty(self):
  """Bootstrapping a group with members stores them plus revision metadata."""
  ident1 = model.Identity(model.IDENTITY_USER, '*****@*****.**')
  ident2 = model.Identity(model.IDENTITY_USER, '*****@*****.**')
  # Use plain decimal literals: a leading zero ('01') is an octal literal in
  # Python 2 and a syntax error in Python 3.
  mocked_now = datetime.datetime(2014, 1, 1)
  self.mock_now(mocked_now)
  added = model.bootstrap_group(
      'some-group', [ident1, ident2], 'Blah description')
  self.assertTrue(added)
  ent = model.group_key('some-group').get()
  self.assertEqual(
      {
        'auth_db_rev': 1,
        'auth_db_prev_rev': None,
        'created_by': model.get_service_self_identity(),
        'created_ts': mocked_now,
        'description': 'Blah description',
        'globs': [],
        'members': [ident1, ident2],
        'modified_by': model.get_service_self_identity(),
        'modified_ts': mocked_now,
        'nested': [],
        'owners': u'administrators',
      },
      ent.to_dict())
def test_bootstrap_ip_whitelist_assignment_modify(self):
  """Re-bootstrapping an assignment overwrites it and bumps auth_db_rev."""
  # Use plain decimal literals: '01' is an octal literal in Python 2 and a
  # syntax error in Python 3.
  self.mock_now(datetime.datetime(2014, 1, 1))
  ret = model.bootstrap_ip_whitelist_assignment(
      model.Identity(model.IDENTITY_USER, '*****@*****.**'),
      'some ip whitelist', 'some comment')
  self.assertTrue(ret)
  # Second bootstrap for the same identity replaces the assignment.
  ret = model.bootstrap_ip_whitelist_assignment(
      model.Identity(model.IDENTITY_USER, '*****@*****.**'),
      'another ip whitelist', 'another comment')
  self.assertTrue(ret)
  self.assertEqual(
      {
        'assignments': [
          {
            'comment': 'another comment',
            'created_by': model.get_service_self_identity(),
            'created_ts': datetime.datetime(2014, 1, 1),
            'identity': model.Identity(model.IDENTITY_USER, '*****@*****.**'),
            'ip_whitelist': 'another ip whitelist',
          },
        ],
        'auth_db_rev': 2,
        'auth_db_prev_rev': 1,
        'modified_by': model.get_service_self_identity(),
        'modified_ts': datetime.datetime(2014, 1, 1),
      },
      model.ip_whitelist_assignments_key().get().to_dict())
def test_update_oauth_config(self):
  """_update_oauth_config stores new settings and is a no-op on repeats."""
  self.mock_now(datetime.datetime(2014, 1, 2, 3, 4, 5))

  rev = config.Revision('oauth_cfg_rev', 'http://url')

  @ndb.transactional
  def run(conf):
    return config._update_oauth_config(rev, conf)

  model.AuthGlobalConfig(key=model.root_key()).put()

  # An empty config applied to an empty state changes nothing.
  self.assertFalse(run(config_pb2.OAuthConfig()))

  # A populated config is persisted with revision bookkeeping.
  populated = config_pb2.OAuthConfig(
      primary_client_id='a',
      primary_client_secret='b',
      client_ids=['c', 'd'])
  self.assertTrue(run(populated))
  self.assertEqual(
      {
        'auth_db_rev': 1,
        'auth_db_prev_rev': None,
        'modified_by': model.get_service_self_identity(),
        'modified_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
        'oauth_additional_client_ids': ['c', 'd'],
        'oauth_client_id': 'a',
        'oauth_client_secret': 'b',
      },
      model.root_key().get().to_dict())

  # Re-applying the identical config changes nothing.
  self.assertFalse(run(populated))
def test_update_oauth_config(self):
  """Oauth config updater persists changes and skips identical configs."""
  self.mock_now(datetime.datetime(2014, 1, 2, 3, 4, 5))

  def make_conf():
    # Fresh proto each time, mirroring a re-import of the same config.
    return config_pb2.OAuthConfig(
        primary_client_id='a',
        primary_client_secret='b',
        client_ids=['c', 'd'])

  @ndb.transactional
  def run(conf):
    return config._update_oauth_config(
        config.Revision('oauth_cfg_rev', 'http://url'), conf)

  model.AuthGlobalConfig(key=model.root_key()).put()

  # Empty config over empty state: nothing to do.
  self.assertFalse(run(config_pb2.OAuthConfig()))

  # First real config: stored, revision recorded.
  self.assertTrue(run(make_conf()))
  expected = {
    'auth_db_rev': 1,
    'auth_db_prev_rev': None,
    'modified_by': model.get_service_self_identity(),
    'modified_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
    'oauth_additional_client_ids': ['c', 'd'],
    'oauth_client_id': 'a',
    'oauth_client_secret': 'b',
  }
  self.assertEqual(expected, model.root_key().get().to_dict())

  # Identical config again: recognized as a no-op.
  self.assertFalse(run(make_conf()))
def _update_oauth_config(rev, conf):
  """Applies an imported oauth.cfg to the stored AuthGlobalConfig entity.

  Args:
    rev: Revision tuple describing where the config came from.
    conf: OAuthConfig proto message with new settings.

  Returns:
    True if the stored entity was modified, False if already up-to-date.
  """
  assert ndb.in_transaction(), 'Must be called in AuthDB transaction'
  ent = model.root_key().get()
  # Snapshot only the oauth-related fields as plain dicts for comparison.
  old_as_dict = {
    'oauth_client_id': ent.oauth_client_id,
    'oauth_client_secret': ent.oauth_client_secret,
    'oauth_additional_client_ids': list(ent.oauth_additional_client_ids),
    'token_server_url': ent.token_server_url,
  }
  new_as_dict = {
    'oauth_client_id': conf.primary_client_id,
    'oauth_client_secret': conf.primary_client_secret,
    'oauth_additional_client_ids': list(conf.client_ids),
    'token_server_url': conf.token_server_url,
  }
  if old_as_dict == new_as_dict:
    return False
  ent.populate(**new_as_dict)
  ent.record_revision(
      modified_by=model.get_service_self_identity(),
      modified_ts=utils.utcnow(),
      comment='Importing oauth.cfg at rev %s' % rev.revision)
  ent.put()
  return True
def update():
  # Syncs AuthProjectRealms entities with the expanded realms configs.
  # NOTE(review): closure — relies on free variables from the enclosing
  # scope: `expanded` (list of (rev, realms) pairs), `db` (permissions DB
  # with a `revision`), and `comment` (revision comment for updates).
  existing = ndb.get_multi(
      model.project_realms_key(rev.project_id) for rev, _ in expanded
  )
  updated = []  # entities whose realms actually changed
  metas = []    # per-project bookkeeping entities, refreshed unconditionally
  for (rev, realms), ent in zip(expanded, existing):
    logging.info('Visiting project "%s"...', rev.project_id)
    if not ent:
      # First time we see this project: create its realms entity.
      logging.info('New realms config in project "%s"', rev.project_id)
      ent = model.AuthProjectRealms(
          key=model.project_realms_key(rev.project_id),
          realms=realms,
          config_rev=rev.config_rev,
          perms_rev=db.revision)
      ent.record_revision(
          modified_by=model.get_service_self_identity(),
          comment='New realms config')
      updated.append(ent)
    elif ent.realms != realms:
      # The expanded realms differ from what is stored: overwrite in place.
      logging.info('Updated realms config in project "%s"', rev.project_id)
      ent.realms = realms
      ent.config_rev = rev.config_rev
      ent.perms_rev = db.revision
      ent.record_revision(
          modified_by=model.get_service_self_identity(),
          comment=comment)
      updated.append(ent)
    else:
      logging.info('Realms config in project "%s" are fresh', rev.project_id)

    # Always update AuthProjectRealmsMeta to match the state we just checked.
    metas.append(AuthProjectRealmsMeta(
        key=project_realms_meta_key(rev.project_id),
        config_rev=rev.config_rev,
        perms_rev=db.revision,
        config_digest=rev.config_digest,
        modified_ts=utils.utcnow(),
    ))

  logging.info('Persisting changes...')
  ndb.put_multi(updated + metas)
  # Only trigger AuthDB replication if some realms actually changed.
  if updated:
    model.replicate_auth_db()
def seal_token(subtoken):
  """Serializes and signs a subtoken, wrapping it into a DelegationToken."""
  blob = subtoken.SerializeToString()
  key_id, sig = signature.sign_blob(blob, 0.5)
  token = delegation_pb2.DelegationToken(
      serialized_subtoken=blob,
      signer_id=model.get_service_self_identity().to_bytes(),
      signing_key_id=key_id,
      pkcs1_sha256_sig=sig)
  return token
def test_group_bootstrap_empty(self):
  """Bootstrapping a group with no members still creates the entity."""
  # Use plain decimal literals: '01' is an octal literal in Python 2 and a
  # syntax error in Python 3.
  mocked_now = datetime.datetime(2014, 1, 1)
  self.mock_now(mocked_now)
  added = model.bootstrap_group('some-group', [], 'Blah description')
  self.assertTrue(added)
  ent = model.group_key('some-group').get()
  self.assertEqual(
      {
        'created_by': model.get_service_self_identity(),
        'created_ts': mocked_now,
        'description': 'Blah description',
        'globs': [],
        'members': [],
        'modified_by': model.get_service_self_identity(),
        'modified_ts': mocked_now,
        'nested': []
      }, ent.to_dict())
def test_bootstrap_ip_whitelist_empty(self):
  """Bootstrapping a whitelist with no subnets still creates the entity."""
  self.assertIsNone(model.ip_whitelist_key('list').get())
  # Use plain decimal literals: '01' is an octal literal in Python 2 and a
  # syntax error in Python 3.
  mocked_now = datetime.datetime(2014, 1, 1)
  self.mock_now(mocked_now)
  ret = model.bootstrap_ip_whitelist('list', [], 'comment')
  self.assertTrue(ret)
  ent = model.ip_whitelist_key('list').get()
  self.assertTrue(ent)
  self.assertEqual({
    'created_by': model.get_service_self_identity(),
    'created_ts': mocked_now,
    'description': u'comment',
    'modified_by': model.get_service_self_identity(),
    'modified_ts': mocked_now,
    'subnets': [],
  }, ent.to_dict())
def test_bootstrap_ip_whitelist_empty(self):
  """Bootstrapping a whitelist with no subnets still creates the entity."""
  self.assertIsNone(model.ip_whitelist_key('list').get())
  # Use plain decimal literals: '01' is an octal literal in Python 2 and a
  # syntax error in Python 3.
  mocked_now = datetime.datetime(2014, 1, 1)
  self.mock_now(mocked_now)
  ret = model.bootstrap_ip_whitelist('list', [], 'comment')
  self.assertTrue(ret)
  ent = model.ip_whitelist_key('list').get()
  self.assertTrue(ent)
  self.assertEqual(
      {
        'created_by': model.get_service_self_identity(),
        'created_ts': mocked_now,
        'description': u'comment',
        'modified_by': model.get_service_self_identity(),
        'modified_ts': mocked_now,
        'subnets': [],
      }, ent.to_dict())
def update_stored():
  """Persists db.permissions into AuthRealmsGlobals if they changed."""
  key = model.realms_globals_key()
  ent = key.get() or model.AuthRealmsGlobals(key=key)
  # Nothing to do when the stored permissions already match the DB.
  if perms_to_map(ent.permissions) == db.permissions:
    logging.info('Skipping, already up-to-date')
    return
  ent.permissions = sorted(db.permissions.values(), key=lambda p: p.name)
  ent.record_revision(
      modified_by=model.get_service_self_identity(),
      comment='Updating permissions to rev "%s"' % db.revision)
  ent.put()
  model.replicate_auth_db()
def test_bootstrap_ip_whitelist(self):
  """Bootstrapping a whitelist stores subnets and revision metadata."""
  self.assertIsNone(model.ip_whitelist_key('list').get())
  # Use plain decimal literals: '01' is an octal literal in Python 2 and a
  # syntax error in Python 3.
  mocked_now = datetime.datetime(2014, 1, 1)
  self.mock_now(mocked_now)
  ret = model.bootstrap_ip_whitelist(
      'list', ['192.168.0.0/24', '127.0.0.1/32'], 'comment')
  self.assertTrue(ret)
  ent = model.ip_whitelist_key('list').get()
  self.assertTrue(ent)
  self.assertEqual({
    'auth_db_rev': 1,
    'auth_db_prev_rev': None,
    'created_by': model.get_service_self_identity(),
    'created_ts': mocked_now,
    'description': u'comment',
    'modified_by': model.get_service_self_identity(),
    'modified_ts': mocked_now,
    'subnets': [u'192.168.0.0/24', u'127.0.0.1/32'],
  }, ent.to_dict())
def apply_import(revision, entities_to_put, entities_to_delete, ts):
  """Transactionally puts and deletes a bunch of entities."""
  # DB changed between transactions, retry.
  if auth.get_auth_db_revision() != revision:
    return False
  # Stamp every mutated entity with who/when/why, then bump revision number.
  me = model.get_service_self_identity()
  for ent in entities_to_put:
    ent.record_revision(
        modified_by=me, modified_ts=ts, comment='External group import')
  for ent in entities_to_delete:
    ent.record_deletion(
        modified_by=me, modified_ts=ts, comment='External group import')
  # Issue all datastore RPCs in parallel, then wait for all of them.
  pending = list(ndb.put_multi_async(entities_to_put))
  pending.extend(ndb.delete_multi_async(ent.key for ent in entities_to_delete))
  for fut in pending:
    fut.check_success()
  auth.replicate_auth_db()
  return True
def apply_import(revision, entities_to_put, entities_to_delete, ts):
  """Transactionally puts and deletes a bunch of entities."""
  if auth.get_auth_db_revision() != revision:
    # The AuthDB changed between transactions; the caller should retry.
    return False
  # Record who/when/why on every mutated entity before committing.
  for ent in entities_to_put:
    ent.record_revision(
        modified_by=model.get_service_self_identity(),
        modified_ts=ts,
        comment='External group import')
  for ent in entities_to_delete:
    ent.record_deletion(
        modified_by=model.get_service_self_identity(),
        modified_ts=ts,
        comment='External group import')
  # Kick off all datastore RPCs in parallel and wait for their completion.
  futures = (
      ndb.put_multi_async(entities_to_put) +
      ndb.delete_multi_async(ent.key for ent in entities_to_delete))
  for fut in futures:
    fut.check_success()
  auth.replicate_auth_db()
  return True
def delete_realms(project_id):
  """Performs an AuthDB transaction that deletes all realms of some project.

  Args:
    project_id: ID of the project being deleted.
  """
  ent = model.project_realms_key(project_id).get()
  if ent is None:
    return  # already gone
  ent.record_deletion(
      modified_by=model.get_service_self_identity(),
      comment='No longer in the configs')
  ent.key.delete()
  project_realms_meta_key(project_id).delete()
  model.replicate_auth_db()
def _update_authdb_configs(configs): """Pushes new configs to AuthDB entity group. Args: configs: dict {config path -> (Revision tuple, <config>)}. Returns: True if anything has changed since last import. """ # Get model.AuthGlobalConfig entity, to potentially update it. root = model.root_key().get() orig = root.to_dict() revs = _imported_config_revisions_key().get() if not revs: revs = _ImportedConfigRevisions(key=_imported_config_revisions_key(), revisions={}) ingested_revs = {} # path -> Revision for path, (rev, conf) in sorted(configs.items()): dirty = _CONFIG_SCHEMAS[path]['updater'](root, rev, conf) revs.revisions[path] = {'rev': rev.revision, 'url': rev.url} logging.info('Processed %s at rev %s: %s', path, rev.revision, 'updated' if dirty else 'up-to-date') if dirty: ingested_revs[path] = rev if root.to_dict() != orig: assert ingested_revs report = ', '.join('%s@%s' % (p, rev.revision) for p, rev in sorted(ingested_revs.items())) logging.info('Global config has been updated: %s', report) root.record_revision(modified_by=model.get_service_self_identity(), modified_ts=utils.utcnow(), comment='Importing configs: %s' % report) root.put() revs.put() if ingested_revs: model.replicate_auth_db() return bool(ingested_revs)
def import_external_groups():
  """Refetches external groups specified via 'tarball' or 'plainlist' entries.

  Runs as a cron task. Raises BundleImportError in case of import errors.
  """
  config = load_config()
  if not config:
    logging.info('Not configured')
    return
  # Fetch files specified in the config in parallel.
  entries = list(config.tarball) + list(config.plainlist)
  futures = [fetch_file_async(e.url, e.oauth_scopes) for e in entries]
  # {system name -> group name -> list of identities}
  bundles = {}
  # `entries` and `futures` are parallel lists; zip pairs each config entry
  # with its fetched payload.
  for e, future in zip(entries, futures):
    # Unpack tarball into {system name -> group name -> list of identities}.
    if isinstance(e, config_pb2.GroupImporterConfig.TarballEntry):
      fetched = load_tarball(
          future.get_result(), e.systems, e.groups, e.domain)
      # Two tarballs must not provide groups for the same system.
      assert not (set(fetched) & set(bundles)), (fetched.keys(), bundles.keys())
      bundles.update(fetched)
      continue
    # Add plainlist group to 'external/*' bundle.
    if isinstance(e, config_pb2.GroupImporterConfig.PlainlistEntry):
      group = load_group_file(future.get_result(), e.domain)
      name = 'external/%s' % e.group
      if 'external' not in bundles:
        bundles['external'] = {}
      # Each plainlist entry must define a distinct group.
      assert name not in bundles['external'], name
      bundles['external'][name] = group
      continue
    assert False, 'Unreachable'
  import_bundles(
      bundles, model.get_service_self_identity(), 'External group import')
def _update_oauth_config(rev, conf):
  """Pushes an imported oauth.cfg into the AuthGlobalConfig entity.

  Args:
    rev: Revision tuple describing where the config came from.
    conf: OAuthConfig proto message with new settings.

  Returns:
    True if the stored entity was modified, False if already up-to-date.
  """
  assert ndb.in_transaction(), 'Must be called in AuthDB transaction'
  existing = model.root_key().get()
  # Compare only the oauth-related fields, as plain dicts.
  existing_as_dict = {
    'oauth_client_id': existing.oauth_client_id,
    'oauth_client_secret': existing.oauth_client_secret,
    'oauth_additional_client_ids': list(existing.oauth_additional_client_ids),
  }
  new_as_dict = {
    'oauth_client_id': conf.primary_client_id,
    'oauth_client_secret': conf.primary_client_secret,
    'oauth_additional_client_ids': list(conf.client_ids),
  }
  if new_as_dict == existing_as_dict:
    return False
  existing.populate(**new_as_dict)
  existing.record_revision(
      modified_by=model.get_service_self_identity(),
      modified_ts=utils.utcnow(),
      comment='Importing oauth.cfg at rev %s' % rev.revision)
  existing.put()
  return True
def test_update_oauth_config(self):
  """oauth.cfg ingestion updates the root config entity and is idempotent."""
  def run(conf):
    return config._update_authdb_configs({
      'oauth.cfg': (config.Revision('oauth_cfg_rev', 'http://url'), conf),
    })

  # An empty config over an empty state is a no-op.
  self.assertFalse(run(config_pb2.OAuthConfig()))

  # A populated config is persisted into AuthGlobalConfig.
  populated = config_pb2.OAuthConfig(
      primary_client_id='a',
      primary_client_secret='b',
      client_ids=['c', 'd'],
      token_server_url='https://token-server')
  self.assertTrue(run(populated))
  self.assertEqual(
      {
        'auth_db_rev': 1,
        'auth_db_prev_rev': 0,
        'modified_by': model.get_service_self_identity(),
        'modified_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
        'oauth_additional_client_ids': [u'c', u'd'],
        'oauth_client_id': u'a',
        'oauth_client_secret': u'b',
        'security_config': None,
        'token_server_url': u'https://token-server',
      },
      model.root_key().get().to_dict())

  # Feeding the exact same config again is a no-op.
  self.assertFalse(run(populated))
def _update_ip_whitelist_config(rev, conf):
  """Diffs imported ip_whitelist.cfg against stored entities and applies it.

  Creates, updates and deletes AuthIPWhitelist entities and rewrites the
  singleton AuthIPWhitelistAssignments entity to match `conf`.

  Args:
    rev: Revision tuple describing where the config came from.
    conf: IPWhitelistConfig proto message.

  Returns:
    True if any entity was modified, False if everything was up-to-date.
  """
  assert ndb.in_transaction(), 'Must be called in AuthDB transaction'
  now = utils.utcnow()

  # Existing whitelist entities.
  existing_ip_whitelists = {
    e.key.id(): e
    for e in model.AuthIPWhitelist.query(ancestor=model.root_key())
  }

  # Whitelists being imported (name => IPWhitelist proto msg).
  imported_ip_whitelists = {msg.name: msg for msg in conf.ip_whitelists}

  to_put = []
  to_delete = []

  # New or modified IP whitelists.
  for wl_proto in imported_ip_whitelists.itervalues():
    # Convert proto magic list to a regular list.
    subnets = list(wl_proto.subnets)
    # Existing whitelist and it hasn't changed?
    wl = existing_ip_whitelists.get(wl_proto.name)
    if wl and wl.subnets == subnets:
      continue
    # Update existing (to preserve auth_db_prev_rev) or create a new one.
    if not wl:
      wl = model.AuthIPWhitelist(
          key=model.ip_whitelist_key(wl_proto.name),
          created_ts=now,
          created_by=model.get_service_self_identity())
    wl.subnets = subnets
    wl.description = 'Imported from ip_whitelist.cfg at rev %s' % rev.revision
    to_put.append(wl)

  # Removed IP whitelists.
  for wl in existing_ip_whitelists.itervalues():
    if wl.key.id() not in imported_ip_whitelists:
      to_delete.append(wl)

  # Update assignments. Don't touch created_ts and created_by for existing
  # ones.
  ip_whitelist_assignments = (
      model.ip_whitelist_assignments_key().get() or
      model.AuthIPWhitelistAssignments(
          key=model.ip_whitelist_assignments_key()))
  # Existing assignments, keyed by (identity string, whitelist name).
  existing = {
    (a.identity.to_bytes(), a.ip_whitelist): a
    for a in ip_whitelist_assignments.assignments
  }
  updated = []
  for a in conf.assignments:
    # `a.identity` in the proto is already the serialized identity string,
    # so it matches the `to_bytes()` form used in the `existing` keys.
    key = (a.identity, a.ip_whitelist_name)
    if key in existing:
      # Reuse the stored assignment to keep created_ts/created_by intact.
      updated.append(existing[key])
    else:
      new_one = model.AuthIPWhitelistAssignments.Assignment(
          identity=model.Identity.from_bytes(a.identity),
          ip_whitelist=a.ip_whitelist_name,
          comment='Imported from ip_whitelist.cfg at rev %s' % rev.revision,
          created_ts=now,
          created_by=model.get_service_self_identity())
      updated.append(new_one)

  # Something has changed?
  updated_keys = [
    (a.identity.to_bytes(), a.ip_whitelist)
    for a in updated
  ]
  if set(updated_keys) != set(existing):
    ip_whitelist_assignments.assignments = updated
    to_put.append(ip_whitelist_assignments)

  if not to_put and not to_delete:
    return False

  # Stamp all mutated entities with the same revision metadata.
  comment = 'Importing ip_whitelist.cfg at rev %s' % rev.revision
  for e in to_put:
    e.record_revision(
        modified_by=model.get_service_self_identity(),
        modified_ts=now,
        comment=comment)
  for e in to_delete:
    e.record_deletion(
        modified_by=model.get_service_self_identity(),
        modified_ts=now,
        comment=comment)

  # Issue all datastore RPCs in parallel, then wait for all of them.
  futures = []
  futures.extend(ndb.put_multi_async(to_put))
  futures.extend(ndb.delete_multi_async(e.key for e in to_delete))
  for f in futures:
    f.check_success()
  return True
def test_prepare_import(self):
  """prepare_import diffs one system's groups against the imported bundle."""
  existing_groups = [
    group('normal-group', [], ['ldap/cleared']),
    group('not-ldap/some', []),
    group('ldap/updated', ['a']),
    group('ldap/unchanged', ['a']),
    group('ldap/deleted', ['a']),
    group('ldap/cleared', ['a']),
  ]
  imported_groups = {
    'ldap/new': [ident('a')],
    'ldap/updated': [ident('a'), ident('b')],
    'ldap/unchanged': [ident('a')],
  }
  to_put, to_delete = importer.prepare_import(
      'ldap',
      existing_groups,
      imported_groups,
      datetime.datetime(2010, 1, 2, 3, 4, 5, 6))

  expected_to_put = {
    # Missing from the import but referenced by 'normal-group', so it is
    # cleared (members emptied) rather than deleted.
    'ldap/cleared': {
      'auth_db_rev': None,
      'auth_db_prev_rev': None,
      'created_by': ident('admin'),
      'created_ts': datetime.datetime(1999, 1, 2, 3, 4, 5, 6),
      'description': '',
      'globs': [],
      'members': [],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
    # Present only in the import: created by the service identity.
    'ldap/new': {
      'auth_db_rev': None,
      'auth_db_prev_rev': None,
      'created_by': model.get_service_self_identity(),
      'created_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'description': '',
      'globs': [],
      'members': [ident('a')],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
    # Membership differs: updated in place, created_* preserved.
    'ldap/updated': {
      'auth_db_rev': None,
      'auth_db_prev_rev': None,
      'created_by': ident('admin'),
      'created_ts': datetime.datetime(1999, 1, 2, 3, 4, 5, 6),
      'description': '',
      'globs': [],
      'members': [ident('a'), ident('b')],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
  }
  self.assertEqual(expected_to_put, {x.key.id(): x.to_dict() for x in to_put})
  # Unreferenced and missing from the import: scheduled for deletion.
  self.assertEqual(
      [model.group_key('ldap/deleted')], [x.key for x in to_delete])
def test_import_external_groups(self):
  """End-to-end import: tarball and plainlist groups land in the AuthDB."""
  self.mock_now(datetime.datetime(2010, 1, 2, 3, 4, 5, 6))
  # NOTE(review): the config text below was reflowed from a whitespace
  # mangled source; confirm exact whitespace against version control.
  importer.write_config("""
    tarball {
      domain: "example.com"
      groups: "ldap/new"
      oauth_scopes: "scope"
      systems: "ldap"
      url: "https://fake_tarball"
    }
    plainlist {
      group: "external_1"
      oauth_scopes: "scope"
      url: "https://fake_external_1"
    }
    plainlist {
      domain: "example.com"
      group: "external_2"
      oauth_scopes: "scope"
      url: "https://fake_external_2"
    }
  """)
  self.mock_urlfetch({
    'https://fake_tarball': build_tar_gz({
      'ldap/new': 'a\nb',
    }),
    'https://fake_external_1': '[email protected]\[email protected]\n',
    'https://fake_external_2': '123\n456',
  })

  # Should be deleted during import, since not in a imported bundle.
  group('ldap/deleted', []).put()
  # Should be updated.
  group('external/external_1', ['x', 'y']).put()
  # Should be removed, since not in list of external groups.
  group('external/deleted', []).put()

  # Run the import.
  initial_auth_db_rev = model.get_auth_db_revision()
  importer.import_external_groups()
  self.assertEqual(initial_auth_db_rev + 1, model.get_auth_db_revision())

  # Verify final state.
  expected_groups = {
    'ldap/new': {
      'auth_db_rev': 1,
      'auth_db_prev_rev': None,
      'created_by': model.get_service_self_identity(),
      'created_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'description': u'',
      'globs': [],
      'members': [ident('a'), ident('b')],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
    'external/external_1': {
      'auth_db_rev': 1,
      'auth_db_prev_rev': None,
      'created_by': ident('admin'),
      'created_ts': datetime.datetime(1999, 1, 2, 3, 4, 5, 6),
      'description': u'',
      'globs': [],
      'members': [ident('*****@*****.**'), ident('*****@*****.**')],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
    'external/external_2': {
      'auth_db_rev': 1,
      'auth_db_prev_rev': None,
      'created_by': model.get_service_self_identity(),
      'created_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'description': u'',
      'globs': [],
      'members': [ident('123'), ident('456')],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
  }
  self.assertEqual(expected_groups, fetch_groups())
def test_prepare_import(self):
  """prepare_import diffs one system's groups against the imported bundle."""
  existing_groups = [
    group('normal-group', [], ['ldap/cleared']),
    group('not-ldap/some', []),
    group('ldap/updated', ['a']),
    group('ldap/unchanged', ['a']),
    group('ldap/deleted', ['a']),
    group('ldap/cleared', ['a']),
  ]
  imported_groups = {
    'ldap/new': [ident('a')],
    'ldap/updated': [ident('a'), ident('b')],
    'ldap/unchanged': [ident('a')],
  }
  to_put, to_delete = importer.prepare_import(
      'ldap',
      existing_groups,
      imported_groups,
      datetime.datetime(2010, 1, 2, 3, 4, 5, 6))

  expected_to_put = {
    # Missing from the import but referenced by 'normal-group', so it is
    # cleared (members emptied) rather than deleted.
    'ldap/cleared': {
      'auth_db_rev': None,
      'auth_db_prev_rev': None,
      'created_by': ident('admin'),
      'created_ts': datetime.datetime(1999, 1, 2, 3, 4, 5, 6),
      'description': '',
      'globs': [],
      'members': [],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
    # Present only in the import: created by the service identity.
    'ldap/new': {
      'auth_db_rev': None,
      'auth_db_prev_rev': None,
      'created_by': model.get_service_self_identity(),
      'created_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'description': '',
      'globs': [],
      'members': [ident('a')],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
    # Membership differs: updated in place, created_* preserved.
    'ldap/updated': {
      'auth_db_rev': None,
      'auth_db_prev_rev': None,
      'created_by': ident('admin'),
      'created_ts': datetime.datetime(1999, 1, 2, 3, 4, 5, 6),
      'description': '',
      'globs': [],
      'members': [ident('a'), ident('b')],
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
      'nested': [],
      'owners': u'administrators',
    },
  }
  self.assertEqual(expected_to_put, {x.key.id(): x.to_dict() for x in to_put})
  # Unreferenced and missing from the import: scheduled for deletion.
  self.assertEqual(
      [model.group_key('ldap/deleted')], [x.key for x in to_delete])
def mock_get_trusted_signers(self):
  """Returns a trusted-signers map containing only the testbed app itself."""
  # The tests sign tokens with the testbed's own identity, see 'seal_token'.
  certs = signature.get_own_public_certificates()
  return {model.get_service_self_identity().to_bytes(): certs}
def test_default_works(self):
  """The default signature checker trusts the service's own identity."""
  own_id = model.get_service_self_identity().to_bytes()
  checker = delegation.get_signature_checker()
  self.assertTrue(checker.is_trusted_signer(own_id))
  # 'key' is name of fake key in the testbed.
  pem = checker.get_x509_certificate_pem(own_id, 'key')
  self.assertTrue(pem)
def test_update_ip_whitelist_config(self):
  """ip_whitelist.cfg import creates, updates and deletes entities."""
  @ndb.transactional
  def run(conf):
    return config._update_ip_whitelist_config(
        config.Revision('ip_whitelist_cfg_rev', 'http://url'), conf)

  # Pushing empty config to empty DB -> no changes.
  self.assertFalse(run(config_pb2.IPWhitelistConfig()))

  # Added a bunch of IP whitelists and assignments.
  conf = config_pb2.IPWhitelistConfig(
      ip_whitelists=[
        config_pb2.IPWhitelistConfig.IPWhitelist(
            name='abc', subnets=['0.0.0.1/32']),
        config_pb2.IPWhitelistConfig.IPWhitelist(
            name='bots', subnets=['0.0.0.2/32']),
        config_pb2.IPWhitelistConfig.IPWhitelist(name='empty'),
      ],
      assignments=[
        config_pb2.IPWhitelistConfig.Assignment(
            identity='user:[email protected]', ip_whitelist_name='abc'),
        config_pb2.IPWhitelistConfig.Assignment(
            identity='user:[email protected]', ip_whitelist_name='bots'),
        config_pb2.IPWhitelistConfig.Assignment(
            identity='user:[email protected]', ip_whitelist_name='bots'),
      ])
  self.mock_now(datetime.datetime(2014, 1, 2, 3, 4, 5))
  self.assertTrue(run(conf))

  # Verify everything is there.
  self.assertEqual(
      {
        'assignments': [
          {
            'comment':
                u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
            'created_by': model.Identity(kind='service', name='sample-app'),
            'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
            'identity': model.Identity(kind='user', name='*****@*****.**'),
            'ip_whitelist': u'abc',
          },
          {
            'comment':
                u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
            'created_by': model.Identity(kind='service', name='sample-app'),
            'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
            'identity': model.Identity(kind='user', name='*****@*****.**'),
            'ip_whitelist': u'bots',
          },
          {
            'comment':
                u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
            'created_by': model.Identity(kind='service', name='sample-app'),
            'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
            'identity': model.Identity(kind='user', name='*****@*****.**'),
            'ip_whitelist': u'bots',
          },
        ],
        'auth_db_rev': 1,
        'auth_db_prev_rev': None,
        'modified_by': model.get_service_self_identity(),
        'modified_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
      },
      model.ip_whitelist_assignments_key().get().to_dict())
  self.assertEqual(
      {
        'abc': {
          'created_by': 'service:sample-app',
          'created_ts': 1388631845000000,
          'description':
              u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'modified_by': 'service:sample-app',
          'modified_ts': 1388631845000000,
          'subnets': [u'0.0.0.1/32'],
        },
        'bots': {
          'created_by': 'service:sample-app',
          'created_ts': 1388631845000000,
          'description':
              u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'modified_by': 'service:sample-app',
          'modified_ts': 1388631845000000,
          'subnets': [u'0.0.0.2/32'],
        },
        'empty': {
          'created_by': 'service:sample-app',
          'created_ts': 1388631845000000,
          'description':
              u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'modified_by': 'service:sample-app',
          'modified_ts': 1388631845000000,
          'subnets': [],
        },
      },
      {
        x.key.id(): x.to_serializable_dict()
        for x in model.AuthIPWhitelist.query(ancestor=model.root_key())
      })

  # Exact same config a bit later -> no changes applied.
  self.mock_now(datetime.datetime(2014, 2, 2, 3, 4, 5))
  self.assertFalse(run(conf))

  # Modify whitelist, add new one, remove some. Same for assignments.
  conf = config_pb2.IPWhitelistConfig(
      ip_whitelists=[
        config_pb2.IPWhitelistConfig.IPWhitelist(
            name='abc', subnets=['0.0.0.3/32']),
        config_pb2.IPWhitelistConfig.IPWhitelist(
            name='bots', subnets=['0.0.0.2/32']),
        config_pb2.IPWhitelistConfig.IPWhitelist(name='another'),
      ],
      assignments=[
        config_pb2.IPWhitelistConfig.Assignment(
            identity='user:[email protected]', ip_whitelist_name='abc'),
        config_pb2.IPWhitelistConfig.Assignment(
            identity='user:[email protected]', ip_whitelist_name='another'),
        config_pb2.IPWhitelistConfig.Assignment(
            identity='user:[email protected]', ip_whitelist_name='bots'),
      ])
  self.mock_now(datetime.datetime(2014, 3, 2, 3, 4, 5))
  self.assertTrue(run(conf))

  # Verify everything is there.
  self.assertEqual(
      {
        'assignments': [
          {
            'comment':
                u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
            'created_by': model.Identity(kind='service', name='sample-app'),
            'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
            'identity': model.Identity(kind='user', name='*****@*****.**'),
            'ip_whitelist': u'abc',
          },
          {
            'comment':
                u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
            'created_by': model.Identity(kind='service', name='sample-app'),
            'created_ts': datetime.datetime(2014, 3, 2, 3, 4, 5),
            'identity': model.Identity(kind='user', name='*****@*****.**'),
            'ip_whitelist': u'another',
          },
          {
            'comment':
                u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
            'created_by': model.Identity(kind='service', name='sample-app'),
            'created_ts': datetime.datetime(2014, 3, 2, 3, 4, 5),
            'identity': model.Identity(kind='user', name='*****@*****.**'),
            'ip_whitelist': u'bots',
          },
        ],
        'auth_db_rev': 1,
        'auth_db_prev_rev': 1,  # replicate_auth_db is mocked, so no version bump
        'modified_by': model.get_service_self_identity(),
        'modified_ts': datetime.datetime(2014, 3, 2, 3, 4, 5),
      },
      model.ip_whitelist_assignments_key().get().to_dict())
  self.assertEqual(
      {
        'abc': {
          'created_by': 'service:sample-app',
          'created_ts': 1388631845000000,
          'description':
              u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'modified_by': 'service:sample-app',
          'modified_ts': 1393729445000000,
          'subnets': [u'0.0.0.3/32'],
        },
        'bots': {
          'created_by': 'service:sample-app',
          'created_ts': 1388631845000000,
          'description':
              u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'modified_by': 'service:sample-app',
          'modified_ts': 1388631845000000,
          'subnets': [u'0.0.0.2/32'],
        },
        'another': {
          'created_by': 'service:sample-app',
          'created_ts': 1393729445000000,
          'description':
              u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'modified_by': 'service:sample-app',
          'modified_ts': 1393729445000000,
          'subnets': [],
        },
      },
      {
        x.key.id(): x.to_serializable_dict()
        for x in model.AuthIPWhitelist.query(ancestor=model.root_key())
      })
def test_bad_key_id(self):
  """Requesting a certificate for an unknown key id raises CertificateError."""
  own_id = model.get_service_self_identity().to_bytes()
  checker = delegation.get_signature_checker()
  with self.assertRaises(signature.CertificateError):
    checker.get_x509_certificate_pem(own_id, 'bad key id')
def test_update_ip_whitelist_config(self):
  """End-to-end check of config._update_ip_whitelist_config.

  Scenario:
    1. Empty config into empty DB -> no-op.
    2. First real config -> whitelists and assignments created.
    3. Exact same config later -> no-op (timestamps unchanged).
    4. Modified config -> updates, additions and removals applied, with
       created_ts/created_by preserved for entries that already existed.
  """
  @ndb.transactional
  def run(conf):
    # _update_ip_whitelist_config asserts it runs inside an NDB transaction.
    return config._update_ip_whitelist_config(
        config.Revision('ip_whitelist_cfg_rev', 'http://url'), conf)
  # Pushing empty config to empty DB -> no changes.
  self.assertFalse(run(config_pb2.IPWhitelistConfig()))
  # Added a bunch of IP whitelists and assignments.
  conf = config_pb2.IPWhitelistConfig(
      ip_whitelists=[
          config_pb2.IPWhitelistConfig.IPWhitelist(
              name='abc', subnets=['0.0.0.1/32']),
          config_pb2.IPWhitelistConfig.IPWhitelist(
              name='bots', subnets=['0.0.0.2/32']),
          config_pb2.IPWhitelistConfig.IPWhitelist(name='empty'),
      ],
      assignments=[
          config_pb2.IPWhitelistConfig.Assignment(
              identity='user:[email protected]', ip_whitelist_name='abc'),
          config_pb2.IPWhitelistConfig.Assignment(
              identity='user:[email protected]', ip_whitelist_name='bots'),
          config_pb2.IPWhitelistConfig.Assignment(
              identity='user:[email protected]', ip_whitelist_name='bots'),
      ])
  self.mock_now(datetime.datetime(2014, 1, 2, 3, 4, 5))
  self.assertTrue(run(conf))
  # Verify everything is there.
  self.assertEqual({
      'assignments': [
        {
          'comment': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'created_by': model.Identity(kind='service', name='sample-app'),
          'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
          'identity': model.Identity(kind='user', name='*****@*****.**'),
          'ip_whitelist': u'abc',
        },
        {
          'comment': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'created_by': model.Identity(kind='service', name='sample-app'),
          'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
          'identity': model.Identity(kind='user', name='*****@*****.**'),
          'ip_whitelist': u'bots',
        },
        {
          'comment': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'created_by': model.Identity(kind='service', name='sample-app'),
          'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
          'identity': model.Identity(kind='user', name='*****@*****.**'),
          'ip_whitelist': u'bots',
        },
      ],
      'auth_db_rev': 1,
      'auth_db_prev_rev': None,
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
    }, model.ip_whitelist_assignments_key().get().to_dict())
  # Serialized timestamps are unix microseconds:
  # 1388631845000000 == 2014-01-02 03:04:05.
  self.assertEqual(
    {
      'abc': {
        'created_by': 'service:sample-app',
        'created_ts': 1388631845000000,
        'description': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
        'modified_by': 'service:sample-app',
        'modified_ts': 1388631845000000,
        'subnets': [u'0.0.0.1/32'],
      },
      'bots': {
        'created_by': 'service:sample-app',
        'created_ts': 1388631845000000,
        'description': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
        'modified_by': 'service:sample-app',
        'modified_ts': 1388631845000000,
        'subnets': [u'0.0.0.2/32'],
      },
      'empty': {
        'created_by': 'service:sample-app',
        'created_ts': 1388631845000000,
        'description': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
        'modified_by': 'service:sample-app',
        'modified_ts': 1388631845000000,
        'subnets': [],
      },
    },
    {
      x.key.id(): x.to_serializable_dict()
      for x in model.AuthIPWhitelist.query(ancestor=model.root_key())
    })
  # Exact same config a bit later -> no changes applied.
  self.mock_now(datetime.datetime(2014, 2, 2, 3, 4, 5))
  self.assertFalse(run(conf))
  # Modify whitelist, add new one, remove some. Same for assignments.
  conf = config_pb2.IPWhitelistConfig(
      ip_whitelists=[
          config_pb2.IPWhitelistConfig.IPWhitelist(
              name='abc', subnets=['0.0.0.3/32']),
          config_pb2.IPWhitelistConfig.IPWhitelist(
              name='bots', subnets=['0.0.0.2/32']),
          config_pb2.IPWhitelistConfig.IPWhitelist(name='another'),
      ],
      assignments=[
          config_pb2.IPWhitelistConfig.Assignment(
              identity='user:[email protected]', ip_whitelist_name='abc'),
          config_pb2.IPWhitelistConfig.Assignment(
              identity='user:[email protected]', ip_whitelist_name='another'),
          config_pb2.IPWhitelistConfig.Assignment(
              identity='user:[email protected]', ip_whitelist_name='bots'),
      ])
  self.mock_now(datetime.datetime(2014, 3, 2, 3, 4, 5))
  self.assertTrue(run(conf))
  # Verify everything is there.
  # Note: the 'abc' assignment keeps its original created_ts; only the
  # assignments added by this second push get the new timestamp.
  self.assertEqual({
      'assignments': [
        {
          'comment': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'created_by': model.Identity(kind='service', name='sample-app'),
          'created_ts': datetime.datetime(2014, 1, 2, 3, 4, 5),
          'identity': model.Identity(kind='user', name='*****@*****.**'),
          'ip_whitelist': u'abc',
        },
        {
          'comment': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'created_by': model.Identity(kind='service', name='sample-app'),
          'created_ts': datetime.datetime(2014, 3, 2, 3, 4, 5),
          'identity': model.Identity(kind='user', name='*****@*****.**'),
          'ip_whitelist': u'another',
        },
        {
          'comment': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
          'created_by': model.Identity(kind='service', name='sample-app'),
          'created_ts': datetime.datetime(2014, 3, 2, 3, 4, 5),
          'identity': model.Identity(kind='user', name='*****@*****.**'),
          'ip_whitelist': u'bots',
        },
      ],
      'auth_db_rev': 1,
      'auth_db_prev_rev': 1,  # replicate_auth_db is mocked, so no version bump
      'modified_by': model.get_service_self_identity(),
      'modified_ts': datetime.datetime(2014, 3, 2, 3, 4, 5),
    }, model.ip_whitelist_assignments_key().get().to_dict())
  # 1393729445000000 == 2014-03-02 03:04:05 in unix microseconds. 'bots' was
  # unchanged by the second push, so its modified_ts stays at the first push.
  self.assertEqual(
    {
      'abc': {
        'created_by': 'service:sample-app',
        'created_ts': 1388631845000000,
        'description': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
        'modified_by': 'service:sample-app',
        'modified_ts': 1393729445000000,
        'subnets': [u'0.0.0.3/32'],
      },
      'bots': {
        'created_by': 'service:sample-app',
        'created_ts': 1388631845000000,
        'description': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
        'modified_by': 'service:sample-app',
        'modified_ts': 1388631845000000,
        'subnets': [u'0.0.0.2/32'],
      },
      'another': {
        'created_by': 'service:sample-app',
        'created_ts': 1393729445000000,
        'description': u'Imported from ip_whitelist.cfg at rev ip_whitelist_cfg_rev',
        'modified_by': 'service:sample-app',
        'modified_ts': 1393729445000000,
        'subnets': [],
      },
    },
    {
      x.key.id(): x.to_serializable_dict()
      for x in model.AuthIPWhitelist.query(ancestor=model.root_key())
    })
def _update_ip_whitelist_config(rev, conf):
  """Applies an imported ip_whitelist.cfg to the AuthDB entities.

  Diffs the imported config against existing AuthIPWhitelist entities and the
  singleton AuthIPWhitelistAssignments entity, then writes only what changed.
  Preserves created_ts/created_by on whitelists and assignments that already
  exist; stamps new and modified entities with the current time and the
  service's own identity.

  Args:
    rev: Revision object; rev.revision is embedded in entity descriptions and
        change-log comments.
    conf: IPWhitelistConfig proto message with ip_whitelists and assignments.

  Returns:
    True if any entity was created, updated or deleted; False if the config
    exactly matches the current state (nothing written).
  """
  assert ndb.in_transaction(), 'Must be called in AuthDB transaction'
  now = utils.utcnow()
  # Existing whitelist entities.
  existing_ip_whitelists = {
      e.key.id(): e
      for e in model.AuthIPWhitelist.query(ancestor=model.root_key())
  }
  # Whitelists being imported (name => IPWhitelist proto msg).
  imported_ip_whitelists = {msg.name: msg for msg in conf.ip_whitelists}
  to_put = []
  to_delete = []
  # New or modified IP whitelists.
  for wl_proto in imported_ip_whitelists.itervalues():
    # Convert proto magic list to a regular list.
    subnets = list(wl_proto.subnets)
    # Existing whitelist and it hasn't changed?
    wl = existing_ip_whitelists.get(wl_proto.name)
    if wl and wl.subnets == subnets:
      continue
    # Update existing (to preserve auth_db_prev_rev) or create a new one.
    if not wl:
      wl = model.AuthIPWhitelist(
          key=model.ip_whitelist_key(wl_proto.name),
          created_ts=now,
          created_by=model.get_service_self_identity())
    wl.subnets = subnets
    wl.description = 'Imported from ip_whitelist.cfg at rev %s' % rev.revision
    to_put.append(wl)
  # Removed IP whitelists.
  for wl in existing_ip_whitelists.itervalues():
    if wl.key.id() not in imported_ip_whitelists:
      to_delete.append(wl)
  # Update assignments. Don't touch created_ts and created_by for existing
  # ones. The singleton assignments entity is created lazily on first import.
  ip_whitelist_assignments = (
      model.ip_whitelist_assignments_key().get() or
      model.AuthIPWhitelistAssignments(
          key=model.ip_whitelist_assignments_key()))
  existing = {
      (a.identity.to_bytes(), a.ip_whitelist): a
      for a in ip_whitelist_assignments.assignments
  }
  updated = []
  for a in conf.assignments:
    # a.identity is the serialized identity string from the proto; it is fed
    # to Identity.from_bytes below, so it matches the to_bytes() keys above.
    key = (a.identity, a.ip_whitelist_name)
    if key in existing:
      updated.append(existing[key])
    else:
      new_one = model.AuthIPWhitelistAssignments.Assignment(
          identity=model.Identity.from_bytes(a.identity),
          ip_whitelist=a.ip_whitelist_name,
          comment='Imported from ip_whitelist.cfg at rev %s' % rev.revision,
          created_ts=now,
          created_by=model.get_service_self_identity())
      updated.append(new_one)
  # Something has changed? Compared as sets, so a pure reordering of the same
  # assignments does not count as a change.
  updated_keys = [(a.identity.to_bytes(), a.ip_whitelist) for a in updated]
  if set(updated_keys) != set(existing):
    ip_whitelist_assignments.assignments = updated
    to_put.append(ip_whitelist_assignments)
  if not to_put and not to_delete:
    return False
  comment = 'Importing ip_whitelist.cfg at rev %s' % rev.revision
  for e in to_put:
    e.record_revision(
        modified_by=model.get_service_self_identity(),
        modified_ts=now,
        comment=comment)
  for e in to_delete:
    e.record_deletion(
        modified_by=model.get_service_self_identity(),
        modified_ts=now,
        comment=comment)
  # Issue all writes concurrently, then wait for every one of them.
  futures = []
  futures.extend(ndb.put_multi_async(to_put))
  futures.extend(ndb.delete_multi_async(e.key for e in to_delete))
  for f in futures:
    f.check_success()
  return True