def test_validate_acl_cfg(self):
    """acl.cfg: unparseable content yields one ERROR; a valid config is clean."""
    bad = """
      invalid_field: "admins"
    """
    res = validation.validate_config(config.self_config_set(), "acl.cfg", bad)
    self.assertEqual(len(res.messages), 1)
    msg = res.messages[0]
    self.assertEqual(msg.severity, logging.ERROR)
    self.assertTrue(msg.text.startswith("Could not parse config"))

    good = """
      project_access_group: "admins"
    """
    res = validation.validate_config(config.self_config_set(), "acl.cfg", good)
    self.assertEqual(len(res.messages), 0)
def test_validate_acl_cfg(self):
    """acl.cfg: an unknown field produces one ERROR naming it; valid cfg passes."""
    bad = '''
      invalid_field: "admins"
    '''
    res = validation.validate_config(config.self_config_set(), 'acl.cfg', bad)
    self.assertEqual(len(res.messages), 1)
    msg = res.messages[0]
    self.assertEqual(msg.severity, logging.ERROR)
    self.assertTrue('no field named "invalid_field"' in msg.text)

    good = '''
      project_access_group: "admins"
    '''
    res = validation.validate_config(config.self_config_set(), 'acl.cfg', good)
    self.assertEqual(len(res.messages), 0)
def test_validate_services_registry(self):
    """services.cfg: each per-service error and the sort-order error is reported."""
    cfg = '''
      services {
        id: "a"
        access: "*****@*****.**"
        access: "user:[email protected]"
        access: "group:abc"
      }
      services {
        owners: "not an email"
        metadata_url: "not an url"
        access: "**&"
        access: "group:**&"
        access: "a:b"
      }
      services {
        id: "b"
      }
      services {
        id: "a-unsorted"
      }
    '''
    expected = [
        'Service #2: id is not specified',
        'Service #2: invalid email: "not an email"',
        'Service #2: metadata_url: hostname not specified',
        'Service #2: metadata_url: scheme must be "https"',
        'Service #2: access #1: invalid email: "**&"',
        'Service #2: access #2: invalid group: **&',
        'Service #2: access #3: Identity has invalid format: b',
        'Services are not sorted by id. First offending id: a-unsorted',
    ]
    res = validation.validate_config(config.self_config_set(), 'services.cfg', cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def test_validate_refs(self):
    """refs.cfg: missing names, duplicates, bad prefixes and '..' paths are flagged."""
    cfg = '''
      refs {
        name: "refs/heads/master"
      }
      # Invalid configs
      refs {
      }
      refs {
        name: "refs/heads/master"
        config_path: "non_default"
      }
      refs {
        name: "does_not_start_with_ref"
        config_path: "../bad/path"
      }
    '''
    expected = [
        'Ref #2: name is not specified',
        'Ref #3: duplicate ref: refs/heads/master',
        'Ref #4: name does not start with "refs/": does_not_start_with_ref',
        'Ref #4: must not contain ".." or "." components: ../bad/path',
    ]
    res = validation.validate_config('projects/x', 'refs.cfg', cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def _read_and_validate_archive(config_set, rev_key, archive, location):
    """Reads an archive, validates all files, imports blobs and returns files.

    If all files are valid, saves contents to Blob entities and returns files
    with their hashes.

    Args:
      config_set: name of the config set the archive belongs to.
      rev_key: ndb.Key of the Revision that parents the File entities.
      archive: raw tar.gz bytes.
      location: base location; each file's URL is location.join(<name>).

    Return:
      (files, validation_result) tuple. |files| is empty when validation
      found errors.
    """
    # Lazy %-args: message is only formatted if INFO logging is enabled.
    logging.info('%s archive size: %d bytes', config_set, len(archive))

    stream = StringIO.StringIO(archive)
    blob_futures = []
    # 'r|gz' streams the tarball without seeking, matching the StringIO source.
    with tarfile.open(mode='r|gz', fileobj=stream) as tar:
        files = {}
        ctx = config.validation.Context()
        for item in tar:
            if not item.isreg():  # pragma: no cover
                continue
            logging.info('Found file "%s"', item.name)
            with contextlib.closing(tar.extractfile(item)) as extracted:
                content = extracted.read()
                files[item.name] = content
                # Prefix validation messages with the file name for context.
                with ctx.prefix(item.name + ': '):
                    validation.validate_config(config_set, item.name, content, ctx=ctx)

    if ctx.result().has_errors:
        return [], ctx.result()

    entities = []
    for name, content in files.iteritems():
        content_hash = storage.compute_hash(content)
        blob_futures.append(
            storage.import_blob_async(content=content, content_hash=content_hash))
        entities.append(
            storage.File(
                id=name,
                parent=rev_key,
                content_hash=content_hash,
                url=str(location.join(name))))

    # Wait for Blobs to be imported before proceeding.
    ndb.Future.wait_all(blob_futures)
    return entities, ctx.result()
def test_validate_acl_cfg(self):
    """acl.cfg: bad content produces exactly one parse ERROR; valid cfg is clean."""
    bad = '''
      invalid_field: "admins"
    '''
    res = validation.validate_config(config.self_config_set(), 'acl.cfg', bad)
    self.assertEqual(len(res.messages), 1)
    msg = res.messages[0]
    self.assertEqual(msg.severity, logging.ERROR)
    self.assertTrue(msg.text.startswith('Could not parse config'))

    good = '''
      project_access_group: "admins"
    '''
    res = validation.validate_config(config.self_config_set(), 'acl.cfg', good)
    self.assertEqual(len(res.messages), 0)
def test_validate_project_metadata(self):
    """project.cfg: a well-formed metadata config produces no messages."""
    cfg = """
      name: "Chromium"
      access: "group:all"
      access: "*****@*****.**"
    """
    res = validation.validate_config("projects/x", "project.cfg", cfg)
    self.assertEqual(len(res.messages), 0)
def test_validate_project_metadata(self):
    """project.cfg: valid name/access entries validate without messages."""
    cfg = '''
      name: "Chromium"
      access: "group:all"
      access: "*****@*****.**"
    '''
    res = validation.validate_config('projects/x', 'project.cfg', cfg)
    self.assertEqual(len(res.messages), 0)
def test_validate_refs(self):
    """refs.cfg: any refs.cfg is rejected as deprecated."""
    cfg = '''
      refs {
        name: "refs/heads/master"
      }
    '''
    res = validation.validate_config('projects/x', 'refs.cfg', cfg)
    self.assertEqual(
        [m.text for m in res.messages],
        ['refs.cfg is not used since 2019 and must be deleted'])
def _read_and_validate_archive(config_set, rev_key, archive):
    """Reads an archive, validates all files, imports blobs and returns files.

    If all files are valid, saves contents to Blob entities and returns files
    with their hashes.

    Args:
      config_set: name of the config set the archive belongs to.
      rev_key: ndb.Key of the Revision that parents the File entities.
      archive: raw tar.gz bytes.

    Return:
      (files, validation_result) tuple. |files| is empty when validation
      found errors.
    """
    # Lazy %-args: message is only formatted if INFO logging is enabled.
    logging.info('%s archive size: %d bytes', config_set, len(archive))

    stream = StringIO.StringIO(archive)
    blob_futures = []
    # 'r|gz' streams the tarball without seeking, matching the StringIO source.
    with tarfile.open(mode='r|gz', fileobj=stream) as tar:
        files = {}
        ctx = config.validation.Context()
        for item in tar:
            if not item.isreg():  # pragma: no cover
                continue
            with contextlib.closing(tar.extractfile(item)) as extracted:
                content = extracted.read()
                files[item.name] = content
                validation.validate_config(config_set, item.name, content, ctx=ctx)

    if ctx.result().has_errors:
        return [], ctx.result()

    entities = []
    for name, content in files.iteritems():
        content_hash = storage.compute_hash(content)
        blob_futures.append(storage.import_blob_async(
            content=content, content_hash=content_hash))
        entities.append(
            storage.File(id=name, parent=rev_key, content_hash=content_hash))

    # Wait for Blobs to be imported before proceeding.
    ndb.Future.wait_all(blob_futures)
    return entities, ctx.result()
def test_validate_schemas(self):
    """schemas.cfg: valid schemas pass; each malformed one is reported."""
    cfg = '''
      schemas {
        name: "services/config:foo"
        url: "https://foo"
      }
      schemas {
        name: "projects:foo"
        url: "https://foo"
      }
      schemas {
        name: "projects/refs:foo"
        url: "https://foo"
      }
      # Invalid schemas.
      schemas {
      }
      schemas {
        name: "services/config:foo"
        url: "https://foo"
      }
      schemas {
        name: "no_colon"
        url: "http://foo"
      }
      schemas {
        name: "bad_prefix:foo"
        url: "https://foo"
      }
      schemas {
        name: "projects:foo/../a.cfg"
        url: "https://foo"
      }
    '''
    expected = [
        'Schema #4: name is not specified',
        'Schema #4: url: not specified',
        'Schema services/config:foo: duplicate schema name',
        'Schema no_colon: name must contain ":"',
        'Schema no_colon: url: scheme must be "https"',
        ('Schema bad_prefix:foo: left side of ":" must be a service config '
         'set, "projects" or "projects/refs"'),
        ('Schema projects:foo/../a.cfg: '
         'must not contain ".." or "." components: foo/../a.cfg'),
    ]
    res = validation.validate_config(config.self_config_set(), 'schemas.cfg', cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def test_validate_project_registry(self):
    """projects.cfg: gitiles_location errors and sort-order are all reported."""
    cfg = '''
      projects {
        id: "a"
        gitiles_location {
          repo: "https://a.googlesource.com/ok"
          ref: "refs/heads/main"
          path: "infra/config/generated"
        }
      }
      projects {
        id: "b"
      }
      projects {
        id: "a"
        gitiles_location {
          repo: "https://a.googlesource.com/project/"
          ref: "refs/heads/infra/config"
          path: "/generated"
        }
      }
      projects {
        gitiles_location {
          repo: "https://a.googlesource.com/project.git"
          ref: "branch"
        }
      }
      projects {
        id: "c"
        gitiles_location {
          repo: "https://a.googlesource.com/missed/ref"
        }
      }
    '''
    expected = [
        'Project b: gitiles_location: repo: not specified',
        'Project b: gitiles_location: ref is not set',
        'Project a: id is not unique',
        'Project a: gitiles_location: repo: must not end with "/"',
        'Project a: gitiles_location: path must not start with "/"',
        'Project #4: id is not specified',
        'Project #4: gitiles_location: repo: must not end with ".git"',
        'Project #4: gitiles_location: ref must start with "refs/"',
        'Project c: gitiles_location: ref is not set',
        'Projects are not sorted by id. First offending id: a',
    ]
    res = validation.validate_config(config.self_config_set(), 'projects.cfg', cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def test_validate_services_registry(self):
    """services.cfg: config_location, email, url, access and ordering errors."""
    cfg = '''
      services {
        id: "a"
        access: "*****@*****.**"
        access: "user:[email protected]"
        access: "group:abc"
      }
      services {
        owners: "not an email"
        config_location {
          storage_type: GITILES
          url: "../some"
        }
        metadata_url: "not an url"
        access: "**&"
        access: "group:**&"
        access: "a:b"
      }
      services {
        id: "b"
        config_location {
          storage_type: GITILES
          url: "https://gitiles.host.com/project"
        }
      }
      services {
        id: "a-unsorted"
      }
    '''
    expected = [
        'Service #2: id is not specified',
        ('Service #2: config_location: '
         'storage_type must not be set if relative url is used'),
        'Service #2: invalid email: "not an email"',
        'Service #2: metadata_url: hostname not specified',
        'Service #2: metadata_url: scheme must be "https"',
        'Service #2: access #1: invalid email: "**&"',
        'Service #2: access #2: invalid group: **&',
        'Service #2: access #3: Identity has invalid format: b',
        'Services are not sorted by id. First offending id: a-unsorted',
    ]
    res = validation.validate_config(config.self_config_set(), 'services.cfg', cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def test_validate_project_registry(self):
    """projects.cfg: config_location errors (bad urls, missing ref) and ordering."""
    cfg = '''
      projects {
        id: "a"
        config_location {
          storage_type: GITILES
          url: "https://a.googlesource.com/project/+/refs/heads/master"
        }
      }
      projects {
        id: "b"
      }
      projects {
        id: "a"
        config_location {
          storage_type: GITILES
          url: "https://no-project.googlesource.com"
        }
      }
      projects {
        config_location {
          storage_type: GITILES
          url: "https://example.googlesource.com/bad_plus/+"
        }
      }
      projects {
        id: "c"
        config_location {
          storage_type: GITILES
          url: "https://example.googlesource.com/no_ref/"
        }
      }
    '''
    expected = [
        'Project b: config_location: storage_type is not set',
        'Project a: id is not unique',
        ('Project a: config_location: Invalid Gitiles repo url: '
         'https://no-project.googlesource.com'),
        'Project #4: id is not specified',
        ('Project #4: config_location: Invalid Gitiles repo url: '
         'https://example.googlesource.com/bad_plus/+'),
        'Project c: config_location: ref/commit is not specified',
        'Projects are not sorted by id. First offending id: a',
    ]
    res = validation.validate_config(config.self_config_set(), 'projects.cfg', cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def test_validate_project_registry(self):
    """projects.cfg: missing storage_type, dup ids, bad Gitiles urls, ordering."""
    cfg = """
      projects {
        id: "a"
        config_location {
          storage_type: GITILES
          url: "https://a.googlesource.com/project"
        }
      }
      projects {
        id: "b"
      }
      projects {
        id: "a"
        config_location {
          storage_type: GITILES
          url: "https://no-project.googlesource.com"
        }
      }
      projects {
        config_location {
          storage_type: GITILES
          url: "https://no-project.googlesource.com/bad_plus/+"
        }
      }
    """
    expected = [
        "Project b: config_location: storage_type is not set",
        "Project a: id is not unique",
        ("Project a: config_location: Invalid Gitiles repo url: "
         "https://no-project.googlesource.com"),
        "Project #4: id is not specified",
        ("Project #4: config_location: Invalid Gitiles repo url: "
         "https://no-project.googlesource.com/bad_plus/+"),
        "Projects are not sorted by id. First offending id: a",
    ]
    res = validation.validate_config(config.self_config_set(), "projects.cfg", cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def test_validate_project_registry(self):
    """projects.cfg: each config_location/id defect plus sort order is flagged."""
    cfg = '''
      projects {
        id: "a"
        config_location {
          storage_type: GITILES
          url: "https://a.googlesource.com/project"
        }
      }
      projects {
        id: "b"
      }
      projects {
        id: "a"
        config_location {
          storage_type: GITILES
          url: "https://no-project.googlesource.com"
        }
      }
      projects {
        config_location {
          storage_type: GITILES
          url: "https://no-project.googlesource.com/bad_plus/+"
        }
      }
    '''
    expected = [
        'Project b: config_location: storage_type is not set',
        'Project a: id is not unique',
        ('Project a: config_location: Invalid Gitiles repo url: '
         'https://no-project.googlesource.com'),
        'Project #4: id is not specified',
        ('Project #4: config_location: Invalid Gitiles repo url: '
         'https://no-project.googlesource.com/bad_plus/+'),
        'Projects are not sorted by id. First offending id: a',
    ]
    res = validation.validate_config(config.self_config_set(), 'projects.cfg', cfg)
    self.assertEqual([m.text for m in res.messages], expected)
def import_revision(
        config_set, base_location, revision, create_config_set=False):
    """Imports a referenced Gitiles revision into a config set.

    |base_location| will be used to set storage.ConfigSet.location.

    If |create_config_set| is True and Revision entity does not exist,
    then creates ConfigSet with latest_revision set to |location.treeish|.
    """
    # \Z anchors the end of the string: re.match only anchors the start, so
    # without it any string merely *starting* with 40 hex chars would pass.
    assert re.match(r'[0-9a-f]{40}\Z', revision), (
        '"%s" is not a valid sha' % revision)
    logging.debug('Importing revision %s @ %s', config_set, revision)
    rev_key = ndb.Key(
        storage.ConfigSet, config_set, storage.Revision, revision)

    updated_config_set = storage.ConfigSet(
        id=config_set,
        latest_revision=revision,
        location=str(base_location))

    if rev_key.get():
        if create_config_set:
            updated_config_set.put()
        return

    # Fetch archive, extract files and save them to Blobs outside ConfigSet
    # transaction.
    location = base_location._replace(treeish=revision)
    archive = location.get_archive(
        deadline=get_gitiles_config().fetch_archive_deadline)
    if not archive:
        logging.error(
            'Could not import %s: configuration does not exist', config_set)
        return

    # Lazy %-args: message is only formatted if INFO logging is enabled.
    logging.info('%s archive size: %d bytes', config_set, len(archive))

    entities_to_put = [storage.Revision(key=rev_key)]
    if create_config_set:
        entities_to_put.append(updated_config_set)

    stream = StringIO.StringIO(archive)
    blob_futures = []
    # 'r|gz' streams the tarball without seeking, matching the StringIO source.
    with tarfile.open(mode='r|gz', fileobj=stream) as tar:
        for item in tar:
            if not item.isreg():  # pragma: no cover
                continue
            with contextlib.closing(tar.extractfile(item)) as extracted:
                content = extracted.read()
                ctx = config.validation.Context.logging()
                validation.validate_config(config_set, item.name, content, ctx=ctx)
                if ctx.result().has_errors:
                    # Abort the whole import on the first invalid file.
                    logging.error('Invalid revision %s@%s', config_set, revision)
                    return
                content_hash = storage.compute_hash(content)
                blob_futures.append(storage.import_blob_async(
                    content=content, content_hash=content_hash))
                entities_to_put.append(
                    storage.File(
                        id=item.name,
                        parent=rev_key,
                        content_hash=content_hash))

    # Wait for Blobs to be imported before proceeding.
    ndb.Future.wait_all(blob_futures)

    @ndb.transactional
    def do_import():
        # Re-check inside the transaction to avoid importing twice.
        if not rev_key.get():
            ndb.put_multi(entities_to_put)

    do_import()
    logging.info('Imported revision %s/%s', config_set, location.treeish)
def test_validation_by_service_async(self):
    """External validation dispatch: a config is sent to every service whose
    pattern matches, results are merged, and malformed validator responses
    become CRITICAL messages."""
    cfg = "# a config"
    cfg_b64 = base64.b64encode(cfg)
    self.services = [
        service_config_pb2.Service(id="a"),
        service_config_pb2.Service(id="b"),
        service_config_pb2.Service(id="c"),
    ]

    # Three mocked services: "a" matches an exact config set/path, "b" matches
    # any project config, "c" matches everything.
    @ndb.tasklet
    def get_metadata_async(service_id):
        if service_id == "a":
            raise ndb.Return(
                service_config_pb2.ServiceDynamicMetadata(
                    validation=service_config_pb2.Validator(
                        patterns=[service_config_pb2.ConfigPattern(config_set="services/foo", path="bar.cfg")],
                        url="https://bar.verifier",
                    )
                )
            )
        if service_id == "b":
            raise ndb.Return(
                service_config_pb2.ServiceDynamicMetadata(
                    validation=service_config_pb2.Validator(
                        patterns=[
                            service_config_pb2.ConfigPattern(
                                config_set=r"regex:projects/[^/]+", path=r"regex:.+\.cfg"
                            )
                        ],
                        url="https://bar2.verifier",
                    )
                )
            )
        if service_id == "c":
            raise ndb.Return(
                service_config_pb2.ServiceDynamicMetadata(
                    validation=service_config_pb2.Validator(
                        patterns=[service_config_pb2.ConfigPattern(config_set=r"regex:.+", path=r"regex:.+")],
                        url="https://ultimate.verifier",
                    )
                )
            )
        return None

    self.mock(services, "get_metadata_async", mock.Mock())
    services.get_metadata_async.side_effect = get_metadata_async

    # Default remote-validator response: one INFO-severity message per call.
    @ndb.tasklet
    def json_request_async(url, **kwargs):
        raise ndb.Return(
            {
                "messages": [
                    {
                        "text": "OK from %s" % url,
                        # default severity
                    }
                ]
            }
        )

    self.mock(net, "json_request_async", mock.Mock(side_effect=json_request_async))

    ############################################################################
    # services/foo matches validators "a" and "c" (catch-all).

    result = validation.validate_config("services/foo", "bar.cfg", cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(text="OK from https://bar.verifier", severity=logging.INFO),
            validation_context.Message(text="OK from https://ultimate.verifier", severity=logging.INFO),
        ],
    )
    net.json_request_async.assert_any_call(
        "https://bar.verifier",
        method="POST",
        payload={"config_set": "services/foo", "path": "bar.cfg", "content": cfg_b64},
        scopes=net.EMAIL_SCOPE,
    )
    net.json_request_async.assert_any_call(
        "https://ultimate.verifier",
        method="POST",
        payload={"config_set": "services/foo", "path": "bar.cfg", "content": cfg_b64},
        scopes=net.EMAIL_SCOPE,
    )

    ############################################################################
    # projects/foo matches validators "b" (project regex) and "c" (catch-all).

    result = validation.validate_config("projects/foo", "bar.cfg", cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(text="OK from https://bar2.verifier", severity=logging.INFO),
            validation_context.Message(text="OK from https://ultimate.verifier", severity=logging.INFO),
        ],
    )
    net.json_request_async.assert_any_call(
        "https://bar2.verifier",
        method="POST",
        payload={"config_set": "projects/foo", "path": "bar.cfg", "content": cfg_b64},
        scopes=net.EMAIL_SCOPE,
    )
    net.json_request_async.assert_any_call(
        "https://ultimate.verifier",
        method="POST",
        payload={"config_set": "projects/foo", "path": "bar.cfg", "content": cfg_b64},
        scopes=net.EMAIL_SCOPE,
    )

    ############################################################################
    # Error found
    net.json_request_async.side_effect = None
    net.json_request_async.return_value = ndb.Future()
    net.json_request_async.return_value.set_result({"messages": [{"text": "error", "severity": "ERROR"}]})
    result = validation.validate_config("projects/baz/refs/x", "qux.cfg", cfg)
    self.assertEqual(result.messages, [validation_context.Message(text="error", severity=logging.ERROR)])

    ############################################################################
    # Less-expected responses
    # Bad severity / empty message / non-dict message are each surfaced.
    res = {"messages": [{"severity": "invalid severity"}, {}, []]}
    net.json_request_async.return_value = ndb.Future()
    net.json_request_async.return_value.set_result(res)
    result = validation.validate_config("projects/baz/refs/x", "qux.cfg", cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(
                severity=logging.CRITICAL,
                text=(
                    "Error during external validation: invalid response: "
                    "unexpected message severity: invalid severity\n"
                    "url: https://ultimate.verifier\n"
                    "config_set: projects/baz/refs/x\n"
                    "path: qux.cfg\n"
                    "response: %r" % res
                ),
            ),
            validation_context.Message(severity=logging.INFO, text=""),
            validation_context.Message(
                severity=logging.CRITICAL,
                text=(
                    "Error during external validation: invalid response: "
                    "message is not a dict: []\n"
                    "url: https://ultimate.verifier\n"
                    "config_set: projects/baz/refs/x\n"
                    "path: qux.cfg\n"
                    "response: %r" % res
                ),
            ),
        ],
    )
def test_validation_by_service_async(self):
    """External validation dispatch: a config is sent to every service whose
    pattern matches, results are merged, and malformed validator responses
    become CRITICAL messages."""
    cfg = '# a config'
    cfg_b64 = base64.b64encode(cfg)
    self.services = [
        service_config_pb2.Service(id='a'),
        service_config_pb2.Service(id='b'),
        service_config_pb2.Service(id='c'),
    ]

    # Three mocked services: 'a' matches an exact config set/path, 'b' matches
    # any project config, 'c' matches everything.
    @ndb.tasklet
    def get_metadata_async(service_id):
        if service_id == 'a':
            raise ndb.Return(
                service_config_pb2.ServiceDynamicMetadata(
                    validation=service_config_pb2.Validator(
                        patterns=[
                            service_config_pb2.ConfigPattern(
                                config_set='services/foo',
                                path='bar.cfg',
                            )
                        ],
                        url='https://bar.verifier',
                    )))
        if service_id == 'b':
            raise ndb.Return(
                service_config_pb2.ServiceDynamicMetadata(
                    validation=service_config_pb2.Validator(
                        patterns=[
                            service_config_pb2.ConfigPattern(
                                config_set=r'regex:projects/[^/]+',
                                path=r'regex:.+\.cfg',
                            )
                        ],
                        url='https://bar2.verifier',
                    )))
        if service_id == 'c':
            raise ndb.Return(
                service_config_pb2.ServiceDynamicMetadata(
                    validation=service_config_pb2.Validator(
                        patterns=[
                            service_config_pb2.ConfigPattern(
                                config_set=r'regex:.+',
                                path=r'regex:.+',
                            )
                        ],
                        url='https://ultimate.verifier',
                    )))
        return None

    self.mock(services, 'get_metadata_async', mock.Mock())
    services.get_metadata_async.side_effect = get_metadata_async

    # Default remote-validator response: one INFO-severity message per call.
    @ndb.tasklet
    def json_request_async(url, **kwargs):
        raise ndb.Return({
            'messages': [{
                'text': 'OK from %s' % url,
                # default severity
            }],
        })

    self.mock(net, 'json_request_async', mock.Mock(side_effect=json_request_async))

    ############################################################################
    # services/foo matches validators 'a' and 'c' (catch-all).

    result = validation.validate_config('services/foo', 'bar.cfg', cfg)
    self.assertEqual(result.messages, [
        validation_context.Message(text='OK from https://bar.verifier', severity=logging.INFO),
        validation_context.Message(
            text='OK from https://ultimate.verifier', severity=logging.INFO)
    ])
    net.json_request_async.assert_any_call(
        'https://bar.verifier',
        method='POST',
        payload={
            'config_set': 'services/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )
    net.json_request_async.assert_any_call(
        'https://ultimate.verifier',
        method='POST',
        payload={
            'config_set': 'services/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )

    ############################################################################
    # projects/foo matches validators 'b' (project regex) and 'c' (catch-all).

    result = validation.validate_config('projects/foo', 'bar.cfg', cfg)
    self.assertEqual(result.messages, [
        validation_context.Message(text='OK from https://bar2.verifier', severity=logging.INFO),
        validation_context.Message(
            text='OK from https://ultimate.verifier', severity=logging.INFO)
    ])
    net.json_request_async.assert_any_call(
        'https://bar2.verifier',
        method='POST',
        payload={
            'config_set': 'projects/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )
    net.json_request_async.assert_any_call(
        'https://ultimate.verifier',
        method='POST',
        payload={
            'config_set': 'projects/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )

    ############################################################################
    # Error found
    net.json_request_async.side_effect = None
    net.json_request_async.return_value = ndb.Future()
    net.json_request_async.return_value.set_result(
        {'messages': [{
            'text': 'error',
            'severity': 'ERROR'
        }]})
    result = validation.validate_config('projects/baz/refs/x', 'qux.cfg', cfg)
    self.assertEqual(
        result.messages,
        [validation_context.Message(text='error', severity=logging.ERROR)])

    ############################################################################
    # Less-expected responses
    # Bad severity / empty message / non-dict message are each surfaced.
    res = {'messages': [{'severity': 'invalid severity'}, {}, []]}
    net.json_request_async.return_value = ndb.Future()
    net.json_request_async.return_value.set_result(res)
    result = validation.validate_config('projects/baz/refs/x', 'qux.cfg', cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(
                severity=logging.CRITICAL,
                text=(
                    'Error during external validation: invalid response: '
                    'unexpected message severity: invalid severity\n'
                    'url: https://ultimate.verifier\n'
                    'config_set: projects/baz/refs/x\n'
                    'path: qux.cfg\n'
                    'response: %r' % res),
            ),
            validation_context.Message(severity=logging.INFO, text=''),
            validation_context.Message(
                severity=logging.CRITICAL,
                text=(
                    'Error during external validation: invalid response: '
                    'message is not a dict: []\n'
                    'url: https://ultimate.verifier\n'
                    'config_set: projects/baz/refs/x\n'
                    'path: qux.cfg\n'
                    'response: %r' % res),
            ),
        ],
    )
def test_validation_by_service_async(self):
    """External validation dispatch: a config is sent to every service whose
    pattern matches, results are merged, and malformed validator responses
    become CRITICAL messages."""
    cfg = '# a config'
    cfg_b64 = base64.b64encode(cfg)
    self.services = [
        service_config_pb2.Service(id='a'),
        service_config_pb2.Service(id='b'),
        service_config_pb2.Service(id='c'),
    ]

    # Three mocked services: 'a' matches an exact config set/path, 'b' matches
    # any project config, 'c' matches everything.
    @ndb.tasklet
    def get_metadata_async(service_id):
        if service_id == 'a':
            raise ndb.Return(service_config_pb2.ServiceDynamicMetadata(
                validation=service_config_pb2.Validator(
                    patterns=[service_config_pb2.ConfigPattern(
                        config_set='services/foo',
                        path='bar.cfg',
                    )],
                    url='https://bar.verifier',
                )
            ))
        if service_id == 'b':
            raise ndb.Return(service_config_pb2.ServiceDynamicMetadata(
                validation=service_config_pb2.Validator(
                    patterns=[service_config_pb2.ConfigPattern(
                        config_set=r'regex:projects/[^/]+',
                        path=r'regex:.+\.cfg',
                    )],
                    url='https://bar2.verifier',
                )))
        if service_id == 'c':
            raise ndb.Return(service_config_pb2.ServiceDynamicMetadata(
                validation=service_config_pb2.Validator(
                    patterns=[service_config_pb2.ConfigPattern(
                        config_set=r'regex:.+',
                        path=r'regex:.+',
                    )],
                    url='https://ultimate.verifier',
                )))
        return None

    self.mock(services, 'get_metadata_async', mock.Mock())
    services.get_metadata_async.side_effect = get_metadata_async

    # Default remote-validator response: one INFO-severity message per call.
    @ndb.tasklet
    def json_request_async(url, **kwargs):
        raise ndb.Return({
            'messages': [{
                'text': 'OK from %s' % url,
                # default severity
            }],
        })

    self.mock(
        net, 'json_request_async', mock.Mock(side_effect=json_request_async))

    ############################################################################
    # services/foo matches validators 'a' and 'c' (catch-all).

    result = validation.validate_config('services/foo', 'bar.cfg', cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(
                text='OK from https://bar.verifier', severity=logging.INFO),
            validation_context.Message(
                text='OK from https://ultimate.verifier', severity=logging.INFO)
        ])
    net.json_request_async.assert_any_call(
        'https://bar.verifier',
        method='POST',
        payload={
            'config_set': 'services/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )
    net.json_request_async.assert_any_call(
        'https://ultimate.verifier',
        method='POST',
        payload={
            'config_set': 'services/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )

    ############################################################################
    # projects/foo matches validators 'b' (project regex) and 'c' (catch-all).

    result = validation.validate_config('projects/foo', 'bar.cfg', cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(
                text='OK from https://bar2.verifier', severity=logging.INFO),
            validation_context.Message(
                text='OK from https://ultimate.verifier', severity=logging.INFO)
        ])
    net.json_request_async.assert_any_call(
        'https://bar2.verifier',
        method='POST',
        payload={
            'config_set': 'projects/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )
    net.json_request_async.assert_any_call(
        'https://ultimate.verifier',
        method='POST',
        payload={
            'config_set': 'projects/foo',
            'path': 'bar.cfg',
            'content': cfg_b64,
        },
        scopes=net.EMAIL_SCOPE,
    )

    ############################################################################
    # Error found
    net.json_request_async.side_effect = None
    net.json_request_async.return_value = ndb.Future()
    net.json_request_async.return_value.set_result({
        'messages': [{
            'text': 'error',
            'severity': 'ERROR'
        }]
    })
    result = validation.validate_config('projects/baz/refs/x', 'qux.cfg', cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(text='error', severity=logging.ERROR)
        ])

    ############################################################################
    # Less-expected responses
    # Bad severity / empty message / non-dict message are each surfaced.
    res = {
        'messages': [
            {'severity': 'invalid severity'},
            {},
            []
        ]
    }
    net.json_request_async.return_value = ndb.Future()
    net.json_request_async.return_value.set_result(res)
    result = validation.validate_config('projects/baz/refs/x', 'qux.cfg', cfg)
    self.assertEqual(
        result.messages,
        [
            validation_context.Message(
                severity=logging.CRITICAL,
                text=(
                    'Error during external validation: invalid response: '
                    'unexpected message severity: invalid severity\n'
                    'url: https://ultimate.verifier\n'
                    'config_set: projects/baz/refs/x\n'
                    'path: qux.cfg\n'
                    'response: %r' % res),
            ),
            validation_context.Message(severity=logging.INFO, text=''),
            validation_context.Message(
                severity=logging.CRITICAL,
                text=(
                    'Error during external validation: invalid response: '
                    'message is not a dict: []\n'
                    'url: https://ultimate.verifier\n'
                    'config_set: projects/baz/refs/x\n'
                    'path: qux.cfg\n'
                    'response: %r' % res),
            ),
        ],
    )