def test_active_subjects(self):
    '''GET /tasr/collection/subjects/active - gets _active_ subjects
    (that is, ones with at least one schema), as expected'''
    # register the target subject and an alt subject (no schemas yet)
    self.register_subject(self.event_type)
    alt_subject_name = 'bob'
    self.register_subject(alt_subject_name)
    # now get all and check the headers
    all_url = "%s/collection/subjects/all" % self.url_prefix
    resp = self.tasr_app.request(all_url, method='GET')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    # we should have a GroupMetadata object for each group in the headers
    for sub_name in [self.event_type, alt_subject_name]:
        self.assertIn(sub_name, meta_dict.keys(), 'missing subject')
        subj = meta_dict[sub_name]
        self.assertEqual(sub_name, subj.name, 'bad subject name')
    # now get the ACTIVE subjects, which should be empty so far
    active_url = "%s/collection/subjects/active" % self.url_prefix
    resp = self.tasr_app.request(active_url, method='GET')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    # we should have no GroupMetadata objects
    for sub_name in [self.event_type, alt_subject_name]:
        self.assertNotIn(sub_name, meta_dict.keys(), 'unexpected subject')
    # now register a schema for the base subject and recheck
    resp = self.register_schema(self.event_type, self.schema_str)
    self.abort_diff_status(resp, 201)
    # the get_all should be unchanged, the get_active should have one
    resp = self.tasr_app.request(all_url, method='GET')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    # we should have a GroupMetadata object for each group in the headers
    for sub_name in [self.event_type, alt_subject_name]:
        self.assertIn(sub_name, meta_dict.keys(), 'missing subject')
        subj = meta_dict[sub_name]
        self.assertEqual(sub_name, subj.name, 'bad subject name')
    # now get the ACTIVE subjects, which should include the base subject
    resp = self.tasr_app.request(active_url, method='GET')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    # we should have a GroupMetadata object for one group in the headers
    self.assertNotIn(alt_subject_name, meta_dict.keys(), 'unexpected obj')
    # the event_type should be there
    self.assertIn(self.event_type, meta_dict.keys(), 'missing subject')
    subj = meta_dict[self.event_type]
    self.assertEqual(self.event_type, subj.name, 'bad subject name')
    # lastly check the body
    buff = StringIO.StringIO(resp.body)
    group_names = []
    for topic_line in buff:
        group_names.append(topic_line.strip())
    buff.close()
    self.assertListEqual(sorted(group_names), sorted(meta_dict.keys()),
                         'Expected group_names in body to match headers.')
def test_get_all_topics(self):
    '''GET /tasr/topic - as expected'''
    # reg two vers for target topic and one for an alt topic
    self.register_schema(self.schema_str)
    schema_str_2 = self.schema_str.replace('tagged.events',
                                           'tagged.events.alt', 1)
    self.register_schema(schema_str_2)
    alt_topic = 'bob'
    alt_url = '%s/topic/%s' % (self.url_prefix, alt_topic)
    self.tasr_app.request(alt_url, method='PUT',
                          content_type=self.content_type,
                          body=self.schema_str)
    # now get all with versions and check the headers
    url = "%s/topic" % self.url_prefix
    resp = self.tasr_app.request(url, method='GET')
    self.abort_diff_status(resp, 200)
    # we expect a dict of SubjectMetadata objects keyed by topic name here
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    self.assertEqual(2, meta_dict[self.event_type].current_version,
                     'bad ver')
    self.assertEqual(1, meta_dict[alt_topic].current_version, 'bad ver')
    # lastly check the body
    buff = StringIO.StringIO(resp.body)
    group_names = []
    for topic_line in buff:
        group_names.append(topic_line.strip())
    buff.close()
    self.assertListEqual(sorted(group_names), sorted(meta_dict.keys()),
                         'Expected group_names in body to match headers.')
def get_all_subject_schema_ids(subject_name, host=TASR_HOST, port=TASR_PORT,
                               timeout=TIMEOUT):
    '''
    GET /tasr/subject/<subject name>/all_ids
    Retrieves a list of the SHA256 multi-type IDs for all the schema versions
    registered for a subject, in version order.
    '''
    url = 'http://%s:%s/tasr/subject/%s/all_ids' % (host, port, subject_name)
    resp = requests.get(url, timeout=timeout)
    if resp is None:
        raise TASRError('Timeout for get all subject IDs request.')
    if resp.status_code != 200:
        raise TASRError('Failed to get all subject IDs (status code: %s)' %
                        resp.status_code)
    meta = SubjectHeaderBot.extract_metadata(resp)[subject_name]
    # check that the header sha256_id_list matches the body list
    buff = StringIO.StringIO(resp.content)
    sha256_ids = []
    for line in buff:
        sha256_ids.append(line.strip())
    buff.close()
    if len(meta.sha256_id_list) != len(sha256_ids):
        raise TASRError('Header-body mismatch for sha256_id lists.')
    if meta.sha256_id_list != sha256_ids:
        raise TASRError('Header-body mismatch for sha256_id lists.')
    return meta.sha256_id_list
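# A minimal usage sketch for get_all_subject_schema_ids (illustrative only,
# not part of the client API).  It assumes a TASR server is reachable at the
# default TASR_HOST/TASR_PORT; the subject name used here is hypothetical.
def _example_print_schema_ids(subject_name='example.events'):
    '''Print the SHA256 multi-type ID of each registered schema version for
    a subject, in version order.'''
    try:
        ids = get_all_subject_schema_ids(subject_name)
        for version, sha256_id in enumerate(ids, 1):
            print('version %s: %s' % (version, sha256_id))
    except TASRError as terr:
        print('failed to fetch schema IDs: %s' % terr)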
def test_all_subject_names(self):
    '''GET /tasr/collection/subjects/all - get _all_ registered subjects'''
    # register the target subject and an alt subject
    self.register_subject(self.event_type)
    alt_subject_name = 'bob'
    self.register_subject(alt_subject_name)
    # now get all and check the headers
    get_url = '%s/collection/subjects/all' % self.url_prefix
    resp = self.tasr_app.request(get_url, method='GET')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    self.assertIn(self.event_type, meta_dict.keys(), 'missing subject')
    subj = meta_dict[self.event_type]
    self.assertEqual(self.event_type, subj.name, 'bad subject name')
    self.assertIn(alt_subject_name, meta_dict.keys(), 'missing subject')
    alt_subj = meta_dict[alt_subject_name]
    self.assertEqual(alt_subject_name, alt_subj.name, 'bad subject name')
    # lastly check the body
    buff = StringIO.StringIO(resp.body)
    group_names = []
    for topic_line in buff:
        group_names.append(topic_line.strip())
    buff.close()
    self.assertListEqual(sorted(group_names), sorted(meta_dict.keys()),
                         'Expected group_names in body to match headers.')
def get_all_subject_schemas(subject_name, host=TASR_HOST, port=TASR_PORT,
                            timeout=TIMEOUT):
    '''
    GET /tasr/subject/<subject name>/all_schemas
    Retrieves all the (canonical) schema versions registered for a subject,
    in version order, one per line in the response body.  The multi-type IDs
    are included in the headers for confirmation.
    '''
    url = ('http://%s:%s/tasr/subject/%s/all_schemas' %
           (host, port, subject_name))
    resp = requests.get(url, timeout=timeout)
    if resp is None:
        raise TASRError('Timeout for get all subject schemas request.')
    if resp.status_code != 200:
        raise TASRError('Failed to get all subject schemas (status code: %s)'
                        % resp.status_code)
    meta = SubjectHeaderBot.extract_metadata(resp)[subject_name]
    buff = StringIO.StringIO(resp.content)
    schemas = []
    version = 1
    for schema_str in buff:
        ras = RegisteredAvroSchema()
        ras.schema_str = schema_str.strip()
        ras.gv_dict[subject_name] = version
        if ras.sha256_id != meta.sha256_id_list[version - 1]:
            raise TASRError('Generated SHA256 ID did not match passed ID.')
        schemas.append(ras)
        version += 1
    buff.close()
    return schemas
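# A minimal usage sketch for get_all_subject_schemas (illustrative only).  It
# assumes a running TASR server at the default TASR_HOST/TASR_PORT; the
# subject name and helper name are hypothetical.
def _example_print_schemas(subject_name='example.events'):
    '''Print each registered schema version for a subject alongside its
    SHA256 multi-type ID.'''
    try:
        for ras in get_all_subject_schemas(subject_name):
            print('%s: %s' % (ras.sha256_id, ras.schema_str))
    except TASRError as terr:
        print('failed to fetch schemas: %s' % terr)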
def get_all_subject_names(host=TASR_HOST, port=TASR_PORT, timeout=TIMEOUT):
    '''
    GET /tasr/collection/subjects/all
    Retrieves all the registered subject names, both as X-TASR header fields
    and as plain text, one per line, in the response body.  This method
    returns a list of subject name strings.
    '''
    url = 'http://%s:%s/tasr/collection/subjects/all' % (host, port)
    resp = requests.get(url, timeout=timeout)
    if resp is None:
        raise TASRError('Timeout for get all subjects request.')
    if resp.status_code != 200:
        raise TASRError('Failed to get all subjects (status code: %s)' %
                        resp.status_code)
    subject_metas = SubjectHeaderBot.extract_metadata(resp)
    # check that subject_metas.keys() matches the body list
    buff = StringIO.StringIO(resp.content)
    name_list = []
    for line in buff:
        name_list.append(line.strip())
    buff.close()
    if len(subject_metas.keys()) != len(name_list):
        raise TASRError('Header-body mismatch for subject name lists.')
    if sorted(subject_metas.keys()) != sorted(name_list):
        raise TASRError('Header-body mismatch for subject name lists.')
    return subject_metas.keys()
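# A minimal usage sketch for get_all_subject_names (illustrative only).  It
# assumes a TASR server is reachable at the default TASR_HOST/TASR_PORT.
def _example_list_subject_names():
    '''Print every registered subject name, one per line.'''
    try:
        for name in get_all_subject_names():
            print(name)
    except TASRError as terr:
        print('failed to list subjects: %s' % terr)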
def test_register_subject(self):
    '''PUT /tasr/subject - registers the subject (not the schema)'''
    resp = self.register_subject(self.event_type)
    self.abort_diff_status(resp, 201)
    metas = SubjectHeaderBot.extract_metadata(resp)
    self.assertEqual(self.event_type, metas[self.event_type].name,
                     'unexpected subject name')
def test_lookup_subject(self):
    '''GET /tasr/subject/<subject> - lookup the subject by name'''
    self.register_subject(self.event_type)
    resp = self.tasr_app.request(self.subject_url, method='GET')
    self.abort_diff_status(resp, 200)
    metas = SubjectHeaderBot.extract_metadata(resp)
    self.assertEqual(self.event_type, metas[self.event_type].name,
                     'unexpected subject name')
def test_reg_and_rereg_subject(self):
    '''PUT /tasr/subject - registers the subject (not the schema), then
    re-registers the same subject.  The second reg should return a 200.'''
    resp = self.register_subject(self.event_type)
    self.abort_diff_status(resp, 201)
    resp = self.register_subject(self.event_type)
    self.abort_diff_status(resp, 200)
    metas = SubjectHeaderBot.extract_metadata(resp)
    self.assertEqual(self.event_type, metas[self.event_type].name,
                     'unexpected subject name')
def test_active_subjects__accept_json(self):
    '''GET /tasr/collection/subjects/active - gets _active_ subjects
    (that is, ones with at least one schema), as expected'''
    # register the target subject and an alt subject (no schemas yet)
    self.register_subject(self.event_type)
    alt_subject_name = 'bob'
    self.register_subject(alt_subject_name)
    # now get the ACTIVE subjects, which should be empty so far
    active_url = "%s/collection/subjects/active" % self.url_prefix
    resp = self.tasr_app.request(active_url, method='GET',
                                 accept='text/json')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    # we should have no GroupMetadata objects
    for sub_name in [self.event_type, alt_subject_name]:
        self.assertNotIn(sub_name, meta_dict.keys(), 'unexpected subject')
    # we should have received an empty dict as the content body
    active_dict = json.loads(resp.body)
    self.assertDictEqual({}, active_dict, 'expected empty dict')
    # now register a schema for the base subject and recheck
    resp = self.register_schema(self.event_type, self.schema_str)
    self.abort_diff_status(resp, 201)
    # now get the ACTIVE subjects, which should include the base subject
    resp = self.tasr_app.request(active_url, method='GET',
                                 accept='text/json')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    # we should have a GroupMetadata object for one group in the headers
    self.assertNotIn(alt_subject_name, meta_dict.keys(), 'unexpected obj')
    # the event_type should be there
    self.assertIn(self.event_type, meta_dict.keys(), 'missing subject')
    subj = meta_dict[self.event_type]
    self.assertEqual(self.event_type, subj.name, 'bad subject name')
    # and check the expected content body JSON
    active_dict = json.loads(resp.body)
    self.assertListEqual(sorted(active_dict.keys()),
                         sorted(meta_dict.keys()),
                         'Expected group_names in body to match headers.')
def get_all_topics(host=TASR_HOST, port=TASR_PORT, timeout=TIMEOUT):
    '''
    GET /tasr/topic
    Retrieves available metadata for all the topics (i.e. -- groups) with
    registered schemas.  A dict of <topic name>:<topic metadata> is returned.
    '''
    url = 'http://%s:%s/tasr/topic' % (host, port)
    resp = requests.get(url, timeout=timeout)
    if resp is None:
        raise TASRError('Timeout for request to %s' % url)
    if resp.status_code != 200:
        raise TASRError('Failed request to %s (status code: %s)' %
                        (url, resp.status_code))
    topic_metas = SubjectHeaderBot.extract_metadata(resp)
    return topic_metas
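# A minimal usage sketch for get_all_topics (illustrative only).  It assumes
# a TASR server at the default TASR_HOST/TASR_PORT and relies on the
# current_version field exposed by the returned metadata objects.
def _example_print_topic_versions():
    '''Print each registered topic name with its current version number.'''
    try:
        for name, meta in get_all_topics().items():
            print('%s: version %s' % (name, meta.current_version))
    except TASRError as terr:
        print('failed to list topics: %s' % terr)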
def test_register_subject__accept_json(self):
    '''PUT /tasr/subject - registers the subject (not the schema)'''
    url = '%s/subject/%s' % (self.url_prefix, self.event_type)
    dummy_config = {'dummy_config_key': 'dummy_config_val'}
    resp = self.tasr_app.put(url, dummy_config, {'Accept': 'text/json'})
    self.abort_diff_status(resp, 201)
    metas = SubjectHeaderBot.extract_metadata(resp)
    self.assertEqual(self.event_type, metas[self.event_type].name,
                     'unexpected subject name')
    # check the returned JSON to ensure it worked
    json_sub = json.loads(resp.body)
    self.assertEqual(self.event_type, json_sub["subject_name"],
                     "bad subject name")
    self.assertEqual(dummy_config, json_sub["config"], "bad config")
def register_subject(subject_name, config_dict=None, host=TASR_HOST,
                     port=TASR_PORT, timeout=TIMEOUT):
    '''
    PUT /tasr/subject/<subject name>
    Registers a _subject_ (not a schema), ensuring that the group can be
    established before associating schemas with it.  Note that if a form is
    sent as the PUT body, it should be used to set the subject config map.
    Returns a GroupMetadata object on success.
    '''
    url = 'http://%s:%s/tasr/subject/%s' % (host, port, subject_name)
    resp = requests.put(url, data=config_dict, timeout=timeout)
    if resp is None:
        raise TASRError('Timeout for register subject request.')
    if resp.status_code not in (200, 201):
        raise TASRError('Failed to register subject. (status code: %s)' %
                        resp.status_code)
    subject_metas = SubjectHeaderBot.extract_metadata(resp)
    if subject_metas and len(subject_metas) > 0:
        return subject_metas[subject_name]
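# A minimal usage sketch for register_subject (illustrative only).  It
# assumes a TASR server at the default TASR_HOST/TASR_PORT; the subject name
# and config values are hypothetical.
def _example_register_subject(subject_name='example.events'):
    '''Register a subject with a simple config map and print the name from
    the returned group metadata.'''
    try:
        meta = register_subject(subject_name, {'owner': 'data-team'})
        if meta:
            print('registered subject: %s' % meta.name)
    except TASRError as terr:
        print('failed to register subject: %s' % terr)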
def test_all_subject_names__accept_json(self):
    '''GET /tasr/collection/subjects/all - get _all_ registered subjects'''
    # register the target subject and an alt subject
    self.register_subject(self.event_type)
    alt_subject_name = 'bob'
    self.register_subject(alt_subject_name)
    # now get all and check the headers
    get_url = '%s/collection/subjects/all' % self.url_prefix
    resp = self.tasr_app.request(get_url, method='GET', accept='text/json')
    self.abort_diff_status(resp, 200)
    meta_dict = SubjectHeaderBot.extract_metadata(resp)
    self.assertIn(self.event_type, meta_dict.keys(), 'missing subject')
    subj = meta_dict[self.event_type]
    self.assertEqual(self.event_type, subj.name, 'bad subject name')
    self.assertIn(alt_subject_name, meta_dict.keys(), 'missing subject')
    alt_subj = meta_dict[alt_subject_name]
    self.assertEqual(alt_subject_name, alt_subj.name, 'bad subject name')
    # the body should be a JSON dict of subject objects keyed by name
    sub_dict = json.loads(resp.body)
    self.assertListEqual(sorted(sub_dict.keys()), sorted(meta_dict.keys()),
                         'Expected group_names in body to match headers.')