def _listing_test(self, count=10, limit=10, marker=None, detailed=False):
    """List pools and follow the 'next' link, verifying both pages."""
    # NOTE(cpp-cabrera): delete initial pool - it will interfere
    # with listing tests
    self.simulate_delete(self.pool)
    query = '?limit={0}&detailed={1}'.format(limit, detailed)
    if marker:
        query += '&marker={0}'.format(marker)

    with pools(self, count, self.doc['uri'], 'my-group') as expected:
        result = self.simulate_get(self.url_prefix + '/pools',
                                   query_string=query)
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        results = jsonutils.loads(result[0])
        self.assertIsInstance(results, dict)
        self.assertIn('pools', results)
        self.assertIn('links', results)
        pool_list = results['pools']

        link = results['links'][0]
        self.assertEqual('next', link['rel'])
        href = falcon.uri.parse_query_string(link['href'])
        self.assertIn('marker', href)
        self.assertEqual(href['limit'], str(limit))
        self.assertEqual(href['detailed'], str(detailed).lower())

        next_query_string = ('?marker={marker}&limit={limit}'
                             '&detailed={detailed}').format(**href)
        next_result = self.simulate_get(link['href'].split('?')[0],
                                        query_string=next_query_string)
        self.assertEqual(self.srmock.status, falcon.HTTP_200)

        next_pool = jsonutils.loads(next_result[0])
        next_pool_list = next_pool['pools']

        self.assertIn('links', next_pool)
        if limit < count:
            self.assertEqual(len(next_pool_list),
                             min(limit, count - limit))
        else:
            # NOTE(jeffrey4l): when limit >= count, there will be no
            # pools in the 2nd page.
            self.assertTrue(len(next_pool_list) == 0)

        self.assertEqual(len(pool_list), min(limit, count))
        for s in pool_list + next_pool_list:
            # NOTE(flwang): It can't assumed that both sqlalchemy and
            # mongodb can return query result with the same order. Just
            # like the order they're inserted. Actually, sqlalchemy can't
            # guarantee that. So we're leveraging the relationship between
            # pool weight and the index of pools fixture to get the
            # right pool to verify.
            expect = expected[s['weight']]
            path, weight, group = expect[:3]
            self._pool_expect(s, path, weight, self.doc['uri'])
            if detailed:
                self.assertIn('options', s)
                self.assertEqual(s['options'], expect[-1])
            else:
                self.assertNotIn('options', s)
def test_update_metadata(self):
    """PUT metadata twice and verify the second value is returned."""
    xyz_queue_path = self.url_prefix + '/queues/xyz'
    xyz_queue_path_metadata = xyz_queue_path + '/metadata'

    # Create
    project_id = '480924'
    self.simulate_put(xyz_queue_path, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    # Set meta
    doc1 = '{"messages": {"ttl": 600}}'
    self.simulate_put(xyz_queue_path_metadata, project_id, body=doc1)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Update
    doc2 = '{"messages": {"ttl": 100}}'
    self.simulate_put(xyz_queue_path_metadata, project_id, body=doc2)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Get
    result = self.simulate_get(xyz_queue_path_metadata, project_id)
    result_doc = jsonutils.loads(result[0])

    self.assertEqual(result_doc, jsonutils.loads(doc2))
    self.assertEqual(self.srmock.headers_dict['Content-Location'],
                     xyz_queue_path_metadata)
def test_update_metadata(self):
    """Update queue metadata via PUT (skipped; should use PATCH)."""
    self.skip("This should use patch instead")
    xyz_queue_path = self.url_prefix + '/queues/xyz'
    xyz_queue_path_metadata = xyz_queue_path

    # Create
    self.simulate_put(xyz_queue_path, headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    # Set meta
    doc1 = '{"messages": {"ttl": 600}}'
    self.simulate_put(xyz_queue_path_metadata,
                      headers=self.headers,
                      body=doc1)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Update
    doc2 = '{"messages": {"ttl": 100}}'
    self.simulate_put(xyz_queue_path_metadata,
                      headers=self.headers,
                      body=doc2)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Get
    result = self.simulate_get(xyz_queue_path_metadata,
                               headers=self.headers)
    result_doc = jsonutils.loads(result[0])

    self.assertEqual(result_doc, jsonutils.loads(doc2))
def test_simple(self):
    """Every queue operation returns 503 when the driver is faulty."""
    self.headers = {
        'Client-ID': str(uuid.uuid4()),
        'X-Project-ID': '338730984abc_1'
    }

    gumshoe_queue_path = self.url_prefix + '/queues/gumshoe'
    doc = '{"messages": {"ttl": 600}}'
    self.simulate_put(gumshoe_queue_path,
                      headers=self.headers,
                      body=doc)
    self.assertEqual(self.srmock.status, falcon.HTTP_503)

    location = ('Location', gumshoe_queue_path)
    self.assertNotIn(location, self.srmock.headers)

    result = self.simulate_get(gumshoe_queue_path,
                               headers=self.headers)
    result_doc = jsonutils.loads(result[0])
    self.assertEqual(self.srmock.status, falcon.HTTP_503)
    self.assertNotEqual(result_doc, jsonutils.loads(doc))

    self.simulate_get(gumshoe_queue_path + '/stats',
                      headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_503)

    self.simulate_get(self.url_prefix + '/queues',
                      headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_503)

    self.simulate_delete(gumshoe_queue_path, headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_503)
def test_basics_thoroughly(self, project_id):
    """Exercise the full queue lifecycle: create, meta, stats, delete."""
    gumshoe_queue_path_metadata = self.gumshoe_queue_path + '/metadata'
    gumshoe_queue_path_stats = self.gumshoe_queue_path + '/stats'

    # Stats not found - queue not created yet
    self.simulate_get(gumshoe_queue_path_stats, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)

    # Metadata not found - queue not created yet
    self.simulate_get(gumshoe_queue_path_metadata, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)

    # Create
    self.simulate_put(self.gumshoe_queue_path, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    location = self.srmock.headers_dict['Location']
    self.assertEqual(location, self.gumshoe_queue_path)

    # Ensure queue existence
    self.simulate_head(self.gumshoe_queue_path, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Add metadata
    doc = '{"messages": {"ttl": 600}}'
    self.simulate_put(gumshoe_queue_path_metadata, project_id, body=doc)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Fetch metadata
    result = self.simulate_get(gumshoe_queue_path_metadata, project_id)
    result_doc = jsonutils.loads(result[0])
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertEqual(result_doc, jsonutils.loads(doc))

    # Stats empty queue
    self.simulate_get(gumshoe_queue_path_stats, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    # Delete
    self.simulate_delete(self.gumshoe_queue_path, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Get non-existent queue
    self.simulate_get(self.gumshoe_queue_path, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)

    # Get non-existent stats
    self.simulate_get(gumshoe_queue_path_stats, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)

    # Get non-existent metadata
    self.simulate_get(gumshoe_queue_path_metadata, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)
def test_list(self):
    """Page through messages via 'next' links, then check queue stats."""
    path = self.queue_path + '/messages'
    self._post_messages(path, repeat=10)

    query_string = 'limit=3&echo=true'
    body = self.simulate_get(path,
                             query_string=query_string,
                             headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertEqual(self.srmock.headers_dict['Content-Location'],
                     path + '?' + query_string)

    cnt = 0
    while jsonutils.loads(body[0])['messages'] != []:
        contents = jsonutils.loads(body[0])
        [target, params] = contents['links'][0]['href'].split('?')

        for msg in contents['messages']:
            self.simulate_get(msg['href'], headers=self.headers)
            self.assertEqual(self.srmock.status, falcon.HTTP_200)

        body = self.simulate_get(target,
                                 query_string=params,
                                 headers=self.headers)
        cnt += 1

    self.assertEqual(cnt, 4)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self._empty_message_list(body)

    # Stats
    body = self.simulate_get(self.queue_path + '/stats',
                             headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    message_stats = jsonutils.loads(body[0])['messages']
    self.assertEqual(self.srmock.headers_dict['Content-Location'],
                     self.queue_path + '/stats')

    # NOTE(kgriffs): The other parts of the stats are tested
    # in tests.storage.base and so are not repeated here.
    expected_pattern = self.queue_path + '/messages/[^/]+$'
    for message_stat_name in ('oldest', 'newest'):
        self.assertThat(message_stats[message_stat_name]['href'],
                        matchers.MatchesRegex(expected_pattern))

    # NOTE(kgriffs): Try to get messages for a missing queue
    body = self.simulate_get(self.url_prefix +
                             '/queues/nonexistent/messages',
                             headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self._empty_message_list(body)
def _listing_test(self, count=10, limit=10, marker=None, detailed=False):
    """List flavors and follow the 'next' link, verifying both pages."""
    # NOTE(cpp-cabrera): delete initial flavor - it will interfere
    # with listing tests
    self.simulate_delete(self.flavor_path)
    query = '?limit={0}&detailed={1}'.format(limit, detailed)
    if marker:
        # NOTE(review): was '{2}', which raises IndexError since only one
        # positional argument is supplied to format(); '{0}' is correct.
        query += '&marker={0}'.format(marker)

    with flavors(self, count, self.doc['pool']) as expected:
        result = self.simulate_get(self.url_prefix + '/flavors',
                                   query_string=query)
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        results = jsonutils.loads(result[0])
        self.assertIsInstance(results, dict)
        self.assertIn('flavors', results)
        self.assertIn('links', results)
        flavors_list = results['flavors']

        link = results['links'][0]
        self.assertEqual('next', link['rel'])
        href = falcon.uri.parse_query_string(link['href'])
        self.assertIn('marker', href)
        self.assertEqual(href['limit'], str(limit))
        self.assertEqual(href['detailed'], str(detailed).lower())

        next_query_string = ('?marker={marker}&limit={limit}'
                             '&detailed={detailed}').format(**href)
        next_result = self.simulate_get(link['href'].split('?')[0],
                                        query_string=next_query_string)
        next_flavors = jsonutils.loads(next_result[0])
        next_flavors_list = next_flavors['flavors']

        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.assertIn('links', next_flavors)
        if limit < count:
            self.assertEqual(len(next_flavors_list),
                             min(limit, count - limit))
        else:
            self.assertTrue(len(next_flavors_list) == 0)

        self.assertEqual(len(flavors_list), min(limit, count))
        for i, s in enumerate(flavors_list + next_flavors_list):
            expect = expected[i]
            path, capabilities = expect[:2]
            self._flavor_expect(s, path, self.doc['pool'])
            if detailed:
                self.assertIn('capabilities', s)
                self.assertEqual(s['capabilities'], capabilities)
            else:
                self.assertNotIn('capabilities', s)
def test_get_claimed_contains_claim_id_in_href(self):
    """Message hrefs gain a claim_id param once the queue is claimed."""
    path = self.queue_path
    res = self._post_messages(path + '/messages', repeat=5)

    for url in jsonutils.loads(res[0])['resources']:
        message = self.simulate_get(url)
        self.assertNotIn('claim_id',
                         jsonutils.loads(message[0])['href'])

    self.simulate_post(path + '/claims',
                       body='{"ttl": 100, "grace": 100}',
                       headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    for url in jsonutils.loads(res[0])['resources']:
        message = self.simulate_get(url)
        self.assertIn('claim_id', jsonutils.loads(message[0])['href'])
def _listing_test(self, count=10, limit=10, marker=None, detailed=False):
    """List flavors and verify count and per-flavor fields."""
    # NOTE(cpp-cabrera): delete initial flavor - it will interfere
    # with listing tests
    self.simulate_delete(self.flavor_path)
    query = '?limit={0}&detailed={1}'.format(limit, detailed)
    if marker:
        # NOTE(review): was '{2}', which raises IndexError since only one
        # positional argument is supplied to format(); '{0}' is correct.
        query += '&marker={0}'.format(marker)

    with flavors(self, count, self.doc['pool']) as expected:
        result = self.simulate_get(self.url_prefix + '/flavors',
                                   query_string=query)
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        results = jsonutils.loads(result[0])
        self.assertIsInstance(results, dict)
        self.assertIn('flavors', results)
        flavors_list = results['flavors']

        self.assertEqual(len(flavors_list), min(limit, count))
        for i, s in enumerate(flavors_list):
            expect = expected[i]
            path, capabilities = expect[:2]
            self._flavor_expect(s, path, self.doc['pool'])
            if detailed:
                self.assertIn('capabilities', s)
                self.assertEqual(s['capabilities'], capabilities)
            else:
                self.assertNotIn('capabilities', s)
def test_empty_listing(self):
    """Listing with no flavors returns 200 with an empty collection."""
    self.simulate_delete(self.flavor_path)
    result = self.simulate_get(self.url_prefix + '/flavors')
    results = jsonutils.loads(result[0])

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertTrue(len(results['flavors']) == 0)
    self.assertIn('links', results)
def _listing_test(self, count=10, limit=10, marker=None, detailed=False):
    """List pools and verify count and per-pool fields."""
    # NOTE(cpp-cabrera): delete initial pool - it will interfere
    # with listing tests
    self.simulate_delete(self.pool)
    query = '?limit={0}&detailed={1}'.format(limit, detailed)
    if marker:
        # NOTE(review): was '{2}', which raises IndexError since only one
        # positional argument is supplied to format(); '{0}' is correct.
        query += '&marker={0}'.format(marker)

    with pools(self, count, self.doc['uri']) as expected:
        result = self.simulate_get(self.url_prefix + '/pools',
                                   query_string=query)
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        results = jsonutils.loads(result[0])
        self.assertIsInstance(results, dict)
        self.assertIn('pools', results)
        pool_list = results['pools']
        self.assertEqual(len(pool_list), min(limit, count))
        for s in pool_list:
            # NOTE(flwang): It can't assumed that both sqlalchemy and
            # mongodb can return query result with the same order. Just
            # like the order they're inserted. Actually, sqlalchemy can't
            # guarantee that. So we're leveraging the relationship between
            # pool weight and the index of pools fixture to get the
            # right pool to verify.
            expect = expected[s['weight']]
            path, weight = expect[:2]
            self._pool_expect(s, path, weight, self.doc['uri'])
            if detailed:
                self.assertIn('options', s)
                self.assertEqual(s['options'], expect[-1])
            else:
                self.assertNotIn('options', s)
def test_detailed_get_works(self):
    """GET ?detailed=True on a flavor includes its capabilities."""
    result = self.simulate_get(self.flavor_path,
                               query_string='?detailed=True')
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    pool = jsonutils.loads(result[0])
    self._flavor_expect(pool, self.flavor_path, self.doc['pool'])
    self.assertIn('capabilities', pool)
    self.assertEqual(pool['capabilities'], {})
def test_pop_empty_queue(self):
    """Popping from an empty queue yields 200 and an empty list."""
    query_string = 'pop=1'
    result = self.simulate_delete(self.messages_path, self.project_id,
                                  query_string=query_string)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    result_doc = jsonutils.loads(result[0])
    self.assertEqual(result_doc['messages'], [])
def test_detailed_get_works(self):
    """GET ?detailed=True on a pool includes its options."""
    result = self.simulate_get(self.pool,
                               query_string='?detailed=True')
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    pool = jsonutils.loads(result[0])
    self._pool_expect(pool, self.pool, self.doc['weight'],
                      self.doc['uri'])
    self.assertIn('options', pool)
    self.assertEqual(pool['options'], {})
def test_basic(self):
    """The health endpoint reports reachable storage and passing ops."""
    path = self.url_prefix + '/health'
    body = self.simulate_get(path)
    health = jsonutils.loads(body[0])

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertTrue(health['storage_reachable'])
    self.assertIsNotNone(health['message_volume'])
    for op in health['operation_status']:
        self.assertTrue(health['operation_status'][op]['succeeded'])
def test_message_listing(self):
    """A listing is capped at the default messages-per-page limit."""
    self._prepare_messages(storage.DEFAULT_MESSAGES_PER_PAGE + 1)

    result = self.simulate_get(self.messages_path,
                               headers={'Client-ID': str(uuid.uuid4())})
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    messages = jsonutils.loads(result[0])['messages']
    self.assertEqual(len(messages), storage.DEFAULT_MESSAGES_PER_PAGE)
def test_custom_metadata(self):
    """Metadata padded to exactly the max allowed size round-trips."""
    # Set
    doc = '{{"messages": {{"ttl": 600}}, "padding": "{pad}"}}'

    max_size = self.transport_cfg.max_queue_metadata
    # The template's escaped braces collapse to single characters when
    # formatted, so the rendered doc is len(doc) - 2 plus the padding.
    padding_len = max_size - (len(doc) - 2)

    doc = doc.format(pad='x' * padding_len)
    self.simulate_put(self.fizbat_queue_path,
                      headers=self.headers,
                      body=doc)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    # Get
    result = self.simulate_get(self.fizbat_queue_path,
                               headers=self.headers)
    result_doc = jsonutils.loads(result[0])
    self.assertEqual(result_doc, jsonutils.loads(doc))
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
def test_delete_message_with_invalid_claim_doesnt_delete_message(self):
    """DELETE with a bogus claim_id must leave the message intact."""
    path = self.queue_path
    resp = self._post_messages(path + '/messages', 1)
    location = jsonutils.loads(resp[0])['resources'][0]

    self.simulate_delete(location, query_string='claim_id=invalid')
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    self.simulate_get(location, self.project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
def test_claim_creation(self):
    """A claim grabs at most the default messages-per-claim limit."""
    self._prepare_messages(storage.DEFAULT_MESSAGES_PER_CLAIM + 1)

    result = self.simulate_post(self.claims_path,
                                body='{"ttl": 60, "grace": 60}')
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    messages = jsonutils.loads(result[0])
    self.assertEqual(len(messages), storage.DEFAULT_MESSAGES_PER_CLAIM)
def test_listing_marker_is_respected(self):
    """Listing with ?marker=3 skips earlier pools."""
    self.simulate_delete(self.pool)

    with pools(self, 10, self.doc['uri']) as expected:
        result = self.simulate_get(self.url_prefix + '/pools',
                                   query_string='?marker=3')
        self.assertEqual(self.srmock.status, falcon.HTTP_200)

        pool_list = jsonutils.loads(result[0])['pools']
        self.assertEqual(len(pool_list), 6)
        path, weight = expected[4][:2]
        self._pool_expect(pool_list[0], path, weight, self.doc['uri'])
def test_put_existing_overwrites(self):
    """Re-PUTting an existing pool overwrites it and returns 201."""
    # NOTE(cabrera): setUp creates default pool
    expect = self.doc
    self.simulate_put(self.pool, body=jsonutils.dumps(expect))
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    result = self.simulate_get(self.pool)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    doc = jsonutils.loads(result[0])
    self.assertEqual(doc['weight'], expect['weight'])
    self.assertEqual(doc['uri'], expect['uri'])
def test_listing_marker_is_respected(self):
    """Listing with ?marker=3 skips earlier flavors."""
    self.simulate_delete(self.flavor_path)

    with flavors(self, 10, self.doc['pool']) as expected:
        result = self.simulate_get(self.url_prefix + '/flavors',
                                   query_string='?marker=3')
        self.assertEqual(self.srmock.status, falcon.HTTP_200)

        flavor_list = jsonutils.loads(result[0])['flavors']
        self.assertEqual(len(flavor_list), 6)
        path, capabilities = expected[4][:2]
        self._flavor_expect(flavor_list[0], path, self.doc['pool'])
def test_default_ttl_and_grace(self):
    """An empty claim body gets the server's default TTL."""
    self.simulate_post(self.claims_path,
                       body='{}', headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    body = self.simulate_get(self.srmock.headers_dict['location'],
                             headers=self.headers)

    claim = jsonutils.loads(body[0])
    self.assertEqual(self.default_claim_ttl, claim['ttl'])
def test_queue_listing(self):
    """A listing is capped at the default queues-per-page limit."""
    # 2 queues to list
    self.simulate_put(self.queue_path + '/q2', headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    with self._prepare_queues(storage.DEFAULT_QUEUES_PER_PAGE + 1):
        result = self.simulate_get(self.queue_path,
                                   headers=self.headers)
        self.assertEqual(self.srmock.status, falcon.HTTP_200)

        queues = jsonutils.loads(result[0])['queues']
        self.assertEqual(len(queues), storage.DEFAULT_QUEUES_PER_PAGE)
def test_put_existing_overwrites(self):
    """Re-PUTting an existing flavor overwrites it and returns 201."""
    # NOTE(cabrera): setUp creates default flavor
    expect = self.doc
    self.simulate_put(self.flavor_path,
                      body=jsonutils.dumps(expect))
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    result = self.simulate_get(self.flavor_path)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    doc = jsonutils.loads(result[0])
    self.assertEqual(doc['pool'], expect['pool'])
def _patch_test(self, doc):
    """PATCH a flavor with *doc*, then verify via a detailed GET."""
    self.simulate_patch(self.flavor_path,
                        body=jsonutils.dumps(doc))
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    result = self.simulate_get(self.flavor_path,
                               query_string='?detailed=True')
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    pool = jsonutils.loads(result[0])
    self._flavor_expect(pool, self.flavor_path, doc['pool'])
    self.assertEqual(pool['capabilities'], doc['capabilities'])
def test_listing_marker_is_respected(self):
    """Listing grouped pools with ?marker=3 skips earlier pools."""
    self.simulate_delete(self.pool)

    with pools(self, 10, self.doc['uri'], 'my-group') as expected:
        result = self.simulate_get(self.url_prefix + '/pools',
                                   query_string='?marker=3')
        self.assertEqual(self.srmock.status, falcon.HTTP_200)

        pool_list = jsonutils.loads(result[0])['pools']
        self.assertEqual(len(pool_list), 6)
        path, weight = expected[4][:2]
        self._pool_expect(pool_list[0], path, weight, self.doc['uri'])
def test_claim_creation(self):
    """A claim grabs at most the default messages-per-claim limit."""
    self._prepare_messages(storage.DEFAULT_MESSAGES_PER_CLAIM + 1)

    result = self.simulate_post(self.claims_path,
                                body='{"ttl": 60, "grace": 60}',
                                headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    messages = jsonutils.loads(result[0])['messages']
    self.assertEqual(len(messages), storage.DEFAULT_MESSAGES_PER_CLAIM)
def _patch_test(self, doc):
    """PATCH a pool with *doc*, then verify via a detailed GET."""
    self.simulate_patch(self.pool,
                        body=jsonutils.dumps(doc))
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    result = self.simulate_get(self.pool,
                               query_string='?detailed=True')
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    pool = jsonutils.loads(result[0])
    self._pool_expect(pool, self.pool, doc['weight'], doc['uri'])
    self.assertEqual(pool['options'], doc['options'])
def test_delete_message_with_invalid_claim_doesnt_delete_message(self):
    """DELETE with a bogus claim_id is rejected and leaves the message."""
    path = self.queue_path
    resp = self._post_messages(path + '/messages', 1)
    location = jsonutils.loads(resp[0])['resources'][0]

    self.simulate_delete(location, query_string='claim_id=invalid',
                         headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_400)

    self.simulate_get(location, headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
def test_simple(self):
    """Every queue operation returns 503 when the driver is faulty."""
    gumshoe_queue_path = self.url_prefix + '/queues/gumshoe'
    doc = '{"messages": {"ttl": 600}}'
    self.simulate_put(gumshoe_queue_path, '480924', body=doc)
    self.assertEqual(self.srmock.status, falcon.HTTP_503)

    location = ('Location', gumshoe_queue_path)
    self.assertNotIn(location, self.srmock.headers)

    result = self.simulate_get(gumshoe_queue_path + '/metadata',
                               '480924')
    result_doc = jsonutils.loads(result[0])
    self.assertEqual(self.srmock.status, falcon.HTTP_503)
    self.assertNotEqual(result_doc, jsonutils.loads(doc))

    self.simulate_get(gumshoe_queue_path + '/stats', '480924')
    self.assertEqual(self.srmock.status, falcon.HTTP_503)

    self.simulate_get(self.url_prefix + '/queues', '480924')
    self.assertEqual(self.srmock.status, falcon.HTTP_503)

    self.simulate_delete(gumshoe_queue_path, '480924')
    self.assertEqual(self.srmock.status, falcon.HTTP_503)
def test_message_listing_different_id(self):
    """With echo=false, another client still sees the posted messages."""
    self._prepare_messages(storage.DEFAULT_MESSAGES_PER_PAGE + 1)

    headers = self.headers.copy()
    headers['Client-ID'] = str(uuid.uuid4())
    result = self.simulate_get(self.messages_path,
                               headers=headers,
                               query_string='echo=false')
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    messages = jsonutils.loads(result[0])['messages']
    self.assertEqual(len(messages), storage.DEFAULT_MESSAGES_PER_PAGE)
def test_post_optional_ttl(self):
    """A message posted without a TTL gets the configured default."""
    sample_messages = {
        'messages': [
            {'body': 239},
            {'body': {'key': 'value'}, 'ttl': 200},
        ],
    }

    # Manually check default TTL is max from config
    sample_doc = jsonutils.dumps(sample_messages)
    result = self.simulate_post(self.messages_path,
                                body=sample_doc, headers=self.headers)

    self.assertEqual(self.srmock.status, falcon.HTTP_201)
    result_doc = jsonutils.loads(result[0])

    href = result_doc['resources'][0]
    result = self.simulate_get(href, headers=self.headers)
    message = jsonutils.loads(result[0])

    self.assertEqual(self.default_message_ttl, message['ttl'])
def test_basics_thoroughly(self, project_id):
    """Exercise create, metadata fetch, stats, and delete for a queue."""
    headers = {
        'Client-ID': str(uuid.uuid4()),
        'X-Project-ID': project_id
    }
    gumshoe_queue_path_stats = self.gumshoe_queue_path + '/stats'

    # Stats are empty - queue not created yet
    self.simulate_get(gumshoe_queue_path_stats, headers=headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    # Create
    doc = '{"messages": {"ttl": 600}}'
    self.simulate_put(self.gumshoe_queue_path,
                      headers=headers, body=doc)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    location = self.srmock.headers_dict['Location']
    self.assertEqual(location, self.gumshoe_queue_path)

    # Fetch metadata
    result = self.simulate_get(self.gumshoe_queue_path,
                               headers=headers)
    result_doc = jsonutils.loads(result[0])
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertEqual(result_doc, jsonutils.loads(doc))

    # Stats empty queue
    self.simulate_get(gumshoe_queue_path_stats, headers=headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    # Delete
    self.simulate_delete(self.gumshoe_queue_path, headers=headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Get non-existent stats
    self.simulate_get(gumshoe_queue_path_stats, headers=headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
def test_operation_status(self, mock_messages_delete):
    """A failing delete op is reflected in the health report."""
    mock_messages_delete.side_effect = errors.NotPermitted()

    path = self.url_prefix + '/health'
    body = self.simulate_get(path)
    health = jsonutils.loads(body[0])
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    op_status = health['operation_status']
    for op in op_status.keys():
        if op == 'delete_messages':
            self.assertFalse(op_status[op]['succeeded'])
            self.assertIsNotNone(op_status[op]['ref'])
        else:
            self.assertTrue(op_status[op]['succeeded'])
def test_no_duplicated_messages_path_in_href(self):
    """Test for bug 1240897."""
    path = self.queue_path + '/messages'
    self._post_messages(path, repeat=1)

    msg_id = self._get_msg_id(self.srmock.headers_dict)

    query_string = 'ids=%s' % msg_id
    body = self.simulate_get(path,
                             query_string=query_string,
                             headers=self.headers)
    messages = jsonutils.loads(body[0])

    self.assertNotIn(self.queue_path + '/messages/messages',
                     messages['messages'][0]['href'])
def test_message_volume(self, mock_driver_get):
    """The health endpoint surfaces the driver's message-volume KPI."""
    def _health():
        KPI = {}
        KPI['message_volume'] = {'free': 1, 'claimed': 2, 'total': 3}
        return KPI

    mock_driver_get.side_effect = _health

    path = self.url_prefix + '/health'
    body = self.simulate_get(path)
    health = jsonutils.loads(body[0])
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    message_volume = health['message_volume']
    self.assertEqual(message_volume['free'], 1)
    self.assertEqual(message_volume['claimed'], 2)
    self.assertEqual(message_volume['total'], 3)
def test_message_listing_same_id(self):
    """echo=false hides a client's own messages; echo=true shows them."""
    self._prepare_messages(storage.DEFAULT_MESSAGES_PER_PAGE + 1)
    result = self.simulate_get(self.messages_path,
                               headers=self.headers,
                               query_string='echo=false')
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self._empty_message_list(result)

    self._prepare_messages(storage.DEFAULT_MESSAGES_PER_PAGE + 1)

    result = self.simulate_get(self.messages_path,
                               headers=self.headers,
                               query_string='echo=true')

    messages = jsonutils.loads(result[0])['messages']
    self.assertEqual(len(messages), storage.DEFAULT_MESSAGES_PER_PAGE)
def test_href_template(self):
    """Home-document href templates carry the version prefix and expand."""
    body = self.simulate_get(self.url_prefix)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    resp = jsonutils.loads(body[0])
    queue_href_template = resp['resources']['rel/queue']['href-template']
    path_1 = 'https://zaqar.example.com' + self.url_prefix
    path_2 = 'https://zaqar.example.com' + self.url_prefix + '/'

    # Verify all the href template start with the correct version prefix
    for resource in list(resp['resources']):
        self.assertTrue(
            resp['resources'][resource]['href-template'].startswith(
                self.url_prefix))

    url = urlparse.urljoin(path_1, queue_href_template)
    expected = ('https://zaqar.example.com' + self.url_prefix +
                '/queues/foo')
    self.assertEqual(url.format(queue_name='foo'), expected)

    url = urlparse.urljoin(path_2, queue_href_template)
    self.assertEqual(url.format(queue_name='foo'), expected)
def test_pop(self, message_count):
    """Pop removes exactly message_count messages from the queue."""
    self._post_messages(self.messages_path, repeat=message_count)
    msg_id = self._get_msg_id(self.srmock.headers_dict)
    target = self.messages_path + '/' + msg_id

    self.simulate_get(target, self.project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    query_string = 'pop=' + str(message_count)
    result = self.simulate_delete(self.messages_path, self.project_id,
                                  query_string=query_string)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    result_doc = jsonutils.loads(result[0])

    self.assertEqual(len(result_doc['messages']), message_count)

    self.simulate_get(target, self.project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)
def test_get_works(self):
    """GET on an existing pool returns its weight and uri."""
    result = self.simulate_get(self.pool)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    pool = jsonutils.loads(result[0])
    self._pool_expect(pool, self.pool, self.doc['weight'],
                      self.doc['uri'])
def _empty_message_list(self, body):
    """Assert that the response body carries an empty message list."""
    self.assertEqual(jsonutils.loads(body[0])['messages'], [])
def test_list(self):
    """List queues: limits, detail, project isolation, and paging."""
    arbitrary_number = 644079696574693
    project_id = str(arbitrary_number)

    # NOTE(kgriffs): It's important that this one sort after the one
    # above. This is in order to prove that bug/1236605 is fixed, and
    # stays fixed!
    alt_project_id = str(arbitrary_number + 1)

    # List empty
    self.simulate_get(self.queue_path, project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Payload exceeded
    self.simulate_get(self.queue_path, project_id,
                      query_string='limit=21')
    self.assertEqual(self.srmock.status, falcon.HTTP_400)

    # Create some
    def create_queue(name, project_id, body):
        uri = self.queue_path + '/' + name
        self.simulate_put(uri, project_id)
        self.simulate_put(uri + '/metadata', project_id, body=body)

    create_queue('g1', None, '{"answer": 42}')
    create_queue('g2', None, '{"answer": 42}')

    create_queue('q1', project_id, '{"node": 31}')
    create_queue('q2', project_id, '{"node": 32}')
    create_queue('q3', project_id, '{"node": 33}')

    create_queue('q3', alt_project_id, '{"alt": 1}')

    # List (global queues)
    result = self.simulate_get(self.queue_path, None,
                               query_string='limit=2&detailed=true')

    result_doc = jsonutils.loads(result[0])
    queues = result_doc['queues']
    self.assertEqual(len(queues), 2)

    for queue in queues:
        self.assertEqual(queue['metadata'], {'answer': 42})

    # List (limit)
    result = self.simulate_get(self.queue_path, project_id,
                               query_string='limit=2')

    result_doc = jsonutils.loads(result[0])
    self.assertEqual(len(result_doc['queues']), 2)

    # List (no metadata, get all)
    result = self.simulate_get(self.queue_path,
                               project_id, query_string='limit=5')

    result_doc = jsonutils.loads(result[0])
    [target, params] = result_doc['links'][0]['href'].split('?')

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertEqual(self.srmock.headers_dict['Content-Location'],
                     self.queue_path + '?limit=5')

    # Ensure we didn't pick up the queue from the alt project.
    queues = result_doc['queues']
    self.assertEqual(len(queues), 3)

    for queue in queues:
        self.simulate_get(queue['href'] + '/metadata', project_id)
        self.assertEqual(self.srmock.status, falcon.HTTP_200)

        self.simulate_get(queue['href'] + '/metadata', 'imnothere')
        self.assertEqual(self.srmock.status, falcon.HTTP_404)

        self.assertNotIn('metadata', queue)

    # List with metadata
    result = self.simulate_get(self.queue_path, project_id,
                               query_string='detailed=true')

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    result_doc = jsonutils.loads(result[0])
    [target, params] = result_doc['links'][0]['href'].split('?')

    queue = result_doc['queues'][0]
    result = self.simulate_get(queue['href'] + '/metadata', project_id)
    result_doc = jsonutils.loads(result[0])
    self.assertEqual(result_doc, queue['metadata'])
    self.assertEqual(result_doc, {'node': 31})

    # List tail
    self.simulate_get(target, project_id, query_string=params)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # List manually-constructed tail
    self.simulate_get(target, project_id, query_string='marker=zzz')
    self.assertEqual(self.srmock.status, falcon.HTTP_204)
def _test_post(self, sample_messages):
    """Post *sample_messages*, then verify them via single and bulk GET.

    Checks the 201 response document (resources list, no v1.1 'partial'
    key), each message's properties under a mocked future clock, and
    finally a bulk GET by ids.
    """
    sample_doc = jsonutils.dumps({'messages': sample_messages})

    result = self.simulate_post(self.messages_path,
                                body=sample_doc, headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    result_doc = jsonutils.loads(result[0])

    msg_ids = self._get_msg_ids(self.srmock.headers_dict)
    self.assertEqual(len(msg_ids), len(sample_messages))

    # NOTE: avoid shadowing the builtin `id` in the comprehension
    expected_resources = [
        six.text_type(self.messages_path + '/' + msg_id)
        for msg_id in msg_ids
    ]
    self.assertEqual(expected_resources, result_doc['resources'])

    # NOTE(kgriffs): As of v1.1, "partial" is no longer given
    # in the response document.
    self.assertNotIn('partial', result_doc)

    # Map ttl -> body so each GET below can be matched back to its
    # sample (assumes ttls are unique within the fixture).
    lookup = {m['ttl']: m['body'] for m in sample_messages}

    # Test GET on the message resource directly
    # NOTE(cpp-cabrera): force the passing of time to age a message
    timeutils_utcnow = 'zaqar.openstack.common.timeutils.utcnow'
    now = timeutils.utcnow() + datetime.timedelta(seconds=10)

    # The wrong-project headers are loop-invariant; build them once.
    wrong_project_headers = self.headers.copy()
    wrong_project_headers['X-Project-ID'] = '777777'

    with mock.patch(timeutils_utcnow) as mock_utcnow:
        mock_utcnow.return_value = now
        for msg_id in msg_ids:
            message_uri = self.messages_path + '/' + msg_id

            # Wrong project ID
            self.simulate_get(message_uri, headers=wrong_project_headers)
            self.assertEqual(self.srmock.status, falcon.HTTP_404)

            # Correct project ID
            result = self.simulate_get(message_uri, headers=self.headers)
            self.assertEqual(self.srmock.status, falcon.HTTP_200)

            # Check message properties
            message = jsonutils.loads(result[0])
            self.assertEqual(message['href'], message_uri)
            self.assertEqual(message['body'], lookup[message['ttl']])
            self.assertEqual(msg_id, message['id'])

            # no negative age
            # NOTE(cpp-cabrera): testtools lacks GreaterThanEqual on py26
            self.assertThat(message['age'], matchers.GreaterThan(-1))

    # Test bulk GET
    query_string = 'ids=' + ','.join(msg_ids)
    result = self.simulate_get(self.messages_path,
                               query_string=query_string,
                               headers=self.headers)

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    result_doc = jsonutils.loads(result[0])

    # Backends may reorder; compare as sets rather than sequences.
    expected_ttls = {m['ttl'] for m in sample_messages}
    actual_ttls = {m['ttl'] for m in result_doc['messages']}
    self.assertFalse(expected_ttls - actual_ttls)

    actual_ids = {m['id'] for m in result_doc['messages']}
    self.assertFalse(set(msg_ids) - actual_ids)
def test_lifecycle(self):
    """Walk a claim through its full lifecycle.

    Sequence: claim messages, verify listing behavior with and without
    claimed messages, read the claim under a mocked future clock, delete
    a message with/without the claim id, patch the claim's TTL, delete
    the claim, and finally verify 404s on the now-missing claim.
    """
    doc = '{"ttl": 100, "grace": 60}'

    # First, claim some messages
    body = self.simulate_post(self.claims_path, self.project_id, body=doc)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    claimed = jsonutils.loads(body[0])
    claim_href = self.srmock.headers_dict['Location']
    # Each claimed message href carries the claim_id as a query string
    message_href, params = claimed[0]['href'].split('?')

    # No more messages to claim
    self.simulate_post(self.claims_path, self.project_id, body=doc,
                       query_string='limit=3')
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    headers = {
        'Client-ID': str(uuid.uuid4()),
    }

    # Listing messages, by default, won't include claimed
    body = self.simulate_get(self.messages_path, self.project_id,
                             headers=headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Include claimed messages this time
    body = self.simulate_get(self.messages_path, self.project_id,
                             query_string='include_claimed=true',
                             headers=headers)
    listed = jsonutils.loads(body[0])
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertEqual(len(listed['messages']), len(claimed))

    # Advance the clock so the claim has a measurable, positive age
    now = timeutils.utcnow() + datetime.timedelta(seconds=10)
    timeutils_utcnow = 'zaqar.openstack.common.timeutils.utcnow'
    with mock.patch(timeutils_utcnow) as mock_utcnow:
        mock_utcnow.return_value = now
        body = self.simulate_get(claim_href, self.project_id)

    claim = jsonutils.loads(body[0])

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    self.assertEqual(self.srmock.headers_dict['Content-Location'],
                     claim_href)
    self.assertEqual(claim['ttl'], 100)
    # NOTE(cpp-cabrera): verify that claim age is non-negative
    self.assertThat(claim['age'], matchers.GreaterThan(-1))

    # Try to delete the message without submitting a claim_id
    self.simulate_delete(message_href, self.project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_403)

    # Delete the message and its associated claim
    self.simulate_delete(message_href, self.project_id,
                         query_string=params)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Try to get it from the wrong project
    self.simulate_get(message_href, 'bogus_project', query_string=params)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)

    # Get the message
    self.simulate_get(message_href, self.project_id, query_string=params)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)

    # Update the claim
    new_claim_ttl = '{"ttl": 60}'
    creation = timeutils.utcnow()
    self.simulate_patch(claim_href, self.project_id, body=new_claim_ttl)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Get the claimed messages (again)
    body = self.simulate_get(claim_href, self.project_id)
    query = timeutils.utcnow()
    claim = jsonutils.loads(body[0])
    # Rebind to a message still held by the (refreshed) claim
    message_href, params = claim['messages'][0]['href'].split('?')

    self.assertEqual(claim['ttl'], 60)
    # The claim was just patched, so its age must be less than the
    # wall-clock time elapsed since `creation`
    estimated_age = timeutils.delta_seconds(creation, query)
    self.assertTrue(estimated_age > claim['age'])

    # Delete the claim
    # NOTE(review): delete with a bogus project appears to be treated as
    # a no-op 204 rather than a 404 here — confirm against the API spec
    self.simulate_delete(claim['href'], 'bad_id')
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    self.simulate_delete(claim['href'], self.project_id)
    self.assertEqual(self.srmock.status, falcon.HTTP_204)

    # Try to delete a message with an invalid claim ID
    self.simulate_delete(message_href, self.project_id,
                         query_string=params)
    self.assertEqual(self.srmock.status, falcon.HTTP_403)

    # Make sure it wasn't deleted!
    self.simulate_get(message_href, self.project_id, query_string=params)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    # Try to get a claim that doesn't exist
    self.simulate_get(claim['href'])
    self.assertEqual(self.srmock.status, falcon.HTTP_404)

    # Try to update a claim that doesn't exist
    self.simulate_patch(claim['href'], body=doc)
    self.assertEqual(self.srmock.status, falcon.HTTP_404)
def _test_post(self, sample_messages):
    """Post *sample_messages* (v1 API), then verify via single and bulk GET.

    Checks the 201 response document (resources list, 'partial' flag),
    each message's properties under a mocked future clock, and a bulk
    GET by ids.
    """
    sample_doc = jsonutils.dumps(sample_messages)

    result = self.simulate_post(self.messages_path, self.project_id,
                                body=sample_doc, headers=self.headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_201)

    result_doc = jsonutils.loads(result[0])

    msg_ids = self._get_msg_ids(self.srmock.headers_dict)
    self.assertEqual(len(msg_ids), len(sample_messages))

    # NOTE: avoid shadowing the builtin `id` in the comprehension
    expected_resources = [
        six.text_type(self.messages_path + '/' + msg_id)
        for msg_id in msg_ids
    ]
    self.assertEqual(expected_resources, result_doc['resources'])

    # NOTE(kgriffs): As of the Icehouse release, drivers are
    # required to either completely succeed, or completely fail
    # to enqueue the entire batch of messages.
    self.assertFalse(result_doc['partial'])

    # Map ttl -> body so each GET below can be matched back to its
    # sample (assumes ttls are unique within the fixture).
    lookup = {m['ttl']: m['body'] for m in sample_messages}

    # Test GET on the message resource directly
    # NOTE(cpp-cabrera): force the passing of time to age a message
    timeutils_utcnow = 'zaqar.openstack.common.timeutils.utcnow'
    now = timeutils.utcnow() + datetime.timedelta(seconds=10)
    with mock.patch(timeutils_utcnow) as mock_utcnow:
        mock_utcnow.return_value = now
        for msg_id in msg_ids:
            message_uri = self.messages_path + '/' + msg_id

            # Wrong project ID
            self.simulate_get(message_uri, '777777')
            self.assertEqual(self.srmock.status, falcon.HTTP_404)

            # Correct project ID
            result = self.simulate_get(message_uri, self.project_id)
            self.assertEqual(self.srmock.status, falcon.HTTP_200)
            self.assertEqual(self.srmock.headers_dict['Content-Location'],
                             message_uri)

            # Check message properties
            message = jsonutils.loads(result[0])
            self.assertEqual(message['href'], message_uri)
            self.assertEqual(message['body'], lookup[message['ttl']])

            # no negative age
            # NOTE(cpp-cabrera): testtools lacks GreaterThanEqual on py26
            self.assertThat(message['age'], matchers.GreaterThan(-1))

    # Test bulk GET
    query_string = 'ids=' + ','.join(msg_ids)
    result = self.simulate_get(self.messages_path, self.project_id,
                               query_string=query_string)

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    result_doc = jsonutils.loads(result[0])

    # Backends may reorder; compare as sets rather than sequences.
    expected_ttls = {m['ttl'] for m in sample_messages}
    actual_ttls = {m['ttl'] for m in result_doc}
    self.assertFalse(expected_ttls - actual_ttls)
def json(self):
    """Return the stored response body deserialized from JSON."""
    raw_body = self._body
    return jsonutils.loads(raw_body)
def test_list(self):
    """Exercise v1.1 queue listing: empty page, limits, paging, detail."""
    arbitrary_number = 644079696574693
    project_id = str(arbitrary_number)
    client_id = str(uuid.uuid4())
    req_headers = {
        'X-Project-ID': project_id,
        'Client-ID': client_id
    }

    # NOTE(kgriffs): It's important that this one sort after the one
    # above. This is in order to prove that bug/1236605 is fixed, and
    # stays fixed!
    alt_project_id = str(arbitrary_number + 1)

    # An empty listing still returns 200 with queues=[] and links
    resp = self.simulate_get(self.queue_path, headers=req_headers)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    empty_doc = jsonutils.loads(resp[0])
    self.assertEqual(empty_doc['queues'], [])
    self.assertIn('links', empty_doc)

    next_link = empty_doc['links'][0]
    self.assertEqual('next', next_link['rel'])
    parsed_href = falcon.uri.parse_query_string(next_link['href'])
    self.assertNotIn('marker', parsed_href)

    # A limit above the maximum is rejected
    self.simulate_get(self.queue_path, headers=req_headers,
                      query_string='limit=21')
    self.assertEqual(self.srmock.status, falcon.HTTP_400)

    # Seed some queues; the alt project gets a same-named queue
    def create_queue(name, project_id, body):
        altheader = {'Client-ID': client_id}
        if project_id is not None:
            altheader['X-Project-ID'] = project_id
        uri = self.queue_path + '/' + name
        self.simulate_put(uri, headers=altheader, body=body)

    create_queue('q1', project_id, '{"node": 31}')
    create_queue('q2', project_id, '{"node": 32}')
    create_queue('q3', project_id, '{"node": 33}')

    create_queue('q3', alt_project_id, '{"alt": 1}')

    # limit=2 caps the page size
    resp = self.simulate_get(self.queue_path, headers=req_headers,
                             query_string='limit=2')

    doc = jsonutils.loads(resp[0])
    self.assertEqual(len(doc['queues']), 2)

    # A large-enough limit returns everything, without metadata
    resp = self.simulate_get(self.queue_path, headers=req_headers,
                             query_string='limit=5')

    doc = jsonutils.loads(resp[0])
    target, params = doc['links'][0]['href'].split('?')

    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    # Ensure we didn't pick up the queue from the alt project.
    listed_queues = doc['queues']
    self.assertEqual(len(listed_queues), 3)

    # detailed=true inlines each queue's metadata in the listing
    resp = self.simulate_get(self.queue_path, headers=req_headers,
                             query_string='detailed=true')

    self.assertEqual(self.srmock.status, falcon.HTTP_200)
    doc = jsonutils.loads(resp[0])
    target, params = doc['links'][0]['href'].split('?')

    first_queue = doc['queues'][0]
    resp = self.simulate_get(first_queue['href'], headers=req_headers)
    queue_doc = jsonutils.loads(resp[0])
    self.assertEqual(queue_doc, first_queue['metadata'])
    self.assertEqual(queue_doc, {'node': 31})

    # Following the "next" link past the last page is a 200 in v1.1
    self.simulate_get(target, headers=req_headers, query_string=params)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    # A hand-built marker past the end is also a 200
    self.simulate_get(target, headers=req_headers,
                      query_string='marker=zzz')
    self.assertEqual(self.srmock.status, falcon.HTTP_200)
def test_get_works(self):
    """GET on an existing flavor returns 200 and the expected document."""
    resp = self.simulate_get(self.flavor_path)
    self.assertEqual(self.srmock.status, falcon.HTTP_200)

    # Renamed from `pool`: this is the flavor document, not a pool
    flavor = jsonutils.loads(resp[0])
    self._flavor_expect(flavor, self.flavor_path, self.doc['pool'])
def json_decode(binary):
    """Deserialize *binary* as a JSON document, decoding bytes as UTF-8.

    :param binary: a JSON document as ``bytes`` or ``str``
    :returns: the deserialized Python object
    """
    # Pass the encoding by keyword so this call does not silently depend
    # on the positional order of jsonutils.loads' optional parameters.
    return jsonutils.loads(binary, encoding='utf-8')