def test_create_bag(self):
    with open(filepath('test-bag.json')) as f:
        bag_json = f.read()
    with open(filepath('test-bag.jsonld')) as f:
        bag_jsonld = f.read()
    with self.client as client:
        id = UUID('6f2c64e2-c65f-4e2d-b028-f89dfb71ce69')
        res = client.put(
            '/bags/%s' % id,
            data=bag_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        bag_url = urlparse(res.headers['Location'])
        self.assertEqual('/bags/%s' % id, bag_url.path)
        self.assertEqual('version=0', bag_url.query)
        res = client.get(res.headers['Location'])
        self.assertTrue('Last-Modified' in res.headers)
        self.assertEqual(
            res.headers['Etag'],
            'W/"bag-6f2c64e2-c65f-4e2d-b028-f89dfb71ce69-version-0"')
        self.maxDiff = None
        self.assertEqual(
            json.loads(bag_jsonld),
            json.loads(res.get_data(as_text=True)))
        context = json.loads(res.get_data(as_text=True))['@context']
        self.assertEqual(context, [
            'http://localhost.localdomain:5000/c',
            {'@base': 'http://n2t.net/ark:/99152/'}])
        res = client.get('/bags/')
        self.assertEqual(res.status_code, http.client.OK)
        self.assertEqual(
            ['http://localhost.localdomain:5000/bags/%s' % id],
            json.loads(res.get_data(as_text=True)))
def test_put_graph(self):
    with open(filepath('test-graph.json')) as f:
        graph_json = f.read()
    with self.client as client:
        id = 'places/us-states'
        res = client.put(
            '/graphs/%s' % id,
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        graph_url = urlparse(res.headers['Location'])
        self.assertEqual('/graphs/%s' % id, graph_url.path)
        self.assertEqual('version=0', graph_url.query)
        res = client.get(res.headers['Location'])
        self.assertTrue('Last-Modified' in res.headers)
        self.assertEqual(
            res.headers['Etag'], 'W/"graph-places/us-states-version-0"')
        res = client.get('/graphs/')
        self.assertEqual(res.status_code, http.client.OK)
        data = json.loads(res.get_data(as_text=True))
        self.assertEqual(
            {'http://localhost.localdomain:5000/d/',
             'http://localhost.localdomain:5000/graphs/%s' % id},
            set(data['graphs'].keys()))
def test_update_graph(self):
    with open(filepath('test-graph.json')) as f:
        graph_json = f.read()
    with open(filepath('test-graph-updated.json')) as f:
        updated_graph_json = f.read()
    with self.client as client:
        id = 'places/us-states'
        res = client.put(
            '/graphs/%s' % id,
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        graph_url_v0 = urlparse(res.headers['Location'])
        self.assertEqual('/graphs/%s' % id, graph_url_v0.path)
        self.assertEqual('version=0', graph_url_v0.query)
        res = client.put(
            '/graphs/%s' % id,
            data=updated_graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        graph_url_v1 = urlparse(res.headers['Location'])
        self.assertEqual('/graphs/%s' % id, graph_url_v1.path)
        self.assertEqual('version=1', graph_url_v1.query)
        res = client.get('/graphs/%s' % id)
        self.assertEqual(
            json.loads(res.get_data(as_text=True))[
                'features'][0]['names'][0]['toponym'],
            'Minnesooooooota')
        self.assertEqual(
            res.headers['Content-Disposition'],
            'attachment; filename="periodo-graph-places-us-states.json"')
        res = client.get('/graphs/%s?version=0' % id)
        self.assertEqual(
            json.loads(res.get_data(as_text=True))[
                'features'][0]['names'][0]['toponym'],
            'Minnesota')
        self.assertEqual(
            res.headers['Content-Disposition'],
            'attachment; '
            + 'filename="periodo-graph-places-us-states-v0.json"')
        res = client.get('/graphs/%s?version=1' % id)
        self.assertEqual(
            json.loads(res.get_data(as_text=True))[
                'features'][0]['names'][0]['toponym'],
            'Minnesooooooota')
        self.assertEqual(
            res.headers['Content-Disposition'],
            'attachment; '
            + 'filename="periodo-graph-places-us-states-v1.json"')
def get_color(client):
    color_specs = client.get("config/color_specs")['body'].decode('UTF-8')
    name = Menu.prompt("Enter color {}: ".format(color_specs))
    if name not in color_specs:
        print("Color must be one of {}".format(color_specs))
    else:
        colorspec = json.loads(
            client.get("config/color_specs/{}?all=true".format(name))
            ['body'].decode('UTF-8'))
        # Labels follow the channel keys: 'r' = red, 'g' = green, 'b' = blue.
        print("RED magnitude: {:3d} period: {:3d} amplitude: {:3d}".format(
            colorspec['r']['m'], colorspec['r']['p'], colorspec['r']['a']))
        print("GREEN magnitude: {:3d} period: {:3d} amplitude: {:3d}".format(
            colorspec['g']['m'], colorspec['g']['p'], colorspec['g']['a']))
        print("BLUE magnitude: {:3d} period: {:3d} amplitude: {:3d}".format(
            colorspec['b']['m'], colorspec['b']['p'], colorspec['b']['a']))
def test_update_bag_using_jsonld(self):
    with open(filepath('test-bag.json')) as f:
        bag_json = f.read()
    with open(filepath('test-bag.jsonld')) as f:
        bag_jsonld = f.read()
    with open(filepath('test-bag-updated.jsonld')) as f:
        updated_bag_jsonld = f.read()
    with self.client as client:
        id = UUID('6f2c64e2-c65f-4e2d-b028-f89dfb71ce69')
        res = client.put(
            '/bags/%s' % id,
            data=bag_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        bag_url_v0 = urlparse(res.headers['Location'])
        self.assertEqual('/bags/%s' % id, bag_url_v0.path)
        self.assertEqual('version=0', bag_url_v0.query)
        res = client.put(
            '/bags/%s' % id,
            data=updated_bag_jsonld,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        bag_url_v1 = urlparse(res.headers['Location'])
        self.assertEqual('/bags/%s' % id, bag_url_v1.path)
        self.assertEqual('version=1', bag_url_v1.query)
        res = client.get('/bags/%s' % id)
        self.maxDiff = None
        self.assertEqual(
            json.loads(updated_bag_jsonld),
            json.loads(res.get_data(as_text=True)))
        context = json.loads(res.get_data(as_text=True))['@context']
        self.assertEqual(context, [
            'http://localhost.localdomain:5000/c',
            {'@base': 'http://n2t.net/ark:/99152/',
             "foo": "http://example.org/foo"}])
        res = client.get('/bags/%s?version=0' % id)
        self.maxDiff = None
        self.assertEqual(
            json.loads(bag_jsonld),
            json.loads(res.get_data(as_text=True)))
        res = client.get('/bags/%s?version=1' % id)
        self.maxDiff = None
        self.assertEqual(
            json.loads(updated_bag_jsonld),
            json.loads(res.get_data(as_text=True)))
def app_test():
    rq = client.get(config.www.app_prefix, follow_redirects=True)
    eq_(rq.status_code, http.client.OK)

    rq = client.get(config.www.app_prefix + "?lang=ru")
    # eq_(rq.status_code, http.client.OK)
    ok_("Set-Cookie" in rq.headers)

    rq = client.get(config.www.app_prefix + "/index.html")
    eq_(rq.status_code, http.client.OK)

    rq = client.get(config.www.app_prefix + "/index.html?"
                    "author=Wilson&"
                    "title=Ecossoise&"
                    "year_from=1800&"
                    "year_to=1900")
    eq_(rq.status_code, http.client.OK)

    rq = client.get(config.www.app_prefix + "/all.html")
    eq_(rq.status_code, http.client.OK)

    rq = client.get(config.www.app_prefix + "/book/dodworth_1844_indian_hunter")
    eq_(rq.status_code, http.client.OK)

    rq = client.get(config.www.app_prefix + "/keywords")
    eq_(rq.status_code, http.client.OK)
    json.loads(rq.data.decode())

    rq = client.get(config.www.app_prefix + "/langid")
    eq_(rq.status_code, http.client.OK)
    json.loads(rq.data.decode())
def test_update_bag(self):
    with open(filepath('test-bag.json')) as f:
        bag_json = f.read()
    with open(filepath('test-bag.jsonld')) as f:
        bag_jsonld = f.read()
    with open(filepath('test-bag-updated.json')) as f:
        updated_bag_json = f.read()
    with open(filepath('test-bag-updated.jsonld')) as f:
        updated_bag_jsonld = f.read()
    with self.client as client:
        id = UUID('6f2c64e2-c65f-4e2d-b028-f89dfb71ce69')
        res = client.put(
            '/bags/%s' % id,
            data=bag_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        bag_url_v0 = urlparse(res.headers['Location'])
        self.assertEqual('/bags/%s' % id, bag_url_v0.path)
        self.assertEqual('version=0', bag_url_v0.query)
        res = client.put(
            '/bags/%s' % id,
            data=updated_bag_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        bag_url_v1 = urlparse(res.headers['Location'])
        self.assertEqual('/bags/%s' % id, bag_url_v1.path)
        self.assertEqual('version=1', bag_url_v1.query)
        res = client.get('/bags/%s' % id)
        self.maxDiff = None
        self.assertEqual(
            json.loads(updated_bag_jsonld),
            json.loads(res.get_data(as_text=True)))
        res = client.get('/bags/%s?version=0' % id)
        self.maxDiff = None
        self.assertEqual(
            json.loads(bag_jsonld),
            json.loads(res.get_data(as_text=True)))
        res = client.get('/bags/%s?version=1' % id)
        self.maxDiff = None
        self.assertEqual(
            json.loads(updated_bag_jsonld),
            json.loads(res.get_data(as_text=True)))
def test_admin_home(client):
    """Tests the /admin route."""
    with client.session_transaction() as session:
        utils.login(session)
        utils.add_permission(session, Permissions.ADMIN)
    response = client.get(flask.url_for('admin.admin_home'))
    assert response.status_code == http.client.OK
def set_colorspec(client):
    color_specs = client.get("config/color_specs")['body'].decode('UTF-8')
    response = Menu.prompt("Enter colorspec {}: ".format(color_specs))
    if response not in color_specs:
        print("Colorspec must be one of {}".format(color_specs))
    else:
        client.post("lamp?color_name={}".format(response))
def test_comment_on_patch(self):
    with open(filepath('test-patch-adds-items.json')) as f:
        patch = f.read()
    with self.client as client:
        res = client.patch(
            '/d/',
            data=patch,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        patch_id = int(res.headers['Location'].split('/')[-2])
        patch_url = urlparse(res.headers['Location']).path
        res = client.post(
            patch_url + 'messages',
            data=json.dumps({'message': 'This is a comment'}),
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        self.assertEqual(res.status_code, http.client.OK)
        self.assertEqual(patch_url, urlparse(res.headers['Location']).path)
        row = database.query_db(
            'SELECT * FROM patch_request_comment WHERE patch_request_id=?',
            (patch_id,), one=True)
        self.assertEqual('https://orcid.org/1234-5678-9101-112X', row['author'])
        self.assertEqual(patch_id, row['patch_request_id'])
        self.assertEqual('This is a comment', row['message'])
        res = client.get(patch_url)
        comments = json.loads(res.get_data(as_text=True)).get('comments')
        self.assertEqual(1, len(comments))
def test_run_hassle(client):
    """Tests /hassle route."""
    with client.session_transaction() as session:
        utils.login(session)
        utils.add_permission(session, Permissions.HASSLE)
    response = client.get(flask.url_for('hassle.run_hassle'))
    assert response.status_code == http.client.OK
def test_view_profile(client):
    """Tests that viewing a user works."""
    username = "******"
    with client.session_transaction() as session:
        utils.login(session)
    response = client.get(
        flask.url_for('users.view_profile', username=username))
    assert response.status_code == http.client.OK
def test_remove_period(self):
    with open(filepath('test-patch-remove-period.json')) as f:
        patch1 = f.read()
    with self.client as client:
        res = client.patch(
            '/d/',
            data=patch1,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        patch_url = urlparse(res.headers['Location']).path
        res = client.post(
            patch_url + 'merge',
            buffered=True,
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.NO_CONTENT)
        removed_entities = database.get_removed_entity_keys()
        self.assertEqual(removed_entities, set(['p0trgkvwbjd']))
        res = client.get('/trgkvwbjd',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.GONE)
        res = client.get('/trgkvwbjd.json',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.GONE)
        res = client.get('/trgkvwbjd?version=0',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.NOT_FOUND)
        res = client.get('/trgkvwbjd.json?version=0',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.NOT_FOUND)
        res = client.get('/trgkvwbjd?version=1',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        res = client.get('/trgkvwbjd.json?version=1',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        res = client.get('/history.jsonld?inline-context')
        self.assertEqual(
            res.headers['Cache-Control'], 'public, max-age=0')
        self.assertEqual(
            res.headers['X-Accel-Expires'], '{}'.format(cache.MEDIUM_TIME))
        g = ConjunctiveGraph()
        g.parse(format='json-ld', data=res.get_data(as_text=True))
        generated = list(g.objects(subject=HOST['h#change-2'],
                                   predicate=PROV.generated))
        self.assertEqual(len(generated), 1)
        self.assertIn(HOST['d?version=2'], generated)
def test_remove_definition(self):
    with open(filepath('test-patch-remove-definition.json')) as f:
        patch1 = f.read()
    with self.client as client:
        res = client.patch(
            '/d/',
            data=patch1,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        patch_url = urlparse(res.headers['Location']).path
        res = client.post(
            patch_url + 'merge',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.NO_CONTENT)
        removed_entities = database.get_removed_entity_keys()
        self.assertEqual(removed_entities, set(['p0trgkvwbjd']))
        res = client.get('/trgkvwbjd',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.GONE)
        res = client.get('/trgkvwbjd.json',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.GONE)
        res = client.get('/trgkvwbjd?version=0',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.NOT_FOUND)
        res = client.get('/trgkvwbjd.json?version=0',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.NOT_FOUND)
        res = client.get('/trgkvwbjd?version=1',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        res = client.get('/trgkvwbjd.json?version=1',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        res = client.get('/h')
        g = ConjunctiveGraph()
        g.parse(format='json-ld', data=res.get_data(as_text=True))
        invalidated = g.value(subject=PERIODO['p0h#change-2'],
                              predicate=PROV.invalidated, any=False)
        self.assertEqual(invalidated, PERIODO['p0trgkvwbjd'])
        generated = list(g.objects(subject=PERIODO['p0h#change-2'],
                                   predicate=PROV.generated))
        self.assertEqual(len(generated), 2)
        self.assertIn(PERIODO['p0d?version=2'], generated)
        self.assertIn(PERIODO['p0trgkv?version=2'], generated)
def test_get_sprint(client, init_database):
    # Create a temporary sprint
    sprint0 = add_temp_sprint(1, "test description", 1, False, "01/01/2020")
    # Call the service
    rv = client.get("/sprint/" + str(sprint0[0]))
    # Deserialize the response
    sprint = json.loads(rv.data.decode("utf-8"))
    # Check the number of elements and that the temporary record is among them
    assert sprint0[0] in [elem["id"] for elem in sprint]
    assert len(sprint) == 1
    app.db.session.commit()
def test_get_me_articles_not_found(client):
    user_payload = {
        'userid': fake.random_int(2, 100),
        'username': fake.user_name(),
        'email': fake.email(),
    }
    header = token_validation.generate_token_header(user_payload, PRIVATE_KEY)
    headers = {
        'Authorization': header,
    }
    response = client.get('/api/me/articles/', headers=headers)
    assert http.client.NOT_FOUND == response.status_code
def test_use_site_url_if_endpoint_unset(self):
    self.config_fixture.config(public_endpoint=None)
    for app in (self.public_app, ):
        client = TestClient(app)
        resp = client.get('/')
        self.assertEqual(300, resp.status_int)
        data = jsonutils.loads(resp.body)
        expected = VERSIONS_RESPONSE
        for version in expected['versions']['values']:
            # localhost happens to be the site url for tests
            if version['id'].startswith('v3'):
                self._paste_in_port(version, 'http://localhost/v3/')
        self.assertThat(data, _VersionsEqual(expected))
def _test_json_home(self, path, exp_json_home_data):
    client = TestClient(self.public_app)
    resp = client.get(path, headers={'Accept': 'application/json-home'})
    self.assertThat(resp.status, tt_matchers.Equals('200 OK'))
    self.assertThat(resp.headers['Content-Type'],
                    tt_matchers.Equals('application/json-home'))
    maxDiff = self.maxDiff
    self.maxDiff = None
    # NOTE(morgan): Changed from tt_matchers.Equals to make it easier to
    # determine issues. Reset maxDiff to the original value at the end
    # of the assert.
    self.assertDictEqual(exp_json_home_data, jsonutils.loads(resp.body))
    self.maxDiff = maxDiff
def test_list_me_offers(client, offer_fixture):
    username = fake.name()
    title = fake.text(150)
    description = fake.text(240)
    category = fake.text(50)
    latitude = fake.latitude()
    longitude = fake.longitude()
    picture_url = fake.text(150)

    # Create a new offer
    new_offer = {
        'title': title,
        'description': description,
        'category': category,
        'latitude': latitude,
        'longitude': longitude,
        'picture_url': picture_url,
    }
    header = token_validation.generate_token_header(username, PRIVATE_KEY)
    headers = {
        'Authorization': header,
    }
    response = client.post('/api/me/offers/', data=new_offer, headers=headers)
    result = response.json
    assert http.client.CREATED == response.status_code

    # Get the offers of the user
    response = client.get('/api/me/offers/', headers=headers)
    results = response.json
    assert http.client.OK == response.status_code
    assert len(results) == 1
    result = results[0]
    expected_result = {
        'id': ANY,
        'username': username,
        'title': title,
        'description': description,
        'category': category,
        'picture_url': picture_url,
        'latitude': ANY,
        'longitude': ANY,
        'timestamp': ANY,
    }
    assert result == expected_result
def test_list_location(client, product_fixture, locations_fixture):
    product_id = product_fixture[0]
    response = client.get(f'/api/product/{product_id}/location/')
    results = response.json
    assert http.client.OK == response.status_code
    assert len(results) == 4
    expected = {
        'latitude': ANY,
        'longitude': ANY,
        'elevation': ANY,
        'timestamp': ANY,
    }
    for result in results:
        assert result == expected
def test_get_artist_album(client, album_fixture):
    username, user_id = get_user()
    headers = get_headers(username, user_id)
    response_new_album, new_album = create_test_album(client, username,
                                                      user_id, headers)
    album_id = response_new_album.json['id']
    assert http.client.CREATED == response_new_album.status_code

    response = client.get(f'/api/artist/albums/{album_id}/', headers=headers)
    album = response.json
    assert http.client.OK == response.status_code
    assert is_album_keys_valid(album)

    response = delete_test_album(client, album_id, headers)
    assert http.client.NO_CONTENT == response.status_code
def test_update_product(client, product_fixture):
    product_id = product_fixture[0]
    new_description = {
        'product_description': 'New description',
    }
    response = client.put(f'/api/product/{product_id}', data=new_description)
    result = response.json
    assert http.client.OK == response.status_code
    assert result['product_description'] == 'New description'

    # The change is persistent
    response = client.get(f'/api/product/{product_id}')
    result = response.json
    assert http.client.OK == response.status_code
    assert result['product_description'] == 'New description'
def test_list_products(client, product_fixture):
    response = client.get('/api/product/')
    result = response.json
    assert http.client.OK == response.status_code
    assert len(result) > 0
    # Check that the ids are increasing
    previous_id = -1
    for product in result:
        expected = {
            'product_id': ANY,
            'product_description': ANY,
        }
        assert expected == product
        assert product['product_id'] > previous_id
        previous_id = product['product_id']
def test_get_tests_by_story(client, init_database):
    # Create a temporary story and its associated acceptance tests
    story0 = add_temp_story("test_description", 1, StoryPriority.high, False)
    a_test0 = add_temp_accepttests(1, "Test description1", story0[0], False)
    a_test1 = add_temp_accepttests(1, "Test description2", story0[0], False)
    # Call the service
    rv = client.get("/tests/getbystory/" + str(story0[0]))
    # Deserialize the response
    sprint_stories = json.loads(rv.data.decode("utf-8"))
    # Check the number of elements and that the temporary records are among them
    assert (a_test0[0] in [elem["id"] for elem in sprint_stories]) and (
        a_test1[0] in [elem["id"] for elem in sprint_stories])
    assert len(sprint_stories) == 2
    app.db.session.commit()
def _test_version(self, app_name):
    app = self.loadapp(app_name)
    client = TestClient(app)
    resp = client.get('/')
    self.assertEqual(300, resp.status_int)
    data = jsonutils.loads(resp.body)
    expected = VERSIONS_RESPONSE
    url_with_port = 'http://localhost:%s/v3/' % self.public_port
    for version in expected['versions']['values']:
        # TODO(morgan): Eliminate the need to do the "paste-in-port" part
        # of the tests. Ultimately, this is very hacky and shows we are
        # not setting up the test case sanely.
        if version['id'].startswith('v3'):
            self._paste_in_port(version, url_with_port)
    # Explicitly check that a location header is set and it is pointing
    # to v3 (The preferred location for now)!
    self.assertIn('Location', resp.headers)
    self.assertEqual(url_with_port, resp.headers['Location'])
    self.assertThat(data, _VersionsEqual(expected))
def test_if_none_match(self):
    with open(filepath('test-bag.json')) as f:
        bag_json = f.read()
    with self.client as client:
        id = UUID('6f2c64e2-c65f-4e2d-b028-f89dfb71ce69')
        res = client.put(
            '/bags/%s' % id,
            data=bag_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        res = client.get(
            res.headers['Location'],
            buffered=True,
            headers={'If-None-Match':
                     'W/"bag-6f2c64e2-c65f-4e2d-b028-f89dfb71ce69-version-0"'})
        self.assertEqual(res.status_code, http.client.NOT_MODIFIED)
def test_if_none_match(self):
    with open(filepath('test-graph.json')) as f:
        graph_json = f.read()
    with self.client as client:
        id = 'places/us-states'
        res = client.put(
            '/graphs/%s' % id,
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        res = client.get(
            res.headers['Location'],
            buffered=True,
            headers={'If-None-Match': 'W/"graph-places/us-states-version-0"'})
        self.assertEqual(res.status_code, http.client.NOT_MODIFIED)
def test_list_thoughts(client, thought_fixture):
    response = client.get('/api/thoughts/')
    result = response.json
    assert http.client.OK == response.status_code
    assert len(result) > 0
    # Check that the ids are increasing
    previous_id = -1
    for thought in result:
        expected = {
            'text': ANY,
            'username': ANY,
            'id': ANY,
            'timestamp': ANY,
        }
        assert expected == thought
        assert thought['id'] > previous_id
        previous_id = thought['id']
async def covid_by_state(state: covid_state):
    MY_APP_TOKEN = str(os.getenv("COVID_API"))
    client = Socrata('data.cdc.gov', MY_APP_TOKEN)
    q = '''
    SELECT *
    ORDER BY submission_date DESC
    LIMIT 1000
    '''
    results = client.get("9mfq-cb36", query=q)
    df = pd.DataFrame.from_records(results)
    # state_requested = pd.DataFrame([state])
    # state = state_requested.iloc[0]
    last_week = str(datetime.date.today() - timedelta(days=7))
    # Filter df for the requested state and the last week, then total new cases
    new = df[(df['state'] == state.state) & (df['submission_date'] > last_week)]
    new_cases = new['new_case'].astype('float').sum()
    return new_cases
def test_get_sprints_by_project(client, init_database):
    # Create temporary sprints
    sprint0 = add_temp_sprint(1, "test description", 1, False, "01/01/2020")
    sprint1 = add_temp_sprint(1, "test description", 1, False, "01/01/2021")
    # Take the project id
    project_id = sprint0[1]
    # Call the service
    rv = client.get("/sprint/getbyproject/" + str(project_id))
    # Deserialize the response
    sprint = json.loads(rv.data.decode("utf-8"))
    # Check the number of elements and that the temporary records are among them
    assert (sprint0[0] in [elem["id"] for elem in sprint]) and (
        sprint1[0] in [elem["id"] for elem in sprint])
    assert len(sprint) == 2
    app.db.session.commit()
def test_get_list_album_songs(client, album_fixture):
    album_id = get_album_id_from_fixture(album_fixture)
    response = client.get(f'/api/albums/{album_id}/songs/')
    songs = response.json
    assert http.client.OK == response.status_code
    assert len(songs) > 0
    # Check that the ids are increasing
    previous_id = -1
    for song in songs:
        expected = {
            'id': ANY,
            'name': ANY,
            'album_id': album_id,
            'artist_name': ANY,
            'artist_id': ANY,
        }
        assert expected == song
        assert song['id'] > previous_id
        previous_id = song['id']
def test_get_stories_by_sprint(client, init_database):
    # Create a temporary sprint and its associated stories
    sprint0 = add_temp_sprint(1, "test description", 1, False, "01/01/2020")
    story0 = add_temp_story("test_description", 1, StoryPriority.high, False,
                            sprint0[0])
    story1 = add_temp_story("test_description", 1, StoryPriority.medium, False,
                            sprint0[0])
    # Call the service
    rv = client.get("/sprint/getstories/" + str(sprint0[0]))
    # Deserialize the response
    sprint_stories = json.loads(rv.data.decode("utf-8"))
    # Check the number of elements and that the temporary records are among them
    assert (story0[0] in [elem["id"] for elem in sprint_stories]) and (
        story1[0] in [elem["id"] for elem in sprint_stories])
    assert len(sprint_stories) == 2
    app.db.session.commit()
def test_list_areas(client, area_fixture):
    response = client.get('/api/areas/')
    result = response.json
    assert http.client.OK == response.status_code
    assert len(result) > 0
    # Check that the ids are increasing
    previous_id = -1
    for area in result:
        expected = {
            'areacode': ANY,
            'area': ANY,
            'zonecode': ANY,
            'username': ANY,
            'id': ANY,
            'timestamp': ANY,
        }
        assert expected == area
        assert area['id'] > previous_id
        previous_id = area['id']
def test_get_list_all_albums_search(client, album_fixture):
    username, user_id = get_user()
    headers = get_headers(username, user_id)
    response_new_album, new_album = create_test_album(
        client, username, user_id, headers, name='This is a TestAlbumName')
    assert http.client.CREATED == response_new_album.status_code

    response = client.get('/api/albums/?search=testalbumname')
    albums = response.json
    assert http.client.OK == response.status_code
    assert len(albums) > 0
    for album in albums:
        assert is_album_keys_valid(album)
        assert 'testalbumname' in album['name'].lower()

    response = delete_test_album(client, response_new_album.json['id'],
                                 headers)
    assert http.client.NO_CONTENT == response.status_code
def test_list_offers_search(client, offer_fixture):
    username = fake.name()
    new_offer = {
        'username': username,
        'title': 'A tale',
        'description': 'A tale about a Platypus',
        'category': 'Home',
        'latitude': 0,
        'longitude': 0,
        'picture_url': 'localhost',
    }
    header = token_validation.generate_token_header(username, PRIVATE_KEY)
    headers = {
        'Authorization': header,
    }
    response = client.post('/api/me/offers/', data=new_offer, headers=headers)
    assert http.client.CREATED == response.status_code

    response = client.get('/api/offers/?search=platypus')
    result = response.json
    assert http.client.OK == response.status_code
    assert len(result) > 0
    # Check that the returned values contain "platypus"
    for offer in result:
        expected = {
            'description': ANY,
            'title': ANY,
            'username': username,
            'id': ANY,
            'category': ANY,
            'latitude': ANY,
            'longitude': ANY,
            'picture_url': ANY,
            'timestamp': ANY,
        }
        assert expected == offer
        assert 'platypus' in offer['description'].lower()
def test_list_articles(client, article_fixture):
    response = client.get('/api/articles')
    result = response.json
    assert http.client.OK == response.status_code
    assert len(result) > 0
    # Check that the ids are increasing
    previous_id = -1
    for article in result:
        expected = {
            'id': ANY,
            'slug': ANY,
            'title': ANY,
            'content': ANY,
            'author_id': ANY,
            'category_id': ANY,
            'status': ANY,
            'published': ANY,
        }
        assert expected == article
        assert article['id'] > previous_id
        previous_id = article['id']
def test_get_artist_song(client, album_fixture):
    username, user_id = get_user()
    headers = get_headers(username, user_id)
    response_new_album, new_album = create_test_album(client, username,
                                                      user_id, headers)
    assert http.client.CREATED == response_new_album.status_code
    album_id = response_new_album.json['id']

    response_new_song, new_song = create_test_song(client, username, user_id,
                                                   album_id, headers)
    assert http.client.CREATED == response_new_song.status_code
    song_id = response_new_song.json['id']

    response = client.get(f'/api/artist/songs/{song_id}/', headers=headers)
    result = response.json
    assert http.client.OK == response.status_code
    assert is_song_keys_valid(result)

    response = delete_test_album(client, album_id, headers)
    assert http.client.NO_CONTENT == response.status_code
def get_stats(client):
    stats = json.loads(client.get("stats")['body'].decode('UTF-8'))
    gcd = stats['gcd']
    print("~~~")
    print(DeviceMenu.get_basic_stats("GC", gcd))
    print(DeviceMenu.get_basic_stats("NTP", stats['ntpd']))
    print(DeviceMenu.get_basic_stats("Lamp", stats['lamp']))
    print(DeviceMenu.get_basic_stats("Scheduler", stats['scheduler']))
    print("~~~")
    print("GC stats: max: {} min: {} avg: {}".format(
        gcd['max_collected'], gcd['min_collected'],
        int(gcd['sum_collected'] / gcd['num_collections'])))
    mem_alloc = gcd['mem_alloc']
    mem_free = gcd['mem_free']
    capacity = mem_alloc + mem_free
    print("Memory: allocated: {} bytes free: {} bytes ({}%)".format(
        mem_alloc, mem_free,
        int(((capacity - mem_free) / capacity) * 100.0)))
    (days, hours, minutes, seconds, millis) = DeviceMenu.get_duration(
        stats['uptime_ms'])
    print("Uptime: {}d {}h {}m {}.{}s".format(days, hours, minutes, seconds,
                                              millis))
    print("~~~")
def test_list_me_areas(client, area_fixture):
    username = fake.name()
    areacode = fake.text(240)
    area = fake.text(240)
    zonecode = fake.text(240)

    # Create a new area
    new_area = {
        'areacode': areacode,
        'area': area,
        'zonecode': zonecode,
    }
    header = token_validation.generate_token_header(username, PRIVATE_KEY)
    headers = {
        'Authorization': header,
    }
    response = client.post('/api/me/areas/', data=new_area, headers=headers)
    result = response.json
    assert http.client.CREATED == response.status_code

    # Get the areas of the user
    response = client.get('/api/me/areas/', headers=headers)
    results = response.json
    assert http.client.OK == response.status_code
    assert len(results) == 1
    result = results[0]
    expected_result = {
        'id': ANY,
        'username': username,
        'areacode': areacode,
        'area': area,
        'zonecode': zonecode,
        'timestamp': ANY,
    }
    assert result == expected_result
def search_soundcloud(self):
    """Fetches results from Soundcloud for the given query."""
    # Create a client object with my app credentials.
    client = soundcloud.Client(client_id="2eaab453dce03a7bca4b475e4132a163")
    # Create a list of TrackInfo objects.
    matches = []
    try:
        # Find all sounds for the given query.
        tracks = client.get("/tracks", q=self.user_query.bare_query)
        # Stop after 10 results.
        if len(tracks) > 10:
            results = 10
        else:
            results = len(tracks)
        for i in range(results):
            words = [comp.strip() for comp in
                     tracks[i].title.replace(u"\u2013", "-").split(" - ")]
            if len(words) > 1:
                artist = words[0]
                title = words[1]
            else:
                artist = tracks[i].user["username"]
                title = tracks[i].title
            # If the sound has a release date, use it for the year attribute,
            # else use the date the sound was created.
            if tracks[i].release_year:
                year = tracks[i].release_year
            else:
                year = tracks[i].created_at.split("/")[0]
            match = TrackInfo(title, artist, "Soundcloud",
                              old_div(tracks[i].duration, 1000), year)
            if tracks[i].artwork_url:
                match.cover = tracks[i].artwork_url.replace("large", "t500x500")
            matches.append(match)
        return matches
    except Exception as e:
        print(colored.red(
            u"Something went wrong with the request: {0}".format(e)))
def test_delete_graph(self):
    with open(filepath('test-graph.json')) as f:
        graph_json = f.read()
    with self.client as client:
        id = 'places/us-states'
        res = client.put(
            '/graphs/%s' % id,
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        res = client.delete(
            '/graphs/%s' % id,
            buffered=True,
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.NO_CONTENT)
        res = client.get('/graphs/%s' % id)
        self.assertEqual(res.status_code, http.client.NOT_FOUND)
        res = client.get('/graphs/%s?version=0' % id)
        self.assertEqual(res.status_code, http.client.OK)
        res = client.get('/graphs/')
        self.assertEqual(res.status_code, http.client.OK)
        data = json.loads(res.get_data(as_text=True))
        self.assertEqual(
            {'http://localhost.localdomain:5000/d/'},
            set(data['graphs'].keys()))
        res = client.put(
            '/graphs/%s' % id,
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        graph_url_v1 = urlparse(res.headers['Location'])
        self.assertEqual('/graphs/%s' % id, graph_url_v1.path)
        self.assertEqual('version=1', graph_url_v1.query)
        res = client.get('/graphs/%s' % id)
        self.assertEqual(res.status_code, http.client.OK)
        res = client.get('/graphs/%s?version=0' % id)
        self.assertEqual(res.status_code, http.client.OK)
        res = client.get('/graphs/%s?version=1' % id)
        self.assertEqual(res.status_code, http.client.OK)
def test_get_list_artist_album_songs(client, album_fixture):
    username, user_id = get_user()
    headers = get_headers(username, user_id)
    response_new_album, new_album = create_test_album(
        client, username, user_id, headers)
    assert http.client.CREATED == response_new_album.status_code
    album_id = response_new_album.json['id']

    response_new_song, new_song = create_test_song(
        client, username, user_id, album_id, headers)
    assert http.client.CREATED == response_new_song.status_code

    response = client.get(f'/api/artist/albums/{album_id}/songs/',
                          headers=headers)
    songs = response.json
    assert http.client.OK == response.status_code
    assert len(songs) == 1
    result = songs[0]
    expected_result = {
        'id': ANY,
        'name': new_song['name'],
        'album_id': album_id,
        'artist_name': username,
        'artist_id': user_id,
    }
    assert result == expected_result

    response = delete_test_album(client, album_id, headers)
    assert http.client.NO_CONTENT == response.status_code
def test_list_offers(client, offer_fixture):
    response = client.get('/api/offers/')
    result = response.json
    assert http.client.OK == response.status_code
    assert len(result) > 0
    # Check that the ids are increasing
    previous_id = -1
    for offer in result:
        expected = {
            'description': ANY,
            'title': ANY,
            'username': ANY,
            'id': ANY,
            'category': ANY,
            'latitude': ANY,
            'longitude': ANY,
            'picture_url': ANY,
            'timestamp': ANY,
        }
        assert expected == offer
        assert offer['id'] > previous_id
        previous_id = offer['id']
async def fetch_diff(client, url):
    async with client.get(url) as response:
        if response.status >= 300:
            msg = f'unexpected response for {response.url!r}: {response.status}'
            raise http.client.HTTPException(msg)
        return (await response.text())
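# A minimal usage sketch for fetch_diff, not part of the original code. It
# assumes the `client` argument is an aiohttp.ClientSession (the async
# `client.get(url)` context manager matches that API); the URL below is a
# placeholder, not taken from the source.
import asyncio
import aiohttp

async def _example_fetch_diff(url='https://example.org/changes.diff'):
    async with aiohttp.ClientSession() as session:
        # Delegate the request and error handling to fetch_diff above.
        return await fetch_diff(session, url)

# asyncio.run(_example_fetch_diff())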
def test_get_history(self): with open(filepath('test-patch-adds-items.json')) as f: patch = f.read() with self.client as client: res1 = client.patch( '/d/', data=patch, content_type='application/json', headers={'Authorization': 'Bearer ' + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'}) patch_url = urlparse(res1.headers['Location']).path client.post( patch_url + 'messages', data='{"message": "Here is my patch"}', content_type='application/json', headers={'Authorization': 'Bearer ' + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'}) client.post( patch_url + 'messages', data='{"message": "Looks good to me"}', content_type='application/json', headers={'Authorization': 'Bearer ' + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'}) client.post( patch_url + 'merge', buffered=True, headers={'Authorization': 'Bearer ' + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'}) res3 = client.get('/h', headers={'Accept': 'application/ld+json'}) self.assertEqual(res3.status_code, http.client.SEE_OTHER) self.assertEqual( urlparse(res3.headers['Location']).path, '/h.jsonld') res4 = client.get('/history.jsonld?inline-context') self.assertEqual(res4.status_code, http.client.OK) self.assertEqual( res4.headers['Content-Type'], 'application/ld+json') jsonld = res4.get_data(as_text=True) g = ConjunctiveGraph() g.parse(format='json-ld', data=jsonld) # Initial data load self.assertIn( # None means any (HOST['h#change-1'], PROV.endedAtTime, None), g) self.assertIn( (HOST['h#change-1'], PROV.used, HOST['d?version=0']), g) self.assertIn( (HOST['d?version=0'], PROV.specializationOf, HOST['d']), g) self.assertIn( (HOST['h#change-1'], RDFS.seeAlso, HOST['h#patch-request-1']), g) self.assertIn( (HOST['h#patch-request-1'], FOAF.page, HOST['patches/1/']), g) self.assertNotIn( (HOST['h#patch-request-1'], AS.replies, HOST['h#patch-request-1-comments']), g) self.assertIn( (HOST['h#change-1'], PROV.used, HOST['h#patch-1']), g) self.assertIn( (HOST['h#patch-1'], FOAF.page, HOST['patches/1/patch.jsonpatch']), g) self.assertIn( (HOST['h#change-1'], PROV.generated, HOST['d?version=1']), g) self.assertIn( (HOST['d?version=1'], PROV.specializationOf, HOST['d']), g) # Change from first submitted patch self.assertIn( # None means any (HOST['h#change-2'], PROV.startedAtTime, None), g) self.assertIn( # None means any (HOST['h#change-2'], PROV.endedAtTime, None), g) start = g.value( subject=HOST['h#change-2'], predicate=PROV.startedAtTime) self.assertEqual(start.datatype, XSD.dateTime) self.assertRegex(start.value.isoformat(), W3CDTF) end = g.value( subject=HOST['h#change-2'], predicate=PROV.endedAtTime) self.assertEqual(end.datatype, XSD.dateTime) self.assertRegex(end.value.isoformat(), W3CDTF) self.assertIn( (HOST['h#change-2'], PROV.wasAssociatedWith, URIRef('https://orcid.org/1234-5678-9101-112X')), g) self.assertIn( (HOST['h#change-2'], PROV.wasAssociatedWith, URIRef('https://orcid.org/1211-1098-7654-321X')), g) for association in g.subjects( predicate=PROV.agent, object=URIRef('https://orcid.org/1234-5678-9101-112X')): role = g.value(subject=association, predicate=PROV.hadRole) self.assertIn(role, (HOST['v#submitted'], HOST['v#updated'])) merger = g.value( predicate=PROV.agent, object=URIRef('https://orcid.org/1211-1098-7654-321X')) self.assertIn( (HOST['h#change-2'], PROV.qualifiedAssociation, merger), g) self.assertIn( (merger, PROV.hadRole, HOST['v#merged']), g) self.assertIn( (HOST['h#change-2'], PROV.used, HOST['d?version=1']), g) self.assertIn( (HOST['d?version=1'], PROV.specializationOf, HOST['d']), g) 
self.assertIn( (HOST['h#change-2'], RDFS.seeAlso, HOST['h#patch-request-2']), g) self.assertIn( (HOST['h#patch-request-2'], FOAF.page, HOST['patches/2/']), g) self.assertIn( (HOST['h#patch-request-2'], AS.replies, HOST['h#patch-request-2-comments']), g) commentCount = g.value( subject=HOST['h#patch-request-2-comments'], predicate=AS.totalItems) self.assertEqual(commentCount.value, 2) self.assertIn( (HOST['h#patch-request-2-comments'], AS.first, HOST['h#patch-request-2-comment-1']), g) self.assertIn( (HOST['h#patch-request-2-comments'], AS.last, HOST['h#patch-request-2-comment-2']), g) self.assertIn( (HOST['h#patch-request-2-comments'], AS.items, HOST['h#patch-request-2-comment-1']), g) self.assertIn( (HOST['h#patch-request-2-comments'], AS.items, HOST['h#patch-request-2-comment-2']), g) self.assertIn( (HOST['h#patch-request-2-comment-1'], RDF.type, AS.Note), g) self.assertIn( (HOST['h#patch-request-2-comment-1'], AS.attributedTo, URIRef('https://orcid.org/1234-5678-9101-112X')), g) self.assertIn( # None means any (HOST['h#patch-request-2-comment-1'], AS.published, None), g) comment1_media_type = g.value( subject=HOST['h#patch-request-2-comment-1'], predicate=AS.mediaType) self.assertEqual(comment1_media_type.value, 'text/plain') comment1_content = g.value( subject=HOST['h#patch-request-2-comment-1'], predicate=AS.content) self.assertEqual(comment1_content.value, 'Here is my patch') self.assertIn( (HOST['h#patch-request-2-comment-2'], RDF.type, AS.Note), g) self.assertIn( (HOST['h#patch-request-2-comment-2'], AS.attributedTo, URIRef('https://orcid.org/1211-1098-7654-321X')), g) self.assertIn( # None means any (HOST['h#patch-request-2-comment-2'], AS.published, None), g) comment2_media_type = g.value( subject=HOST['h#patch-request-2-comment-2'], predicate=AS.mediaType) self.assertEqual(comment2_media_type.value, 'text/plain') comment2_content = g.value( subject=HOST['h#patch-request-2-comment-2'], predicate=AS.content) self.assertEqual(comment2_content.value, 'Looks good to me') self.assertIn( (HOST['h#change-2'], PROV.used, HOST['h#patch-2']), g) self.assertIn( (HOST['h#patch-2'], FOAF.page, HOST['patches/2/patch.jsonpatch']), g) self.assertIn( (HOST['h#change-2'], PROV.generated, HOST['d?version=2']), g) self.assertIn( (HOST['d?version=2'], PROV.specializationOf, HOST['d']), g)
def _login(dashboard_url, domain, username, password, cafile=None):
    """Login to the website to get a session.

    :param dashboard_url: The URL of the dashboard to log in to.
    :type dashboard_url: str
    :param domain: the domain to login into
    :type domain: str
    :param username: the username to login as
    :type username: str
    :param password: the password to use to login
    :type password: str
    :returns: tuple of (client, response) where response is the page after
              logging in.
    :rtype: (requests.sessions.Session, requests.models.Response)
    :raises: FailedAuth if the authorisation doesn't work
    """
    auth_url = '{}/auth/login/'.format(dashboard_url)

    # start session, get csrftoken
    client = requests.session()
    client.get(auth_url, verify=cafile)
    if 'csrftoken' in client.cookies:
        csrftoken = client.cookies['csrftoken']
    else:
        raise Exception("Missing csrftoken")

    # build and send post request
    overcloud_auth = openstack_utils.get_overcloud_auth()
    if overcloud_auth['OS_AUTH_URL'].endswith("v2.0"):
        api_version = 2
    else:
        api_version = 3
    keystone_client = openstack_utils.get_keystone_client(overcloud_auth)
    catalog = keystone_client.service_catalog.get_endpoints()
    logging.info(catalog)
    if api_version == 2:
        region = catalog['identity'][0]['publicURL']
    else:
        region = [i['url']
                  for i in catalog['identity']
                  if i['interface'] == 'public'][0]

    auth = {
        'domain': domain,
        'username': username,
        'password': password,
        'csrfmiddlewaretoken': csrftoken,
        'next': '/horizon/',
        'region': region,
    }

    # In the minimal test deployment /horizon/project/ is unauthorized,
    # this does not occur in a full deployment and is probably due to
    # services/information missing that horizon wants to display data
    # for. Redirect to /horizon/identity/ instead.
    if (openstack_utils.get_os_release() >=
            openstack_utils.get_os_release('xenial_queens')):
        auth['next'] = '/horizon/identity/'
    if (openstack_utils.get_os_release() >=
            openstack_utils.get_os_release('bionic_stein')):
        auth['region'] = 'default'
    if api_version == 2:
        del auth['domain']

    logging.info('POST data: "{}"'.format(auth))
    response = client.post(auth_url,
                           data=auth,
                           headers={'Referer': auth_url},
                           verify=cafile)

    # NOTE(ajkavanagh) there used to be a trusty/icehouse test in the
    # amulet test, but as the zaza tests only test from trusty/mitaka
    # onwards, the test has been dropped
    if (openstack_utils.get_os_release() >=
            openstack_utils.get_os_release('bionic_stein')):
        expect = "Sign Out"
        # the dashboard seems to require region to be 'default' in this
        # test configuration
        region = 'default'
    else:
        expect = 'Projects - OpenStack Dashboard'

    if expect not in response.text:
        msg = 'FAILURE code={} text="{}"'.format(response, response.text)
        logging.info("Something went wrong: {}".format(msg))
        raise FailedAuth(msg)
    logging.info("Logged in okay")
    return client, response
def test_context_versioning(self):
    with open(filepath('test-patch-modify-context.json')) as f:
        patch = f.read()
    with self.client as client:
        res = client.patch(
            '/d/',
            data=patch,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        patch_url = urlparse(res.headers['Location']).path
        res = client.post(
            patch_url + 'merge',
            buffered=True,
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.NO_CONTENT)
        res = client.get('/d.json?version=0',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        ctx = json.loads(res.get_data(as_text=True)).get('@context', None)
        self.assertIsNone(ctx)
        res = client.get('/c?version=0',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.NOT_FOUND)
        res = client.get('/d.json?version=1',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        self.assertEqual(
            res.headers['Cache-Control'],
            'public, max-age={}'.format(cache.LONG_TIME))
        ctx = json.loads(res.get_data(as_text=True))['@context']
        self.assertEqual(
            ctx[0], 'http://localhost.localdomain:5000/c?version=1')
        res = client.get('/c?version=1',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        self.assertEqual(
            res.headers['Cache-Control'],
            'public, max-age={}'.format(cache.LONG_TIME))
        ctx = json.loads(res.get_data(as_text=True))['@context']
        self.assertNotIn('foobar', ctx)
        res = client.get('/d.json?version=2',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        ctx = json.loads(res.get_data(as_text=True))['@context']
        self.assertEqual(
            ctx[0], 'http://localhost.localdomain:5000/c?version=2')
        res = client.get('/c?version=2',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.OK)
        ctx = json.loads(res.get_data(as_text=True))['@context']
        self.assertIn('foobar', ctx)
def test_list_me_unauthorized(client):
    response = client.get('/api/me/thoughts/')
    assert http.client.UNAUTHORIZED == response.status_code
def test_get_non_existing_thought(client, thought_fixture):
    thought_id = 123456
    response = client.get(f'/api/thoughts/{thought_id}/')
    assert http.client.NOT_FOUND == response.status_code
def test_get_history(self): with open(filepath('test-patch-adds-items.json')) as f: patch = f.read() with self.client as client: res1 = client.patch( '/d/', data=patch, content_type='application/json', headers={'Authorization': 'Bearer ' + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'}) patch_url = urlparse(res1.headers['Location']).path client.post( patch_url + 'merge', headers={'Authorization': 'Bearer ' + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'}) res2 = client.get('/h') self.assertEqual(res2.status_code, http.client.OK) self.assertEqual( res2.headers['Content-Type'], 'application/ld+json') jsonld = res2.get_data(as_text=True) g = ConjunctiveGraph() g.parse(format='json-ld', data=jsonld) # Initial data load self.assertIn( # None means any (PERIODO['p0h#change-1'], PROV.endedAtTime, None), g) self.assertIn( (PERIODO['p0h#change-1'], PROV.used, PERIODO['p0d?version=0']), g) self.assertIn( (PERIODO['p0d?version=0'], PROV.specializationOf, PERIODO['p0d']), g) self.assertIn( (PERIODO['p0h#change-1'], PROV.used, PERIODO['p0h#patch-1']), g) self.assertIn( (PERIODO['p0h#patch-1'], FOAF.page, PERIODO['p0patches/1/patch.jsonpatch']), g) self.assertIn( (PERIODO['p0h#change-1'], PROV.generated, PERIODO['p0d?version=1']), g) self.assertIn( (PERIODO['p0d?version=1'], PROV.specializationOf, PERIODO['p0d']), g) self.assertIn( (PERIODO['p0h#change-1'], PROV.generated, PERIODO['p0trgkv?version=1']), g) self.assertIn( (PERIODO['p0trgkv?version=1'], PROV.specializationOf, PERIODO['p0trgkv']), g) self.assertIn( (PERIODO['p0h#change-1'], PROV.generated, PERIODO['p0trgkvwbjd?version=1']), g) self.assertIn( (PERIODO['p0trgkvwbjd?version=1'], PROV.specializationOf, PERIODO['p0trgkvwbjd']), g) # Change from first submitted patch self.assertIn( # None means any (PERIODO['p0h#change-2'], PROV.startedAtTime, None), g) self.assertIn( # None means any (PERIODO['p0h#change-2'], PROV.endedAtTime, None), g) start = g.value( subject=PERIODO['p0h#change-2'], predicate=PROV.startedAtTime) self.assertEqual(start.datatype, XSD.dateTime) self.assertRegex(start.value.isoformat(), W3CDTF) end = g.value( subject=PERIODO['p0h#change-2'], predicate=PROV.endedAtTime) self.assertEqual(end.datatype, XSD.dateTime) self.assertRegex(end.value.isoformat(), W3CDTF) self.assertIn( (PERIODO['p0h#change-2'], PROV.wasAssociatedWith, URIRef('http://orcid.org/1234-5678-9101-112X')), g) self.assertIn( (PERIODO['p0h#change-2'], PROV.wasAssociatedWith, URIRef('http://orcid.org/1211-1098-7654-321X')), g) for association in g.subjects( predicate=PROV.agent, object=URIRef('http://orcid.org/1234-5678-9101-112X')): role = g.value(subject=association, predicate=PROV.hadRole) self.assertIn(role, (PERIODO['p0v#submitted'], PERIODO['p0v#updated'])) merger = g.value( predicate=PROV.agent, object=URIRef('http://orcid.org/1211-1098-7654-321X')) self.assertIn( (PERIODO['p0h#change-2'], PROV.qualifiedAssociation, merger), g) self.assertIn( (merger, PROV.hadRole, PERIODO['p0v#merged']), g) self.assertIn( (PERIODO['p0h#change-2'], PROV.used, PERIODO['p0d?version=1']), g) self.assertIn( (PERIODO['p0d?version=1'], PROV.specializationOf, PERIODO['p0d']), g) self.assertIn( (PERIODO['p0h#change-2'], PROV.used, PERIODO['p0h#patch-2']), g) self.assertIn( (PERIODO['p0h#patch-2'], FOAF.page, PERIODO['p0patches/2/patch.jsonpatch']), g) self.assertIn( (PERIODO['p0h#change-2'], PROV.generated, PERIODO['p0d?version=2']), g) self.assertIn( (PERIODO['p0d?version=2'], PROV.specializationOf, PERIODO['p0d']), g) self.assertIn( (PERIODO['p0h#change-2'], 
PROV.generated, PERIODO['p0trgkv?version=2']), g) self.assertIn( (PERIODO['p0trgkv?version=2'], PROV.specializationOf, PERIODO['p0trgkv']), g) self.assertIn( (PERIODO['p0trgkv?version=2'], PROV.wasRevisionOf, PERIODO['p0trgkv?version=1']), g) entities = 0 for _, _, version in g.triples( (PERIODO['p0h#change-2'], PROV.generated, None)): entity = g.value(subject=version, predicate=PROV.specializationOf) self.assertEqual(str(entity) + '?version=2', str(version)) entities += 1 self.assertEqual(entities, 5)
def test_versioning(self):
    with open(filepath('test-patch-adds-items.json')) as f:
        patch1 = f.read()
    with open(filepath('test-patch-add-period.json')) as f:
        patch2 = f.read()
    with self.client as client:
        res = client.patch(
            '/d/',
            data=patch1,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        patch_url = urlparse(res.headers['Location']).path
        res = client.post(
            patch_url + 'merge',
            buffered=True,
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.NO_CONTENT)
        res = client.patch(
            '/d/',
            data=patch2,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'NTAwNWViMTgtYmU2Yi00YWMwLWIwODQtMDQ0MzI4OWIzMzc4'})
        patch_url = urlparse(res.headers['Location']).path
        res = client.post(
            patch_url + 'merge',
            buffered=True,
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.NO_CONTENT)
        res = client.get('/trgkv?version=0',
                         headers={'Accept': 'application/json'},
                         follow_redirects=True)
        self.assertEqual(res.status_code, http.client.NOT_FOUND)
        for version in range(1, 4):
            res = client.get('/trgkv?version={}'.format(version),
                             headers={'Accept': 'application/json'})
            self.assertEqual(res.status_code, http.client.SEE_OTHER)
            self.assertEqual(
                '/' + res.headers['Location'].split('/')[-1],
                '/trgkv.json?version={}'.format(version))
            res = client.get('/trgkv.json?version={}'.format(version))
            self.assertEqual(res.status_code, http.client.OK)
            self.assertEqual(res.headers['Content-Type'], 'application/json')
            ctx = json.loads(res.get_data(as_text=True))['@context']
            self.assertEqual(
                ctx[0],
                'http://localhost.localdomain:5000/c?version={}'.format(
                    version))
        res = client.get('/history.jsonld?inline-context')
        self.assertEqual(
            res.headers['Cache-Control'], 'public, max-age=0')
        self.assertEqual(
            res.headers['X-Accel-Expires'], '{}'.format(cache.MEDIUM_TIME))
def test_government(client):
    """Tests the /government route."""
    response = client.get(flask.url_for('government.government_home'))
    assert response.status_code == http.client.OK
def get_mode(client):
    print("mode: {}".format(
        client.get("config/mode")['body'].decode('UTF-8')))
def list_colors(client):
    print("colors: {}".format(
        client.get("config/color_specs")['body'].decode('UTF-8')))
def test_home(client):
    """Tests the / route."""
    response = client.get(flask.url_for('home'))
    assert response.status_code == http.client.OK
def test_group_sibling_graphs(self):
    with open(filepath('test-graph.json')) as f:
        graph_json = f.read()
    with self.client as client:
        res = client.put(
            '/graphs/places/A',
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        res = client.put(
            '/graphs/places/B',
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        res = client.put(
            '/graphs/not-places/C',
            data=graph_json,
            content_type='application/json',
            headers={'Authorization': 'Bearer '
                     + 'ZjdjNjQ1ODQtMDc1MC00Y2I2LThjODEtMjkzMmY1ZGFhYmI4'})
        self.assertEqual(res.status_code, http.client.CREATED)
        res = client.get('/graphs/places/')
        self.assertEqual(res.status_code, http.client.OK)
        data = json.loads(res.get_data(as_text=True))
        self.assertEqual(
            {'http://localhost.localdomain:5000/graphs/places/A',
             'http://localhost.localdomain:5000/graphs/places/B'},
            set(data['graphs'].keys()))
        self.assertEqual(
            'http://localhost.localdomain:5000/graphs/places/',
            data['@context']['graphs']['@id'])
        res = client.get('/graphs/places.json')
        self.assertEqual(res.status_code, http.client.OK)
        data = json.loads(res.get_data(as_text=True))
        self.assertEqual(
            {'http://localhost.localdomain:5000/graphs/places/A',
             'http://localhost.localdomain:5000/graphs/places/B'},
            set(data['graphs'].keys()))
        self.assertEqual(
            'http://localhost.localdomain:5000/graphs/places/',
            data['@context']['graphs']['@id'])
        res = client.get('/graphs/places')
        self.assertEqual(res.status_code, http.client.OK)
        data = json.loads(res.get_data(as_text=True))
        self.assertEqual(
            {'http://localhost.localdomain:5000/graphs/places/A',
             'http://localhost.localdomain:5000/graphs/places/B'},
            set(data['graphs'].keys()))
        self.assertEqual(
            'http://localhost.localdomain:5000/graphs/places/',
            data['@context']['graphs']['@id'])
        self.assertEqual(
            res.headers['Content-Disposition'],
            'attachment; filename="periodo-graph-places.json"')
        res = client.get('/graphs/')
        self.assertEqual(res.status_code, http.client.OK)
        data = json.loads(res.get_data(as_text=True))
        self.assertEqual(
            {'http://localhost.localdomain:5000/graphs/places/A',
             'http://localhost.localdomain:5000/graphs/places/B',
             'http://localhost.localdomain:5000/graphs/not-places/C',
             'http://localhost.localdomain:5000/d/'},
            set(data['graphs'].keys()))
        self.assertEqual(
            'http://localhost.localdomain:5000/graphs/',
            data['@context']['graphs']['@id'])
        self.assertEqual(
            res.headers['Content-Disposition'],
            'attachment; filename="periodo-graphs.json"')
def get_colorspec(client):
    print("colorspec: {}".format(
        client.get("config/lamp/color_name")['body'].decode('UTF-8')))
def test_get_not_found_product(client):
    response = client.get('/api/product/1234')
    assert http.client.NOT_FOUND == response.status_code
def list(client):
    print("pin: {}".format(
        client.get("config/pin")['body'].decode('UTF-8')))
    print("num_pixels: {}".format(
        client.get("config/num_pixels")['body'].decode('UTF-8')))