def test_post_contrib_one_data_source_with_id(app):
    """Create a contributor carrying one Ruspell preprocess (via /contributors)."""
    # NOTE(review): another test with this exact name appears later in the file;
    # pytest collects only the last definition — confirm which one is intended.
    payload = {"id": "id_test", "name": "name_test", "data_prefix": "AAA"}
    payload["preprocesses"] = [{
        "id": "toto",
        "type": "Ruspell",
        "source_params": {
            "tc_data": {"key": "data_sources_id", "value": "datasource_stif"},
            "bano_data": {"key": "data_sources_id", "value": "bano_75"},
        },
    }]
    raw = post(app, '/contributors', json.dumps(payload))
    assert raw.status_code == 201, print(to_json(raw))
    raw = app.get('/contributors/id_test/')
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    assert len(r["contributors"][0]["preprocesses"]) == 1
def test_post_ds_duplicate_two_data_source(app, contributor):
    """Posting two data sources with the same id must yield a 409 conflict."""
    first = {"id": "duplicate_id", "name": "data_source_name1",
             "input": {"type": "url", "url": "http://stif.com/od.zip"}}
    raw = post(app, '/contributors/id_test/data_sources', json.dumps(first))
    assert raw.status_code == 201, print(to_json(raw))
    # same id, different name: must be rejected as a duplicate
    second = {"id": "duplicate_id", "name": "data_source_name2",
              "input": {"type": "url", "url": "http://stif.com/od.zip"}}
    raw = post(app, '/contributors/id_test/data_sources', json.dumps(second))
    payload = to_json(raw)
    assert raw.status_code == 409, print(payload)
    assert payload['error'] == "Duplicate data_source id 'duplicate_id'"
def test_contributor_export(app):
    """Triggering a contributor export creates a job retrievable through /jobs."""
    raw = post(app, '/contributors', '{"id": "id_test", "name":"name_test", "data_prefix":"AAA"}')
    assert raw.status_code == 201
    raw = post(app, '/contributors/id_test/actions/export', {})
    assert raw.status_code == 201
    r = to_json(raw)
    assert 'job' in r
    job = r.get('job')
    assert job.get('action_type') == 'contributor_export'
    # the job shows up in the global listing...
    raw_job = app.get('/jobs')
    assert raw_job.status_code == 200
    r_jobs = to_json(raw_job)
    assert len(r_jobs['jobs']) == 1
    assert r_jobs.get('jobs')[0]['id'] == job['id']
    # ...and is addressable by its id...
    raw_job = app.get('/jobs/{}'.format(job['id']))
    assert raw_job.status_code == 200
    r_jobs = to_json(raw_job)
    assert len(r_jobs['jobs']) == 1
    assert r_jobs.get('jobs')[0]['id'] == job['id']
    # ...while an unknown job id yields 404
    raw_job = app.get('/jobs/toto')
    assert raw_job.status_code == 404
def test_patch_one_data_source_name_of_two_and_add_one(app):
    """Patch renames the second of two data sources and appends a third one."""
    contrib = {"id": "id_test", "name": "name_test", "data_prefix": "AAA",
               "data_sources": [
                   {"name": "data_source_name", "data_format": "Neptune"},
                   {"name": "data_source_2", "data_format": "Neptune"},
               ]}
    raw = post(app, '/contributors', json.dumps(contrib))
    r = to_json(raw)
    print("created contrib : ")
    print(r)
    assert raw.status_code == 201, print(r)
    # rename the data source created second, using its generated id
    renamed = {"id": r["contributor"]["data_sources"][1]["id"],
               "name": "name_modified"}
    r["contributor"]["data_sources"][0] = renamed
    patch_payload = {"data_sources": [renamed, {"name": "data_source_3"}]}
    print("patching data with ", json.dumps(patch_payload))
    raw = patch(app, '/contributors/id_test', json.dumps(patch_payload))
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    assert len(r["contributor"]["data_sources"]) == 3
    after = r["contributor"]["data_sources"]
    assert after[0]["data_format"] == "Neptune"
    assert after[1]["data_format"] == "Neptune"
    assert after[2]["data_format"] == "gtfs"
    assert after[0]["name"] == "data_source_name"
    assert after[1]["name"] == "name_modified"
    assert after[2]["name"] == "data_source_3"
def test_patch_contrib_data_source_with_full_contributor(app):
    """Patch a contributor by re-sending its full representation (via /contributors)."""
    contrib = {
        "id": "id_test",
        "name": "name_test",
        "data_prefix": "AAA",
        "data_sources": [{"name": "data_source_name",
                          "input": {"type": "url", "url": "http://stif.com/od.zip"}}],
    }
    raw = post(app, '/contributors', json.dumps(contrib))
    r = to_json(raw)
    assert raw.status_code == 201, print(r)
    # tweak the name in the returned representation and send the whole thing back
    r["contributors"][0]["data_sources"][0]["name"] = "name_modified"
    raw = patch(app, '/contributors/id_test', json.dumps(r["contributors"][0]))
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    assert len(r["contributors"][0]["data_sources"]) == 1
    assert r["contributors"][0]["data_sources"][0]["name"] == "name_modified"
def test_post_contrib_one_data_source_with_data_format(app):
    """A posted data source keeps its explicit data_format and input (via /contributors)."""
    contrib = {
        "id": "id_test",
        "name": "name_test",
        "data_prefix": "AAA",
        "data_sources": [{"name": "data_source_name",
                          "data_format": "Neptune",
                          "input": {"type": "url", "url": "http://stif.com/od.zip"}}],
    }
    raw = post(app, '/contributors', json.dumps(contrib))
    assert raw.status_code == 201, print(to_json(raw))
    raw = app.get('/contributors/id_test/')
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    sources = r["contributors"][0]["data_sources"]
    assert len(sources) == 1
    assert sources[0]["data_format"] == "Neptune"
    assert sources[0]["input"]["type"] == "url"
    assert sources[0]["input"]["url"] == "http://stif.com/od.zip"
def test_post_ds_two_data_source(app, contributor):
    """Two data sources posted without ids get distinct generated ids (via /data_sources)."""
    for ds_name in ("data_source_name1", "data_source_name2"):
        ds = {"name": ds_name,
              "input": {"type": "url", "url": "http://stif.com/od.zip"}}
        raw = post(app, '/contributors/id_test/data_sources', json.dumps(ds))
        assert raw.status_code == 201, print(to_json(raw))
    raw = app.get('/contributors/id_test/data_sources')
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    assert len(r["data_sources"]) == 2
    assert r["data_sources"][0]["id"] != r["data_sources"][1]["id"]
def test_post_grid_calendar_returns_success_status(app, coverage, get_app_context):
    """Uploading a grid calendar stores it in GridFS; re-uploading replaces the file."""
    # NOTE(review): a second definition of this test exists later in the file;
    # pytest collects only the last one — confirm the duplication is intended.
    archive_name = 'export_calendars.zip'
    path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        'fixtures/gridcalendar/', archive_name)
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    r = to_json(raw)
    assert raw.status_code == 200
    assert r.get('message') == 'OK'
    raw = app.get('/coverages')
    r = to_json(raw)
    assert len(r['coverages']) == 1
    assert 'grid_calendars_id' in r['coverages'][0]
    gridfs = GridFS(mongo.db)
    file_id = r['coverages'][0]['grid_calendars_id']
    assert gridfs.exists(ObjectId(file_id))
    # we update the file (it's the same, but that's not the point)
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    assert raw.status_code == 200
    raw = app.get('/coverages')
    r = to_json(raw)
    assert len(r['coverages']) == 1
    assert 'grid_calendars_id' in r['coverages'][0]
    # it should be another file
    assert file_id != r['coverages'][0]['grid_calendars_id']
    # the previous file has been deleted
    assert not gridfs.exists(ObjectId(file_id))
    # and the new one exists
    assert gridfs.exists(ObjectId(r['coverages'][0]['grid_calendars_id']))
def test_add_coverage_no_name(app):
    """A coverage posted without a name is rejected and nothing is stored."""
    resp = post(app, '/coverages', '{"id": "id_test"}')
    body = to_json(resp)
    assert 'error' in body
    assert resp.status_code == 400
    resp = app.get('/coverages')
    body = to_json(resp)
    assert len(body["coverages"]) == 0
def test_add_coverage_no_prefix(app):
    """A contributor posted without a data_prefix is rejected and nothing is stored."""
    # NOTE(review): despite its name this test exercises /contributors, not /coverages.
    resp = post(app, '/contributors', '{"id": "id_test", "name":"name_test"}')
    body = to_json(resp)
    assert 'error' in body
    assert resp.status_code == 400
    resp = app.get('/contributors')
    body = to_json(resp)
    assert len(body["contributors"]) == 0
def test_post_contrib_one_data_source_without_id(app):
    """A data source posted without an id is accepted and stored."""
    contrib = {"id": "id_test", "name": "name_test", "data_prefix": "AAA",
               "data_sources": [{"name": "data_source_name"}]}
    raw = post(app, '/contributors', json.dumps(contrib))
    print(to_json(raw))
    assert raw.status_code == 201
    raw = app.get('/contributors/id_test/')
    r = to_json(raw)
    print(r)
    assert raw.status_code == 200
    assert len(r["contributor"]["data_sources"]) == 1
def test_get_ntfs_bad_requedted_type(app, coverage_obj, fixture_dir):
    """Requesting an unknown data type for an environment returns 404."""
    # NOTE(review): "requedted" is a typo for "requested", and another test with
    # this exact name (expecting 400 / 'Bad data type') exists later in the file —
    # pytest collects only the later definition; confirm which behavior is current.
    with get_valid_ntfs_memory_archive() as (ntfs_file_name, ntfs_zip_memory):
        files = {"file": (ntfs_zip_memory, ntfs_file_name)}
        with requests_mock.Mocker() as m:
            m.post("http://tyr.prod/v0/instances/test", text="ok")
            raw = app.post("/coverages/test/environments/production/data_update", data=files)
            r = to_json(raw)
            assert r["message"].startswith("Valid fusio file provided")
            raw = app.get("/coverages/test/environments/production/data/ntfs_error")
            r = to_json(raw)
            assert raw.status_code == 404
            print(r["message"])
            assert r["message"].startswith("bad data type")
def test_post_ds_one_data_source_without_input(app, contributor):
    """A data source lacking an input section is rejected (via /data_sources)."""
    raw = post(app, '/contributors/id_test/data_sources',
               json.dumps({"name": "data_source_name"}))
    r = to_json(raw)
    assert 'error' in r
    assert raw.status_code == 400, print(to_json(raw))
    # nothing must have been stored
    raw = app.get('/contributors/id_test/data_sources')
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    assert len(r["data_sources"]) == 0
def test_get_ntfs_bad_requedted_type(app, coverage_obj, fixture_dir):
    """Requesting an unknown data type returns 400 with an explicit error."""
    # NOTE(review): "requedted" is a typo for "requested"; an earlier test shares
    # this exact name but expects 404 / "bad data type" — only this later
    # definition is collected by pytest.
    with get_valid_ntfs_memory_archive() as (ntfs_file_name, ntfs_zip_memory):
        files = {'file': (ntfs_zip_memory, ntfs_file_name)}
        with requests_mock.Mocker() as m:
            m.post('http://tyr.prod/v0/instances/test', text='ok')
            raw = app.post('/coverages/test/environments/production/data_update',
                           data=files)
            r = to_json(raw)
            assert r['message'].startswith('Valid fusio file provided')
            raw = app.get('/coverages/test/environments/production/data/ntfs_error')
            r = to_json(raw)
            assert raw.status_code == 400
            assert r["error"].startswith('Bad data type')
def test_add_coverage_with_all_env(app):
    """A coverage created with all three environments returns each one intact."""
    raw = post(app, '/coverages', '''{"id": "id_test", "name": "name of the coverage", "environments" : { "preproduction": {"name": "pre", "tyr_url": "http://pre.bar/"}, "production": {"name": "prod", "tyr_url": "http://prod.bar/"}, "integration": {"name": "sim", "tyr_url": "http://int.bar/"} }}''')
    assert raw.status_code == 201
    raw = app.get('/coverages')
    r = to_json(raw)
    assert len(r["coverages"]) == 1
    assert isinstance(r["coverages"], list)
    coverage = r["coverages"][0]
    assert coverage["id"] == "id_test"
    assert coverage["name"] == "name of the coverage"
    assert 'environments' in coverage
    envs = coverage['environments']
    assert 'production' in envs
    assert 'preproduction' in envs
    assert 'integration' in envs
    assert envs['preproduction']['name'] == 'pre'
    assert envs['preproduction']['tyr_url'] == 'http://pre.bar/'
    assert envs['production']['name'] == 'prod'
    assert envs['production']['tyr_url'] == 'http://prod.bar/'
    assert envs['integration']['name'] == 'sim'
    assert envs['integration']['tyr_url'] == 'http://int.bar/'
def test_patch_simple_coverage(app):
    """Patching a coverage name updates it (response read under the "coverages" key)."""
    # NOTE(review): a later test with this exact name reads the "coverage" key
    # instead — pytest collects only the last definition; confirm which response
    # shape is current.
    raw = post(app, '/coverages', '''{"id": "id_test", "name": "name of the coverage"}''')
    assert raw.status_code == 201
    raw = app.get('/coverages')
    r = to_json(raw)
    assert len(r["coverages"]) == 1
    assert isinstance(r["coverages"], list)
    assert r["coverages"][0]["id"] == "id_test"
    assert r["coverages"][0]["name"] == "name of the coverage"
    raw = patch(app, '/coverages/id_test', '{"name": "new name"}')
    assert raw.status_code == 200
    r = to_json(raw)
    assert r["coverages"][0]["id"] == "id_test"
    assert r["coverages"][0]["name"] == "new name"
def test_add_data_source(app, coverage, data_source):
    """Attaching a data source to a coverage works once; a second attach conflicts."""
    ds_id = data_source.get('id')
    raw = post(app, '/coverages/jdr/data_sources', json.dumps({"id": ds_id}))
    r = to_json(raw)
    assert raw.status_code == 200
    attached = r.get('coverages')[0].get('data_sources')
    assert isinstance(attached, list)
    assert len(attached) == 1
    assert attached[0] == ds_id
    # test add existing data_source in coverage
    raw = post(app, '/coverages/jdr/data_sources', json.dumps({"id": ds_id}))
    r = to_json(raw)
    assert raw.status_code == 409
    assert r.get('error') == 'Data source id {} already exists in coverage jdr.'.format(ds_id)
def test_get_coverage_empty_success(app):
    """GET /coverages (with and without trailing slash) succeeds and is empty."""
    assert app.get('/coverages').status_code == 200
    raw = app.get('/coverages/')
    assert raw.status_code == 200
    body = to_json(raw)
    assert len(body["coverages"]) == 0
def test_patch_simple_coverage(app):
    """Patching a coverage name updates it (response read under the "coverage" key)."""
    # NOTE(review): an earlier test shares this exact name but reads the
    # "coverages" key — only this later definition is collected by pytest.
    raw = post(app, '/coverages', '''{"id": "id_test", "name": "name of the coverage"}''')
    assert raw.status_code == 201
    raw = app.get('/coverages')
    r = to_json(raw)
    assert len(r["coverages"]) == 1
    assert isinstance(r["coverages"], list)
    assert r["coverages"][0]["id"] == "id_test"
    assert r["coverages"][0]["name"] == "name of the coverage"
    raw = patch(app, '/coverages/id_test', '{"name": "new name"}')
    assert raw.status_code == 200
    r = to_json(raw)
    assert r["coverage"]["id"] == "id_test"
    assert r["coverage"]["name"] == "new name"
def test_post_osm_returns_invalid_coverage(app, fixture_dir):
    """Posting data to an unknown coverage yields 404 with a "bad coverage" message."""
    # NOTE(review): a later test with this exact name expects an 'error' key
    # instead of 'message' — pytest collects only the last definition.
    path = os.path.join(fixture_dir, 'geo_data/empty_pbf.osm.pbf')
    files = {'file': (open(path, 'rb'), 'empty_pbf.funky_extension')}
    raw = app.post('/coverages/jdr_bug/environments/production/data_update', data=files)
    r = to_json(raw)
    assert raw.status_code == 404
    assert r.get('message') == 'bad coverage jdr_bug'
def test_post_contrib_two_data_source(app):
    """Two data sources posted with a contributor get distinct ids (via /contributors)."""
    contrib = {
        "id": "id_test",
        "name": "name_test",
        "data_prefix": "AAA",
        "data_sources": [
            {"name": "data_source_name",
             "input": {"type": "url", "url": "http://stif.com/od.zip"}},
            {"name": "data_source_name2",
             "input": {"type": "url", "url": "http://stif.com/od.zip"}},
        ],
    }
    raw = post(app, '/contributors', json.dumps(contrib))
    assert raw.status_code == 201
    raw = app.get('/contributors/id_test/')
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    sources = r["contributors"][0]["data_sources"]
    assert len(sources) == 2
    assert sources[0]["id"] != sources[1]["id"]
def test_patch_data_source_with_full_contributor(app):
    """Patch a contributor by re-sending its full returned representation."""
    contrib = {"id": "id_test", "name": "name_test", "data_prefix": "AAA",
               "data_sources": [{"name": "data_source_name"}]}
    raw = post(app, '/contributors', json.dumps(contrib))
    r = to_json(raw)
    assert raw.status_code == 201, print(r)
    # rename the data source in the returned representation, send everything back
    r["contributor"]["data_sources"][0]["name"] = "name_modified"
    print("patching data with ", json.dumps(r["contributor"]))
    raw = patch(app, '/contributors/id_test', json.dumps(r["contributor"]))
    r = to_json(raw)
    print(r)
    assert raw.status_code == 200, print(r)
    assert len(r["contributor"]["data_sources"]) == 1
    assert r["contributor"]["data_sources"][0]["name"] == "name_modified"
def test_post_contrib_one_data_source_with_id(app):
    """A client-supplied data source id is rejected on contributor creation."""
    # NOTE(review): an earlier test shares this exact name (preprocess creation,
    # expecting 201) — only this later definition is collected by pytest.
    contrib = {"id": "id_test", "name": "name_test", "data_prefix": "AAA",
               "data_sources": [{"id": "data_source_id", "name": "data_source_name"}]}
    raw = post(app, '/contributors', json.dumps(contrib))
    print(to_json(raw))
    assert raw.status_code == 400
def test_get_contributors_empty_success(app):
    """GET /contributors (with and without trailing slash) succeeds and is empty."""
    assert app.get('/contributors').status_code == 200
    raw = app.get('/contributors/')
    assert raw.status_code == 200
    body = to_json(raw)
    assert len(body["contributors"]) == 0
def test_update_contributor_data_prefix_error(app):
    """The data_prefix of an existing contributor cannot be changed."""
    raw = post(app, '/contributors', '{"id": "id_test", "name": "name_test", "data_prefix":"AAA"}')
    assert raw.status_code == 201
    raw = patch(app, '/contributors/id_test', '{"data_prefix": "AAB"}')
    r = to_json(raw)  # body parsed but only the status matters here
    assert raw.status_code == 400
def test_post_osm_returns_invalid_file_extension_message(app, coverage_obj, fixture_dir):
    """A file with an unsupported extension is rejected with 400."""
    filename = 'empty_pbf.funky_extension'
    path = os.path.join(fixture_dir, 'geo_data/empty_pbf.funky_extension')
    files = {'file': (open(path, 'rb'), 'empty_pbf.funky_extension')}
    raw = app.post('/coverages/test/environments/production/data_update', data=files)
    r = to_json(raw)
    assert raw.status_code == 400
    assert r.get('message').startswith('invalid file provided')
def test_post_pbf_with_bad_param(app, coverage_obj, fixture_dir):
    """Uploading under the wrong multipart field name is rejected with 400."""
    filename = 'empty_pbf.osm.pbf'
    path = os.path.join(fixture_dir, 'geo_data/empty_pbf.osm.pbf')
    # deliberately use 'file_name' instead of the expected 'file' param
    files = {'file_name': (open(path, 'rb'), 'empty_pbf.osm.pbf')}
    raw = app.post('/coverages/test/environments/production/data_update', data=files)
    r = to_json(raw)
    assert raw.status_code == 400
    assert r.get('message') == 'file provided with bad param ("file" param expected)'
def test_post_contrib_no_data_source(app):
    """A contributor created without data sources has an empty data_sources list."""
    # NOTE(review): a later test with this exact name reads "contributors"[0]
    # instead of "contributor" — pytest collects only the last definition.
    raw = post(app, '/contributors', '{"id": "id_test", "name":"name_test", "data_prefix":"AAA"}')
    assert raw.status_code == 201
    raw = app.get('/contributors/id_test/')
    r = to_json(raw)
    print(r)
    assert raw.status_code == 200
    assert len(r["contributor"]["data_sources"]) == 0
def test_post_osm_returns_invalid_coverage(app, fixture_dir):
    """Posting data to an unknown coverage yields 404 with an explicit error."""
    # NOTE(review): an earlier test shares this exact name but checks 'message' —
    # only this later definition is collected by pytest.
    path = os.path.join(fixture_dir, 'geo_data/empty_pbf.osm.pbf')
    files = {'file': (open(path, 'rb'), 'empty_pbf.funky_extension')}
    raw = app.post('/coverages/jdr_bug/environments/production/data_update', data=files)
    r = to_json(raw)
    assert raw.status_code == 404
    assert r.get('error') == 'Coverage jdr_bug not found.'
def test_update_contributor_id_impossible(app):
    """It should not be possible to update the id of an object."""
    raw = post(app, '/contributors', '{"id": "id_test", "name": "name_test", "data_prefix":"AAA"}')
    assert raw.status_code == 201
    raw = patch(app, '/contributors/id_test', '{"id": "bob"}')
    r = to_json(raw)
    assert 'error' in r
    assert raw.status_code == 400
def test_kown_version_status(app, monkeypatch):
    """If TARTARE_VERSION is given at startup, a version is available on /status."""
    # NOTE(review): "kown" looks like a typo for "known" in the test name.
    version = 'v1.42.12'
    monkeypatch.setitem(os.environ, 'TARTARE_VERSION', version)
    raw = app.get('/status')
    r = to_json(raw)
    assert raw.status_code == 200
    assert r.get('version') == version
def test_post_grid_calendar_returns_file_missing_status(app, coverage):
    """A calendar archive missing grid_calendars.txt is rejected (message key)."""
    # NOTE(review): a later test with this exact name checks the 'error' key with
    # different wording — pytest collects only the last definition.
    path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        'fixtures/gridcalendar/export_calendars_without_grid_calendars.zip')
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    r = to_json(raw)
    assert raw.status_code == 400
    assert r.get('message') == 'file(s) missing : grid_calendars.txt'
def test_post_grid_calendar_returns_non_compliant_file_status(app, coverage):
    """A calendar archive with an invalid header is rejected (message key)."""
    # NOTE(review): a later test with this exact name checks the 'error' key with
    # different wording — pytest collects only the last definition.
    path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        'fixtures/gridcalendar/export_calendars_with_invalid_header.zip')
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    r = to_json(raw)
    assert raw.status_code == 400
    assert r.get('message') == 'non-compliant file(s) : grid_periods.txt'
def data_source(app, contributor):
    """Fixture: create one data source on the given contributor and return it.

    Posts a source named "bobette" with data_format "Neptune" and a url input,
    then returns the parsed data source dict from the response.
    """
    # Bug fix: the JSON body previously declared "data_format" twice
    # ("gtfs" then "Neptune"). Duplicate keys in a JSON object are ambiguous;
    # standard parsers keep the last value, so "Neptune" was the effective one.
    # Keep only the effective value to remove the dead, misleading duplicate.
    response = app.post(
        '/contributors/{}/data_sources'.format(contributor.get('id')),
        headers={'Content-Type': 'application/json'},
        data='{"name": "bobette", "data_format": "Neptune",'
             '"input": {"type": "url", "url": "http://stif.com/od.zip"}}')
    return to_json(response)['data_sources'][0]
def test_add_contributors_unique_data_suffix_ok(app):
    """Two contributors with distinct data prefixes can both be created."""
    raw = post(app, '/contributors', '{"id": "id_test1", "name":"name_test1", "data_prefix":"AAA"}')
    assert raw.status_code == 201
    raw = post(app, '/contributors', '{"id": "id_test2", "name":"name_test2", "data_prefix":"AAB"}')
    assert raw.status_code == 201
    raw = app.get('/contributors')
    body = to_json(raw)
    assert len(body["contributors"]) == 2
def test_post_contrib_no_data_source(app):
    """A contributor created without data sources has an empty data_sources list."""
    # NOTE(review): an earlier test shares this exact name but reads the
    # "contributor" key — only this later definition is collected by pytest.
    raw = post(app, '/contributors', '{"id": "id_test", "name":"name_test", "data_prefix":"AAA"}')
    assert raw.status_code == 201
    raw = app.get('/contributors/id_test/')
    r = to_json(raw)
    print(r)
    assert raw.status_code == 200
    assert len(r["contributors"][0]["data_sources"]) == 0
def coverage_with_data_source_tram_lyon(app):
    """Fixture: create coverage "jdr" pre-wired to the "tram_lyon" data source."""
    response = app.post(
        '/coverages',
        headers={'Content-Type': 'application/json'},
        data='{"id": "jdr", "name": "name of the coverage jdr", "data_sources": ["tram_lyon"]}')
    return to_json(response)['coverages'][0]
def test_update_id_impossible(app):
    """It should not be possible to update the id of an object."""
    raw = post(app, '/coverages', '{"id": "id_test", "name": "name_test"}')
    assert raw.status_code == 201
    raw = patch(app, '/coverages/id_test', '{"id": "bob"}')
    r = to_json(raw)
    assert 'error' in r
    assert raw.status_code == 400
def test_update_preprocess_with_id(app):
    """A contributor preprocess can be patched by id (via /contributors)."""
    contrib = {"id": "id_test", "name": "name_test", "data_prefix": "AAA",
               "preprocesses": [{
                   "id": "toto",
                   "type": "Ruspell",
                   "source_params": {
                       "tc_data": {"key": "data_sources_id", "value": "datasource_stif"},
                       "bano_data": {"key": "data_sources_id", "value": "bano_75"},
                   },
               }]}
    raw = post(app, '/contributors', json.dumps(contrib))
    assert raw.status_code == 201, print(to_json(raw))
    raw = app.get('/contributors/id_test/')
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    assert len(r["contributors"][0]["preprocesses"]) == 1
    preprocess_id = r["contributors"][0]["preprocesses"][0]["id"]
    # replace the preprocess entirely through the dedicated endpoint
    new_preprocess = {
        "type": "ComputeDirections",
        "source_params": {
            "tc_data": {"key": "data_sources.data_format", "value": "gtfs"}
        },
    }
    raw = patch(app, '/contributors/id_test/preprocesses/{}'.format(preprocess_id),
                json.dumps(new_preprocess))
    r = to_json(raw)
    assert raw.status_code == 200, print(r)
    assert len(r["preprocesses"]) == 1
    assert r["preprocesses"][0]["type"] == new_preprocess["type"]
    assert r["preprocesses"][0]["source_params"] == new_preprocess["source_params"]
def test_post_pbf_mocked(app, coverage_obj, fixture_dir, mocker):
    """A valid pbf upload triggers the async send-to-tyr task."""
    delay_mock = mocker.patch.object(send_file_to_tyr_and_discard, 'delay')
    path = os.path.join(fixture_dir, 'geo_data/empty_pbf.osm.pbf')
    files = {'file': (open(path, 'rb'), 'empty_pbf.osm.pbf')}
    raw = app.post('/coverages/test/environments/production/data_update', data=files)
    assert delay_mock.called
    r = to_json(raw)
    assert raw.status_code == 200
    assert r.get('message').startswith('Valid osm file provided')
def test_add_coverage_with_env_invalid_url(app):
    """An unknown environment key is rejected with an error mentioning 'environments'."""
    # NOTE(review): a later test with this exact name exists (adds a print of
    # raw.data) — pytest collects only the last definition.
    raw = post(app, '/coverages', '''{"id": "id_test", "name": "name of the coverage", "environments" : {"notvalidenv": {"name": "pre", "tyr_url": "foo"}}}''')
    assert raw.status_code == 400
    body = to_json(raw)
    assert 'error' in body
    assert 'environments' in body['error']
def test_post_grid_calendar_returns_file_missing_status(app, coverage):
    """A calendar archive missing grid_calendars.txt is rejected (error key)."""
    # NOTE(review): an earlier test shares this exact name but checks 'message'
    # with different wording — only this later definition is collected by pytest.
    path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        'fixtures/gridcalendar/export_calendars_without_grid_calendars.zip')
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    r = to_json(raw)
    assert raw.status_code == 400
    assert r.get('error') == 'File(s) missing : grid_calendars.txt.'
def test_post_grid_calendar_returns_non_compliant_file_status(app, coverage):
    """A calendar archive with an invalid header is rejected (error key)."""
    # NOTE(review): an earlier test shares this exact name but checks 'message'
    # with different wording — only this later definition is collected by pytest.
    path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        'fixtures/gridcalendar/export_calendars_with_invalid_header.zip')
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    r = to_json(raw)
    assert raw.status_code == 400
    assert r.get('error') == 'Non-compliant file(s) : grid_periods.txt.'
def test_add_coverage_with_env_invalid_url(app):
    """An unknown environment key is rejected with an error mentioning 'environments'."""
    # NOTE(review): an earlier test shares this exact name — only this later
    # definition is collected by pytest.
    raw = post(app, '/coverages', '''{"id": "id_test", "name": "name of the coverage", "environments" : {"notvalidenv": {"name": "pre", "tyr_url": "foo"}}}''')
    print(raw.data)
    assert raw.status_code == 400
    body = to_json(raw)
    assert 'error' in body
    assert 'environments' in body['error']
def test_update_coverage_forbid_unkown_field(app):
    """Patching a coverage with an unknown field is rejected."""
    # NOTE(review): "unkown" looks like a typo for "unknown" in the test name.
    raw = post(app, '/coverages', '{"id": "id_test", "name": "name_test"}')
    assert raw.status_code == 201
    raw = patch(app, '/coverages/id_test', '{"name": "new_name_test", "foo": "bar"}')
    body = to_json(raw)
    assert raw.status_code == 400
    assert 'error' in body
def test_post_pbf_returns_success_status(app, coverage_obj, fixture_dir):
    """A valid pbf upload is forwarded to the mocked tyr and acknowledged."""
    path = os.path.join(fixture_dir, 'geo_data/empty_pbf.osm.pbf')
    files = {'file': (open(path, 'rb'), 'empty_pbf.osm.pbf')}
    with requests_mock.Mocker() as m:
        m.post('http://tyr.prod/v0/instances/test', text='ok')
        raw = app.post('/coverages/test/environments/production/data_update',
                       data=files)
        assert m.called
    r = to_json(raw)
    assert raw.status_code == 200
    assert r.get('message').startswith('Valid osm file provided')
def test_post_ntfs_mocked(app, coverage_obj, mocker):
    """A valid NTFS upload triggers the async send-to-tyr task."""
    # create ZIP file with fixture before sending it
    delay_mock = mocker.patch.object(send_ntfs_to_tyr, 'delay')
    with get_valid_ntfs_memory_archive() as (ntfs_file_name, ntfs_zip_memory):
        files = {'file': (ntfs_zip_memory, ntfs_file_name)}
        raw = app.post('/coverages/test/environments/production/data_update',
                       data=files)
        assert delay_mock.called
        r = to_json(raw)
        assert raw.status_code == 200
        assert r.get('message').startswith('Valid fusio file provided')
def test_update_contributor_name(app):
    """Patching a contributor's name updates it."""
    raw = post(app, '/contributors', '{"id": "id_test", "name": "name_test", "data_prefix":"AAA"}')
    assert raw.status_code == 201
    raw = patch(app, '/contributors/id_test', '{"name": "new_name_test"}')
    body = to_json(raw)
    assert raw.status_code == 200
    assert body["contributor"]['id'] == "id_test"
    assert body["contributor"]['name'] == "new_name_test"
def test_update_coverage_returns_success_status(app):
    """Patching a coverage's name updates it."""
    raw = post(app, '/coverages', '{"id": "id_test", "name": "name_test"}')
    assert raw.status_code == 201
    raw = patch(app, '/coverages/id_test', '{"name": "new_name_test"}')
    body = to_json(raw)
    assert raw.status_code == 200
    assert body["coverage"]['id'] == "id_test"
    assert body["coverage"]['name'] == "new_name_test"
def test_add_coverage_with_name(app):
    """A minimal coverage (id + name) is created and then listed."""
    raw = post(app, '/coverages', '{"id": "id_test", "name": "name of the coverage"}')
    assert raw.status_code == 201
    raw = app.get('/coverages')
    body = to_json(raw)
    assert len(body["coverages"]) == 1
    assert isinstance(body["coverages"], list)
    assert body["coverages"][0]["id"] == "id_test"
    assert body["coverages"][0]["name"] == "name of the coverage"
def test_post_grid_calendar_returns_success_status(app, coverage, get_app_context):
    """Uploading a grid calendar stores it in GridFS; re-uploading replaces the file."""
    # NOTE(review): an earlier, near-identical definition of this test exists in
    # the file — only this later one is collected by pytest; consider removing
    # the duplicate.
    archive_name = 'export_calendars.zip'
    path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        'fixtures/gridcalendar/', archive_name)
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    r = to_json(raw)
    assert raw.status_code == 200
    assert r.get('message') == 'OK'
    raw = app.get('/coverages')
    r = to_json(raw)
    assert len(r['coverages']) == 1
    assert 'grid_calendars_id' in r['coverages'][0]
    gridfs = GridFS(mongo.db)
    file_id = r['coverages'][0]['grid_calendars_id']
    assert gridfs.exists(ObjectId(file_id))
    # we update the file (it's the same, but that's not the point)
    files = {'file': (open(path, 'rb'), 'export_calendars.zip')}
    raw = app.post('/coverages/jdr/grid_calendar', data=files)
    assert raw.status_code == 200
    raw = app.get('/coverages')
    r = to_json(raw)
    assert len(r['coverages']) == 1
    assert 'grid_calendars_id' in r['coverages'][0]
    # it should be another file
    assert file_id != r['coverages'][0]['grid_calendars_id']
    # the previous file has been deleted
    assert not gridfs.exists(ObjectId(file_id))
    # and the new one exists
    assert gridfs.exists(ObjectId(r['coverages'][0]['grid_calendars_id']))