def create_autocomplete_job_with_data_sets():
    """Create a 'done' job on a fresh 'fr' instance carrying two datasets:
    a 'fusio' one (family 'pt') and an 'autocomplete_cosmogony' one.
    """
    with app.app_context():
        instance = models.Instance('fr')
        models.db.session.add(instance)
        models.db.session.commit()

        job = models.Job()
        job.instance = instance

        # we also create 2 datasets, one for fusio, one for autocomplete_cosmogony
        for index, dataset_type in enumerate(['fusio', 'cosmogony']):
            dataset = models.DataSet()
            dataset.type = dataset_type
            dataset.family_type = dataset.type
            if dataset.type == 'fusio':
                dataset.family_type = 'pt'
                dataset.name = '/path/to/dataset_{}'.format(index)
            else:
                dataset.family_type = 'autocomplete_cosmogony'
                dataset.name = '/path/to/dataset_cosmogony/cosmogony_europe.jsonl.gz'
            models.db.session.add(dataset)
            job.data_sets.append(dataset)

        job.state = 'done'
        models.db.session.add(job)
        models.db.session.commit()
def teardown():
    # Remove the end point and billing plan created for the tests.
    # NOTE(review): relies on an outer/module-level dict `d` holding the ids
    # under 'end_point' and 'billing_plan' — defined elsewhere in this file;
    # confirm it is populated before teardown runs.
    with app.app_context():
        end_point = models.EndPoint.query.get(d['end_point'])
        billing_plan = models.BillingPlan.query.get(d['billing_plan'])
        models.db.session.delete(end_point)
        models.db.session.delete(billing_plan)
        models.db.session.commit()
def create_instance():
    """Persist a new 'fr' instance and return its database id."""
    with app.app_context():
        fr_instance = models.Instance('fr')
        models.db.session.add(fr_instance)
        models.db.session.commit()
        return fr_instance.id
def create_two_autocomplete_parameters():
    """Persist two autocomplete parameter sets: 'europe' and 'france'."""
    with app.app_context():
        europe = models.AutocompleteParameter('europe', 'OSM', 'BANO', 'OSM', 'OSM', [8, 9])
        france = models.AutocompleteParameter('france', 'OSM', 'OSM', 'FUSIO', 'OSM', [8, 9])
        models.db.session.add(europe)
        models.db.session.add(france)
        models.db.session.commit()
def default_equipments_config():
    """Yield two persisted equipments providers ('sytral' and 'sytral2'),
    refreshed and detached from the session; delete them on teardown.
    """
    with app.app_context():
        provider1 = models.EquipmentsProvider('sytral')
        provider1.klass = 'sytral.klass'
        provider1.args = {'url': 'http://sytral.url', 'fail_max': 5, 'timeout': 2}
        models.db.session.add(provider1)

        provider2 = models.EquipmentsProvider('sytral2')
        provider2.klass = 'sytral.klass'
        provider2.args = {'url': 'http://sytral2.url', 'fail_max': 10, 'timeout': 1}
        models.db.session.add(provider2)

        models.db.session.commit()

        # refresh and detach the objects from the db before returning them
        for provider in (provider1, provider2):
            models.db.session.refresh(provider)
            models.db.session.expunge(provider)

        yield provider1, provider2

        models.db.session.delete(provider1)
        models.db.session.delete(provider2)
        models.db.session.commit()
def create_billing_plan():
    """Persist a 'test' billing plan on the default end point; return its id."""
    with app.app_context():
        default_end_point = models.EndPoint.get_default()
        plan = models.BillingPlan(
            name='test',
            max_request_count=10,
            max_object_count=100,
            end_point_id=default_end_point.id,
        )
        models.db.session.add(plan)
        models.db.session.commit()
        return plan.id
def test_purge_cities_job():
    """
    Test that 'cities' jobs and associated datasets are correctly purged
    'cities' file should also be deleted unless used by a job to keep
    """

    def create_cities_job(creation_date, path, state):
        # Build one job carrying a single 'cities' dataset whose name is `path`.
        job = models.Job()
        job.state = state
        dataset_backup_dir = path
        dataset = models.DataSet()
        dataset.type = 'cities'
        dataset.family_type = 'cities_family'
        dataset.name = '{}'.format(dataset_backup_dir)
        models.db.session.add(dataset)
        job.data_sets.append(dataset)
        job.created_at = creation_date
        models.db.session.add(job)

    cities_file_dir = app.config['CITIES_OSM_FILE_PATH']
    # Have 2 jobs with the same dataset to test that it isn't deleted if one of the jobs is kept
    common_dataset_folder = tempfile.mkdtemp(dir=cities_file_dir)
    with app.app_context():
        # 2 jobs sharing one folder (ages 0 and 1 day) ...
        for i in range(2):
            create_cities_job(datetime.utcnow() - timedelta(days=i), common_dataset_folder, state='done')
        # ... plus 2 older jobs (ages 2 and 3 days), each with its own folder.
        for j in range(2):
            create_cities_job(
                datetime.utcnow() - timedelta(days=j + 2),
                tempfile.mkdtemp(dir=cities_file_dir),
                state='done',
            )
        models.db.session.commit()

        # Initial state: 4 jobs, 3 distinct dataset folders on disk.
        jobs_resp = api_get('/v0/jobs')
        assert 'jobs' in jobs_resp
        assert len(jobs_resp['jobs']) == 4
        folders = set(glob.glob('{}/*'.format(cities_file_dir)))
        assert len(folders) == 3

        # Keep 3 backups: one job is purged; the shared folder must survive
        # because a kept job still references it.
        app.config['DATASET_MAX_BACKUPS_TO_KEEP'] = 3
        tasks.purge_cities()
        jobs_resp = api_get('/v0/jobs')
        assert 'jobs' in jobs_resp
        assert len(jobs_resp['jobs']) == 3
        folders = set(glob.glob('{}/*'.format(cities_file_dir)))
        assert len(folders) == 2

        # Keep only 1 backup: the remaining older jobs and their folders go.
        app.config['DATASET_MAX_BACKUPS_TO_KEEP'] = 1
        tasks.purge_cities()
        jobs_resp = api_get('/v0/jobs')
        assert 'jobs' in jobs_resp
        assert len(jobs_resp['jobs']) == 1
        folders = set(glob.glob('{}/*'.format(cities_file_dir)))
        assert len(folders) == 1
def create_instance():
    """Persist a 'fr' instance with NTFS-to-mimir import enabled; return its id."""
    with app.app_context():
        fr_instance = models.Instance('fr')
        fr_instance.import_ntfs_in_mimir = True
        models.db.session.add(fr_instance)
        models.db.session.commit()
        return fr_instance.id
def test_mimir_ntfs_false(create_instance, enable_mimir2):
    """No mimir actions are produced when import_ntfs_in_mimir is disabled."""
    with app.app_context():
        instance = models.Instance.query.get(create_instance)
        instance.import_ntfs_in_mimir = False
        actions = list(tasks.send_to_mimir(instance, 'test.poi', 'poi'))
        assert actions == []
def default_streetnetwork_backend():
    """Yield two persisted street-network backends ('kraken' and 'asgard'),
    refreshed and detached from the session; delete them on teardown.
    """
    with app.app_context():
        backend_kraken = models.StreetNetworkBackend('kraken')
        backend_kraken.klass = 'kraken.klass'
        backend_kraken.args = {'url': 'http://kraken.url', 'fail_max': 5, 'timeout': 2}
        models.db.session.add(backend_kraken)

        backend_asgard = models.StreetNetworkBackend('asgard')
        backend_asgard.klass = 'asgard.klass'
        backend_asgard.args = {'url': 'http://asgard.url', 'fail_max': 10, 'timeout': 1}
        models.db.session.add(backend_asgard)

        models.db.session.commit()

        # refresh and detach the objects from the db before returning them
        for backend in (backend_kraken, backend_asgard):
            models.db.session.refresh(backend)
            models.db.session.expunge(backend)

        yield backend_kraken, backend_asgard

        models.db.session.delete(backend_kraken)
        models.db.session.delete(backend_asgard)
        models.db.session.commit()
def create_instance_with_different_dataset_types_and_job_state(name, backup_dir):
    """Create an instance with 'poi' and 'fusio' jobs: per type, two jobs in
    the default state and one 'running' job, each one day older than the last.
    """
    with app.app_context():
        # (age in days, dataset type, explicit state or None for the default)
        job_specs = [
            (1, 'poi', None),
            (2, 'poi', None),
            (3, 'poi', 'running'),
            (4, 'fusio', None),
            (5, 'fusio', None),
            (6, 'fusio', 'running'),
        ]
        jobs = []
        for age_days, dataset_type, state in job_specs:
            created_at = datetime.utcnow() - timedelta(days=age_days)
            if state is None:
                jobs.append(create_job(created_at, dataset_type, backup_dir))
            else:
                jobs.append(create_job(created_at, dataset_type, backup_dir, state=state))
        create_instance(name, jobs)
def default_config():
    """Persist two jcdecaux BSS providers ('velib' and 'velov') and return
    them refreshed and detached from the session.
    """
    with app.app_context():
        providers = []
        for network in ('velib', 'velov'):
            provider = models.BssProvider(network)
            provider.network = network
            provider.klass = 'jcdecaux'
            # each provider keeps its own args dict (no sharing between rows)
            provider.args = {
                'operators': ['jcdecaux'],
                'user': '******',
                'password': '******',
            }
            models.db.session.add(provider)
            providers.append(provider)
        models.db.session.commit()

        # refresh and detach the objects from the db before returning them
        for provider in providers:
            models.db.session.refresh(provider)
            models.db.session.expunge(provider)

        return tuple(providers)
def bdd(init_flask_db):
    """
    All tests under this module will have a database with an up to date scheme

    At the end of the module the database scheme will be downgraded and
    upgraded again in the next module to test the database migrations
    """
    migration_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'migrations')
    with app.app_context():
        flask_migrate.Migrate(app, db)
        flask_migrate.upgrade(directory=migration_dir)

    yield

    with app.app_context():
        flask_migrate.downgrade(revision='base', directory=migration_dir)
def create_instance_with_one_type_dataset(name, backup_dir):
    """Create an instance with one job per dataset type (fusio, osm, poi),
    each job one day older than the previous one.
    """
    with app.app_context():
        jobs = [
            create_job(datetime.utcnow() - timedelta(days=age), dataset_type, backup_dir)
            for age, dataset_type in enumerate(['fusio', 'osm', 'poi'])
        ]
        create_instance(name, jobs)
def create_instance_with_same_type_datasets(name, backup_dir):
    """Create an instance with three 'poi' jobs of increasing age (0-2 days)."""
    with app.app_context():
        jobs = [
            create_job(datetime.utcnow() - timedelta(days=age), 'poi', backup_dir)
            for age in range(3)
        ]
        create_instance(name, jobs)
def create_user():
    """Persist a test user on the default end point and billing plan; return its id."""
    with app.app_context():
        test_user = models.User('test', '*****@*****.**')
        test_user.end_point = models.EndPoint.get_default()
        test_user.billing_plan = models.BillingPlan.get_default(test_user.end_point)
        models.db.session.add(test_user)
        models.db.session.commit()
        return test_user.id
def test_mimir_family_type_poi(create_instance, enable_mimir2):
    """A 'poi' export yields a poi2mimir task targeting the file, plus the finish task."""
    with app.app_context():
        instance = models.Instance.query.get(create_instance)
        actions = list(tasks.send_to_mimir(instance, 'test.poi', 'poi'))
        assert actions[0].task == 'tyr.binarisation.poi2mimir'
        assert actions[0].args[1] == 'test.poi'
        assert len(actions) == 2  # poi2mimir + finish
def create_autocomplete_parameters():
    """Yield the name of a persisted autocomplete parameter; remove it on teardown.

    Fix: commit after the teardown delete — without it the removal is never
    flushed to the database (the other teardown fixtures in this file all
    commit after deleting).
    """
    with app.app_context():
        autocomp = models.AutocompleteParameter(name='autocomp_name')
        models.db.session.add(autocomp)
        models.db.session.commit()

        yield autocomp.name

        models.db.session.delete(autocomp)
        models.db.session.commit()
def create_instance(name):
    """Persist an instance carrying one job per possible state; return its name."""
    with app.app_context():
        jobs = create_job_with_all_states()
        new_instance = models.Instance(name=name, jobs=jobs)
        models.db.session.add(new_instance)
        models.db.session.commit()
        return new_instance.name
def test_post_pbf_autocomplete_instance_not_exist(create_instance_fr, enable_mimir2):
    """Posting an update for an unknown autocomplete instance ('bob') returns 404."""
    with app.app_context():
        filename = 'empty_pbf.osm.pbf'
        root_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        path = os.path.join(root_dir, 'tests/fixtures/', filename)
        # the fixture file is opened but its handle is unused — presumably
        # just to assert the fixture exists; TODO confirm
        with open(path, 'rb'):
            response, status = api_post('/v0/autocomplete_parameters/bob/update_data', check=False)
            assert status == 404
def create_instance_fr():
    """Persist a navitia 'fr' instance together with matching autocomplete parameters."""
    with app.app_context():
        navitia_instance = models.Instance('fr')
        autocomplete_instance = models.AutocompleteParameter(
            name='fr',
            street='OSM',
            address='BANO',
            poi='OSM',
            admin='OSM',
            admin_level=[8],
        )
        models.db.session.add(navitia_instance)
        models.db.session.add(autocomplete_instance)
        models.db.session.commit()
def create_poi_type_json_obj(poi_types_json):
    """Persist a PoiTypeJson built from `poi_types_json` on a new 'fr' instance."""
    with app.app_context():
        fr_instance = models.Instance('fr')
        models.db.session.add(fr_instance)
        poi_type_json = models.PoiTypeJson(json.dumps(poi_types_json), fr_instance)
        models.db.session.add(poi_type_json)
        models.db.session.commit()
        return poi_type_json
def create_instance():
    """Yield the id of a persisted 'test_instance'; remove it on teardown.

    Fix: commit after the teardown delete — without it the removal is never
    flushed to the database (the other teardown fixtures in this file all
    commit after deleting).
    """
    with app.app_context():
        instance = models.Instance('test_instance')
        models.db.session.add(instance)
        models.db.session.commit()

        yield instance.id

        models.db.session.delete(instance)
        models.db.session.commit()
def create_user(geojson_feature_collection):
    """Persist a test user whose shape is the given geojson collection; return its id."""
    with app.app_context():
        shaped_user = models.User('test', '*****@*****.**')
        shaped_user.end_point = models.EndPoint.get_default()
        shaped_user.billing_plan = models.BillingPlan.get_default(shaped_user.end_point)
        shaped_user.shape = json.dumps(geojson_feature_collection)
        models.db.session.add(shaped_user)
        models.db.session.commit()
        return shaped_user.id
def create_instance():
    """Persist a 'fr_instance' holding a single poi-only job; return its name."""
    with app.app_context():
        jobs = [create_job_with_poi_only()]
        fr_instance = models.Instance(name='fr_instance', jobs=jobs)
        models.db.session.add(fr_instance)
        models.db.session.commit()
        return fr_instance.name
def create_api():
    """Yield the id of a persisted 'test_api'; remove it on teardown.

    Fix: commit after the teardown delete — without it the removal is never
    flushed to the database (the other teardown fixtures in this file all
    commit after deleting).
    """
    with app.app_context():
        api = models.Api('test_api')
        models.db.session.add(api)
        models.db.session.commit()

        yield api.id

        models.db.session.delete(api)
        models.db.session.commit()
def test_post_zip_file_on_job_with_wrong_extension(create_instance_fr):
    """Uploading a file without a recognized extension is rejected with a 400."""
    with app.app_context():
        filename = 'empty_file_without_extension'
        root_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        path = os.path.join(root_dir, 'tests/fixtures/', filename)
        with open(path, 'rb') as fixture_file:
            payload = {'file': (fixture_file, filename)}
            resp, status = api_post('/v0/jobs/fr', data=payload, check=False)
            assert status == 400
def clean_db():
    """
    before all tests the database is cleared
    """
    with app.app_context():
        table_names = [
            'user', 'instance', 'authorization', 'key',
            'data_set', 'job', 'poi_type_json', 'autocomplete_parameter',
        ]
        # quote each identifier ('user' is a reserved word in postgres)
        quoted = ', '.join('"{}"'.format(table) for table in table_names)
        db.session.execute('TRUNCATE {} CASCADE;'.format(quoted))
        db.session.commit()
def test_post_zip_file_on_job_should_succeed(create_instance_fr):
    """Uploading a valid fusio zip on an existing instance returns a 200."""
    with app.app_context():
        filename = 'fusio.zip'
        root_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        path = os.path.join(root_dir, 'tests/fixtures/', filename)
        with open(path, 'rb') as fixture_file:
            payload = {'file': (fixture_file, filename)}
            resp, status = api_post('/v0/jobs/fr', data=payload, check=False)
            assert status == 200
def default_ridesharing_service_config():
    """Yield a 'default' instance plus two persisted ridesharing services,
    all refreshed and detached from the session; delete everything on teardown.

    Improvement: the two services were configured with two copy-pasted,
    byte-identical args dicts; the configuration is now built by a private
    helper so the two stay in sync.
    """

    def _make_service(name):
        # Both test services share the same InstantSystem configuration;
        # each call returns a fresh args dict (no sharing between rows).
        service = models.RidesharingService(name)
        service.klass = 'jormungandr.scenarios.ridesharing.instant_system.InstantSystem'
        service.args = {
            'service_url': 'http://bob.com',
            "rating_scale_min": 0,
            "crowfly_radius": 500,
            "rating_scale_max": 5,
            "api_key": "aaa",
            "network": "Test Covoiturage",
            "feed_publisher": {
                "url": "url",
                "id": "test-coverage",
                "license": "Private",
                "name": "Test ABC - TEST",
            },
        }
        return service

    with app.app_context():
        ridesharing0 = _make_service('TestCovoiturage0')
        ridesharing1 = _make_service('TestCovoiturage1')
        models.db.session.add(ridesharing0)
        models.db.session.add(ridesharing1)
        models.db.session.commit()

        # refresh and detach the objects from the db before returning them
        models.db.session.refresh(ridesharing0)
        models.db.session.refresh(ridesharing1)
        models.db.session.expunge(ridesharing0)
        models.db.session.expunge(ridesharing1)

        # Create instance
        instance = models.Instance('default')
        models.db.session.add(instance)
        models.db.session.commit()
        models.db.session.refresh(instance)
        models.db.session.expunge(instance)

        yield instance, ridesharing0, ridesharing1

        models.db.session.delete(instance)
        models.db.session.delete(ridesharing0)
        models.db.session.delete(ridesharing1)
        models.db.session.commit()