def test_load_sqlite(self):
    """Bulk-load the 'kek' SQLite fixture and verify its entities search.

    Points the bulk loader at a fixture database via the
    ALEPH_TEST_BULK_DATABASE_URI environment variable, loads the mapping,
    then checks the created collection and a known entity via the API.
    """
    count = Collection.all().count()
    assert 0 == count, count

    # Expose the fixture database to the bulk-load mapping via env var.
    db_uri = 'sqlite:///' + self.get_fixture_path('kek.sqlite')
    os.environ['ALEPH_TEST_BULK_DATABASE_URI'] = db_uri
    config = load_config_file(self.get_fixture_path('kek.yml'))
    bulk_load(config)

    # Exactly one collection should have been created by the load.
    count = Collection.all().count()
    assert 1 == count, count
    coll = Collection.by_foreign_id('kek')
    assert coll.category == 'scrape', coll.category

    # Query the API as an admin for a known entity from the fixture.
    _, headers = self.login(is_admin=True)
    flush_index()
    res = self.client.get('/api/2/entities?q=friede+springer',
                          headers=headers)
    assert res.status_code == 200, res
    assert res.json['total'] == 1, res.json
    hit = res.json['results'][0]
    assert hit['id'] == '9895ccc1b3d6444ccc6371ae239a7d55c748a714', hit
def get_schemata():
    """Return the application's SchemaSet, constructing it on first use.

    The parsed schema is memoized on the real Flask app object so the
    YAML file is only read once per process.
    """
    app = current_app._get_current_object()
    if hasattr(app, '_schemata'):
        return app._schemata
    schema_yaml = app.config.get('SCHEMA_YAML')
    log.info("Loading schema from: %s", schema_yaml)
    # Imported here to avoid a circular import at module load time —
    # TODO confirm against the rest of the module.
    from aleph.schema import SchemaSet
    app._schemata = SchemaSet(load_config_file(schema_yaml))
    return app._schemata
def get_datasets():
    """Return the application's DatasetSet, constructing it on first use.

    Reads the YAML file named by the DATASETS_YAML config key; when the
    key is unset, an empty dataset mapping is used instead. The result is
    memoized on the real Flask app object so the file is parsed only once.
    """
    app = current_app._get_current_object()
    if not hasattr(app, '_datasets'):
        datasets_yaml = app.config.get('DATASETS_YAML')
        if datasets_yaml is not None:
            log.info("Loading datasets from: %s", datasets_yaml)
            datasets = load_config_file(datasets_yaml)
        else:
            # log.warn is a deprecated alias of log.warning (removed from
            # the logging module in newer Python); use the real method.
            log.warning("No datasets.yaml defined.")
            datasets = {}
        # Imported here to avoid a circular import at module load time —
        # TODO confirm against the rest of the module.
        from aleph.datasets import DatasetSet
        app._datasets = DatasetSet(datasets)
    return app._datasets
def test_entity_references(self):
    """Bulk-load the experts CSV and check reference aggregation.

    After loading, the single entity matching 'Climate' should report
    one reference group with a count of three.
    """
    # Expose the fixture CSV to the bulk-load mapping via env var.
    os.environ['ALEPH_TEST_BULK_CSV'] = \
        'file://' + self.get_fixture_path('experts.csv')
    config = load_config_file(self.get_fixture_path('experts.yml'))
    bulk_load(config)
    flush_index()

    res = self.client.get('/api/2/entities?q=Climate')
    assert res.json['total'] == 1, res.json
    group_id = res.json['results'][0]['id']

    res = self.client.get('/api/2/entities/%s/references' % group_id)
    references = res.json['results']
    assert len(references) == 1, references
    assert references[0]['count'] == 3, references
def test_load_sqlite(self):
    """Bulk-load the 'kek' mapping and verify a known entity is indexed.

    Checks that exactly one collection is created and that a fixture
    entity can be found through the search API.
    """
    count = Collection.all().count()
    assert 0 == count, count

    config = load_config_file(self.get_fixture_path('kek.yml'))
    bulk_load(config)
    flush_index()

    count = Collection.all().count()
    assert 1 == count, count

    res = self.client.get('/api/2/entities?q=friede+springer')
    assert res.status_code == 200, res
    assert res.json['total'] == 1, res.json
    hit = res.json['results'][0]
    assert hit['id'] == '9895ccc1b3d6444ccc6371ae239a7d55c748a714', hit
def test_load_csv(self):
    """Bulk-load the experts CSV fixture and verify search results.

    Points the mapping at the fixture CSV via the ALEPH_TEST_BULK_CSV
    environment variable, then checks the created collection and a
    known entity through the search API.
    """
    count = Collection.all().count()
    assert 0 == count, count

    # Expose the fixture CSV to the bulk-load mapping via env var.
    os.environ['ALEPH_TEST_BULK_CSV'] = \
        'file://' + self.get_fixture_path('experts.csv')
    config = load_config_file(self.get_fixture_path('experts.yml'))
    bulk_load(config)
    flush_index()

    count = Collection.all().count()
    assert 1 == count, count

    res = self.client.get('/api/2/entities?q=Greenfield')
    assert res.status_code == 200, res
    assert res.json['total'] == 1, res.json
    hit = res.json['results'][0]
    assert hit['id'] == '6897ef1acd633c229d812c1c495f030d212c9081', hit
def bulkload(file_name):
    """Index all the entities in a given dataset.

    Parses the mapping file at *file_name* and hands it to the bulk
    loader in one step.
    """
    log.info("Loading bulk data from: %s", file_name)
    bulk_load(load_config_file(file_name))