def test_add_vocab():
    """Attach a vocabulary to the template dataset and confirm it appears on re-fetch."""
    dataset = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    vocab_payload = {
        'name': 'categoryTab',
        'tags': ['forestChange', 'treeCoverChange'],
        'application': 'gfw',
    }
    dataset.add_vocabulary(vocab_params=vocab_payload, token=API_TOKEN)
    # Re-instantiate so the assertion checks fresh server state, not the stale local object.
    refreshed = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    assert len(refreshed.vocabulary) > 0
def test_add_widget():
    """Add a widget to the template dataset and verify the returned object is populated.

    Uses isinstance() rather than type(...) == ... comparisons (the idiomatic
    type check, and tolerant of subclasses).
    """
    d = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    w = d.add_widget(widget_params={
        'name': 'Template Widget',
        'widgetConfig': {'key': 'val'},
        'application': ['gfw'],
    }, token=API_TOKEN)
    assert isinstance(w.id, str)
    assert isinstance(w.attributes, dict)
    assert len(w.attributes) > 0
def test_add_meta():
    """Attach metadata to the template dataset and confirm it appears on re-fetch."""
    dataset = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    meta_payload = {
        'application': 'gfw',
        'info': {
            'citation': 'Example citation',
            'color': '#fe6598',
            'description': 'This is an example dataset.',
            'isLossLayer': True,
            'name': 'Template Layer',
        },
        'language': 'en',
    }
    dataset.add_metadata(meta_params=meta_payload, token=API_TOKEN)
    # Pull a fresh copy to verify the metadata was persisted server-side.
    refreshed = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    assert len(refreshed.metadata) > 0
def test_create_new_dataset():
    """Create a new staging dataset from attributes, then delete it.

    Fix: compare the delete() result to None with `is None` — identity, not
    equality — per PEP 8's guidance for singleton comparisons.
    """
    atts = {
        "name": "NEW Template Dataset",
        "application": ["gfw"],
        "connectorType": "rest",
        "provider": "cartodb",
        "connectorUrl": "https://wri-01.carto.com/tables/gfw_land_rights/public_map",
        "tableName": "gfw_land_rights",
        "published": False,
        "env": "staging",
    }
    new = Dataset(attributes=atts, token=API_TOKEN)
    assert new.attributes['name'] == 'NEW Template Dataset'
    # delete() is expected to return None on success.
    assert new.delete(token=API_TOKEN, force=True) is None
def test_merge_widget():
    """Merge the production template widget into its staging counterpart.

    Resets the staging widget first, performs the merge, then checks that the
    whitelisted keys on the merged widget match the production source.
    """
    staging_server = 'https://staging-api.globalforestwatch.org'
    staging_widget = Widget(id_hash='66de77eb-dee3-4c56-9ad4-cf68d8b107fd',
                            server=staging_server)
    # Reset staging state so the merge has something to overwrite.
    staging_widget.update(update_params={
        'name': 'Template Widget Staging',
        'widgetConfig': {},
    }, token=API_TOKEN)
    production_widget = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7').widget[0]
    whitelist = ['name', 'widgetConfig']
    merged_widget = production_widget.merge(
        token=API_TOKEN,
        target_widget=None,
        target_widget_id=staging_widget.id,
        target_server=staging_server,
        key_whitelist=whitelist,
        force=True,
    )

    def picked(widget):
        # Project a widget's attributes down to just the whitelisted keys.
        return {key: val for key, val in widget.attributes.items() if key in whitelist}

    assert picked(merged_widget) == picked(production_widget)
def test_update_meta():
    """Update the template dataset's first metadata record and verify persistence.

    Fixes: isinstance() instead of type(...) == str; assert the boolean field
    directly with `not ...` instead of comparing to False with ==.
    """
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    m = ds.metadata[0]
    assert isinstance(m.id, str)
    payload = {
        'application': 'gfw',
        'info': {
            'citation': 'TEST',
            'color': '#fe6598',
            'description': 'TEST',
            'isLossLayer': False,
            'name': 'Template Layer',
        },
        'language': 'en',
    }
    m.update(update_params=payload, token=API_TOKEN)
    # Re-fetch to confirm the update was persisted server-side.
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    updated_m = ds.metadata[0]
    assert updated_m.attributes['info']['description'] == 'TEST'
    assert not updated_m.attributes['info']['isLossLayer']
def test_update_vocab():
    """Update the template dataset's first vocabulary and check the returned values.

    Fix: isinstance() instead of type(...) == str for the id check.
    """
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    v = ds.vocabulary[0]
    assert isinstance(v.id, str)
    payload = {
        'name': 'categoryTab',
        'tags': ['forestChange', 'treeCoverChange'],
    }
    # update() returns a list of vocabulary objects; inspect the first.
    updated_v = v.update(update_params=payload, token=API_TOKEN)
    assert updated_v[0].attributes['name'] == 'categoryTab'
    assert updated_v[0].attributes['tags'] == ['forestChange', 'treeCoverChange']
def test_update_widget():
    """Update the template dataset's first widget and verify the new attributes.

    Fixes: isinstance() instead of type(...) == str; assert the config flag
    truthily via .get('updated') instead of comparing to True with ==
    (.get already defaults to None, so the explicit default was redundant).
    """
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    w = ds.widget[0]
    assert isinstance(w.id, str)
    payload = {
        'name': 'Widget UPDATED',
        'widgetConfig': {'updated': True},
    }
    updated_w = w.update(update_params=payload, token=API_TOKEN)
    assert updated_w.attributes['name'] == 'Widget UPDATED'
    assert updated_w.attributes['widgetConfig'].get('updated')
def test_clone_and_delete_dataset():
    """Clone the template dataset (with children), verify the clone, then tear it all down.

    Fixes:
    - `cloned.id is not '7cf3…'` compared identity against a string literal —
      undefined behavior that happens to work on CPython and raises a
      SyntaxWarning on 3.8+; replaced with the intended `!=` equality check.
    - Removed pointless f-string prefixes on literals with no placeholders.
    - Singleton comparisons use `is None` instead of `== None`.
    """
    d = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    cloned = d.clone(token=API_TOKEN, env='production', dataset_params={
        'name': 'Template Dataset CLONED',
        'published': False,
    }, clone_children=True)
    assert cloned.attributes['name'] == 'Template Dataset CLONED'
    # Equality, not identity: the clone must simply have a different id.
    assert cloned.id != '7cf3fab2-3fbe-4980-b572-712207b2c8c7'
    vocabulary = cloned.vocabulary
    metadata = cloned.metadata
    widget = cloned.widget
    layer = cloned.layers
    # clone_children=True should have copied every child collection.
    assert len(vocabulary) > 0
    assert len(metadata) > 0
    assert len(widget) > 0
    assert len(layer) > 0
    # Delete children first, then the dataset itself; delete() returns None on success.
    assert vocabulary[0].delete(token=API_TOKEN) is None
    assert metadata[0].delete(token=API_TOKEN) is None
    assert widget[0].delete(token=API_TOKEN) is None
    assert layer[0].delete(token=API_TOKEN, force=True) is None
    assert cloned.delete(token=API_TOKEN, force=True) is None
def test_update_dataset():
    """Rename the template dataset and then restore the original name.

    Fix: dropped f-string prefixes from plain literals with no placeholders.
    """
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    updated = ds.update(token=API_TOKEN, update_params={'name': 'Template Dataset UPDATED'})
    assert updated.attributes['name'] == 'Template Dataset UPDATED'
    # Restore the original name so the test leaves the fixture unchanged.
    updated = ds.update(token=API_TOKEN, update_params={'name': 'Template Dataset'})
    assert updated.attributes['name'] == 'Template Dataset'
def test_dataset_load():
    """Load a dataset from a local JSON file and clean up afterwards.

    Fixes: removed a placeholder-less f-string; built the file path with
    os.path.join instead of string concatenation.
    """
    ds = Dataset(id_hash='897ecc76-2308-4c51-aeb3-495de0bdca79')
    load_path = './tests'
    loaded = ds.load(path=load_path, check=True)
    assert loaded.id == '897ecc76-2308-4c51-aeb3-495de0bdca79'
    # Remove the file written by a prior save so runs stay idempotent.
    os.remove(os.path.join(load_path, f"{ds.id}.json"))
def test_dataset_save():
    """Save a dataset to a local JSON file and check the file exists.

    Fixes: assert the boolean directly instead of comparing to True with ==;
    build the path with os.path.join instead of string concatenation.
    """
    ds = Dataset(id_hash='897ecc76-2308-4c51-aeb3-495de0bdca79')
    save_path = './tests'
    ds.save(path=save_path)
    assert os.path.exists(os.path.join(save_path, f"{ds.id}.json"))
def test_access_widget():
    """Verify the widget accessor returns a non-empty list.

    Fix: isinstance() instead of type(...) == list.
    """
    ds = Dataset(id_hash='dcd1e9c7-1370-404e-8816-eaa51d4b1a39')
    assert isinstance(ds.widget, list)
    assert len(ds.widget) > 0
def test_access_meta_attributes():
    """Verify a metadata object's attributes are exposed as a dict.

    Fix: isinstance() instead of `type(...) is dict`.
    """
    ds = Dataset('7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    meta = ds.metadata[0].attributes
    assert isinstance(meta, dict)
def test_access_meta():
    """Verify the metadata accessor returns a non-empty list.

    Fix: isinstance() instead of type(...) == list.
    """
    ds = Dataset(id_hash='bb1dced4-3ae8-4908-9f36-6514ae69713f')
    assert isinstance(ds.metadata, list)
    assert len(ds.metadata) > 0
def test_access_vocab():
    """Verify the vocabulary accessor returns a non-empty list.

    Fix: isinstance() instead of type(...) == list.
    """
    ds = Dataset(id_hash='bb1dced4-3ae8-4908-9f36-6514ae69713f')
    assert isinstance(ds.vocabulary, list)
    assert len(ds.vocabulary) > 0
def test_queries_on_datasets():
    """Query a dataset with both the default query and an explicit SQL statement."""
    dataset = Dataset(id_hash='bd5d7924-611e-4302-9185-8054acb0b44b')
    # Default query: just needs to return at least one row.
    default_result = dataset.query()
    assert len(default_result) > 0
    # Explicit SQL with LIMIT 5 must return exactly five rows.
    limited_result = dataset.query(
        'SELECT fid, ST_ASGEOJSON(the_geom_webmercator) FROM data LIMIT 5'
    )
    assert len(limited_result) == 5
def test_create_dataset():
    """Instantiate a dataset by id and verify its id and attributes are populated.

    Fix: isinstance() instead of type(...) == dict.
    """
    ds = Dataset(id_hash='bb1dced4-3ae8-4908-9f36-6514ae69713f')
    assert ds.id == 'bb1dced4-3ae8-4908-9f36-6514ae69713f'
    assert isinstance(ds.attributes, dict)
    assert len(ds.attributes) > 0
def test_dataset_intersect():
    """Intersect a dataset with a GADM geometry and expect a None result.

    Fix: `is None` (identity) instead of `== None` per PEP 8.
    """
    ds = Dataset(id_hash='fee5fc38-7a62-49b8-8874-dfa31cbb1ef6')
    g = Geometry(parameters={'iso': 'BRA', 'adm1': 1, 'adm2': 1})
    i = ds.intersect(g)
    assert i is None
def test_delete_widget():
    """Delete the template dataset's first widget and expect a None return.

    Fixes: isinstance() instead of type(...) == str; `is None` instead of
    `== None`; PEP 8 spacing around the assignment operator.
    """
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    w = ds.widget[0]
    assert isinstance(w.id, str)
    deleted_widget = w.delete(token=API_TOKEN)
    assert deleted_widget is None
def test_delete_meta():
    """Delete the template dataset's first metadata record and expect a None return.

    Fixes: isinstance() instead of type(...) == str; `is None` instead of `== None`.
    """
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    m = ds.metadata[0]
    assert isinstance(m.id, str)
    deleted_meta = m.delete(token=API_TOKEN)
    assert deleted_meta is None
def test_delete_vocab():
    """Delete the template dataset's first vocabulary and expect a None return.

    Fixes: isinstance() instead of type(...) == str; `is None` instead of `== None`.
    """
    ds = Dataset(id_hash='7cf3fab2-3fbe-4980-b572-712207b2c8c7')
    v = ds.vocabulary[0]
    assert isinstance(v.id, str)
    deleted_vocab = v.delete(token=API_TOKEN)
    assert deleted_vocab is None