def test_taxo_query(config, database, fastapi, caplog):
    """Query a single taxon by ID.

    This depends on the DB which has a subset of the production one.
    The endpoint is public, so an unauthenticated call must succeed.
    """
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import
    # Import side effect only; the project itself is not used below
    prj_id = test_import(config, database, caplog, "Test taxo query")

    url = TAXA_QUERY_URL.format(taxon_id=849)
    # Unauthenticated call — no security barrier on this endpoint.
    # The original issued the exact same GET twice; one request is enough
    # to check both the status code and the payload.
    rsp = fastapi.get(url)
    assert rsp.status_code == status.HTTP_200_OK
    assert rsp.json() == {
        'children': [5141],
        'display_name': 'Cyanobacteria<Proteobacteria',
        'id': 849,
        'lineage': ['Cyanobacteria', 'Proteobacteria', 'Bacteria', 'living'],
        'id_lineage': [849, 96, 3, 1],
        'name': 'Cyanobacteria',
        'nb_children_objects': 0,
        'nb_objects': 0
    }
def test_project_stats(config, database, fastapi, caplog):
    """Check classification stats and free-column stats on a freshly imported project.

    Imports the fixture project twice (the second import adds a sample
    spanning 2 days), then exact-matches the two statistics endpoints.
    """
    caplog.set_level(logging.FATAL)

    # Admin imports the project
    from tests.test_import import test_import, test_import_a_bit_more_skipping
    prj_id = test_import(config, database, caplog, "Stats test project")
    # Add a sample spanning 2 days
    test_import_a_bit_more_skipping(config, database, caplog,
                                    "Stats test project")
    # Taxa & classif statistics
    url = PROJECT_CLASSIF_STATS_URL.format(prj_ids=prj_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == 200
    # Right after import, all 11 objects are in 'predicted' state
    assert rsp.json() == [{
        'nb_dubious':
        0,
        'nb_predicted':
        11,
        'nb_unclassified':
        0,
        'nb_validated':
        0,
        'projid':
        prj_id,
        'used_taxa': [45072, 78418, 84963, 85011, 85012, 85078]
    }]

    # Get free column statistics
    url = PROJECT_FREE_COLS_STATS_URL.format(project_id=prj_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == 200
    # Expected is the textual dump of the free-columns mapping plus,
    # per acquisition, min/max/count/distinct for every numeric column.
    expected = [
        'Stats test project',
        "OrderedDict([('by', 'n01'), ('width', 'n02'), ('height', 'n03'), ('area', "
        "'n04'), ('mean', 'n05'), ('major', 'n06'), ('minor', 'n07'), ('feret', "
        "'n08'), ('area_exc', 'n09'), ('thickr', 'n10'), ('esd', 'n11'), "
        "('elongation', 'n12'), ('range', 'n13'), ('meanpos', 'n14'), ('centroids', "
        "'n15'), ('cv', 'n16'), ('sr', 'n17'), ('perimareaexc', 'n18'), "
        "('feretareaexc', 'n19'), ('perimferet', 'n20'), ('perimmajor', 'n21'), "
        "('circex', 'n22'), ('cdexc', 'n23'), ('kurt_mean', 'n24'), ('skew_mean', "
        "'n25'), ('convperim_perim', 'n26'), ('convarea_area', 'n27'), "
        "('symetrieh_area', 'n28'), ('symetriev_area', 'n29'), ('nb1_area', 'n30'), "
        "('nb2_area', 'n31'), ('nb3_area', 'n32'), ('nb1_range', 'n33'), "
        "('nb2_range', 'n34'), ('nb3_range', 'n35'), ('median_mean', 'n36'), "
        "('median_mean_range', 'n37'), ('skeleton_area', 'n38'), ('extra', 't01')])",
        ' (0): ',
        'Total: 0 values, dup 0 values',
        'generic_m106_mn01_n1_sml (5): '
        '[9811,10823,#5,u1],[33,65,#5,u1],[47,94,#5,u1],[516,1583,#5,u1],[192.2400054932,241.0399932861,#5,u1],[48.7999992371,70.1999969482,#5,u1],[13.1000003815,41.2000007629,#5,u1],[51.5999984741,102.5999984741,#5,u1],[0,100,#4,u2],[2,2.8239998817,#4,u2],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1],[NaN,NaN,#5,u1]',
        'Total: 190 values, dup 40 values',
        'generic_m106_mn01_n2_sml (3): '
        '[14,10961,#3,u1],[23,56,#3,u1],[26,38,#3,u1],[413,929,#3,u1],[175.5299987793,222.75,#3,u1],[24.7000007629,44.2000007629,#3,u1],[21.2999992371,26.7999992371,#3,u1],[27.2999992371,58.2000007629,#3,u1],[0,9,#2,u2],[1.8600000143,2,#2,u2],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1]',
        'Total: 114 values, dup 40 values',
        'generic_m106_mn01_n3_sml (3): '
        '[14,10961,#3,u1],[23,56,#3,u1],[26,38,#3,u1],[413,929,#3,u1],[175.5299987793,222.75,#3,u1],[24.7000007629,44.2000007629,#3,u1],[21.2999992371,26.7999992371,#3,u1],[27.2999992371,58.2000007629,#3,u1],[0,9,#2,u2],[1.8600000143,2,#2,u2],[1.86,1.86,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1],[NaN,NaN,#3,u1]',
        'Total: 114 values, dup 40 values',
    ]
    actual = rsp.json()
    assert actual == expected
# Beispiel #3 ("Example #3" separator artifact from the code-collection source)
# 0
def test_purge_partial(config, database, caplog):
    """Deleting non-existent object IDs must be a harmless no-op."""
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import
    prj_id = test_import(config, database, caplog, "Test Purge partial")
    # IDs far above anything the fresh import could have produced
    bogus_ids = list(range(500000, 500015))
    deleted = ObjectManager().delete(current_user_id=ADMIN_USER_ID,
                                     object_ids=bogus_ids)
    # Nothing removed at any level (objects, fields, images, ...)
    assert deleted == (0, 0, 0, 0)
    # The project must still be fully consistent
    check_project(prj_id)
# Beispiel #4 ("Example #4" separator artifact from the code-collection source)
# 0
def test_clone_project(config, database, fastapi, caplog):
    """Project cloning: unknown source -> 404, valid source -> new project ID."""
    caplog.set_level(logging.CRITICAL)
    from tests.test_import import test_import
    src_prj_id = test_import(config, database, caplog, "Clone source")
    caplog.set_level(logging.DEBUG)
    create_url = "/projects/create"
    # Cloning a project which does not exist must fail
    rsp = fastapi.post(create_url, headers=ADMIN_AUTH,
                       json={"title": "Clone of 1", "clone_of_id": -1})
    assert rsp.status_code == status.HTTP_404_NOT_FOUND
    # Cloning the freshly imported project must succeed with a valid ID
    rsp = fastapi.post(create_url, headers=ADMIN_AUTH,
                       json={"title": "Clone of 1", "clone_of_id": src_prj_id})
    assert rsp.status_code == status.HTTP_200_OK
    assert int(rsp.json()) > 0
# Beispiel #5 ("Example #5" separator artifact from the code-collection source)
# 0
def test_purge_plain(config, database, fastapi, caplog):
    """Full project deletion, then verify that a second delete fails with 'Not found'."""
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import
    prj_id = test_import(config, database, caplog, "Test Purge")
    # Remove the whole project, not just its objects
    ProjectsService().delete(current_user_id=ADMIN_USER_ID,
                             prj_id=prj_id,
                             only_objects=False)
    # A second deletion must fail, as the project is gone
    with pytest.raises(AssertionError, match="Not found"):
        ProjectsService().delete(current_user_id=ADMIN_USER_ID,
                                 prj_id=prj_id,
                                 only_objects=False)
def test_queries(config, database, fastapi, caplog):
    """Exercise object-set query options: ordering, windowing (start/size).

    Object IDs vary from run to run, so the expected ordering is rebased on
    the smallest actual ID before comparison.
    """
    caplog.set_level(logging.ERROR)

    # Admin imports the project
    from tests.test_import import test_import, test_import_a_bit_more_skipping
    prj_id = test_import(config, database, caplog, "Queries test project")
    # Add a sample spanning 2 days
    test_import_a_bit_more_skipping(config, database, caplog,
                                    "Queries test project")

    ref = [6, 7, 8, 11, 12, 13, 1, 2, 3, 4, 5]
    # Renamed from 'all' so as not to shadow the builtin
    all_objs = _prj_query(fastapi, CREATOR_AUTH, prj_id, order="depth_min")
    # we must offset expected by first actual objID as they vary, run to run
    min_objid = min(all_objs)
    ref = [r + min_objid - 1 for r in ref]
    assert all_objs == ref

    # Descending order must give exactly the reversed ascending result
    all_objs = _prj_query(fastapi, CREATOR_AUTH, prj_id, order="-depth_min")
    assert all_objs == list(reversed(ref))

    # Smoke call only: no expectation (yet) on taxonomy-name ordering
    by_taxo = _prj_query(fastapi, CREATOR_AUTH, prj_id, order="-classifname")

    # TODO: Should be a free column _name_
    by_free_col = _prj_query(fastapi, CREATOR_AUTH, prj_id, order="n01")

    # First window of 4 objects
    limit_4 = _prj_query(fastapi, CREATOR_AUTH, prj_id, size=4)
    assert len(limit_4) == 4

    # Next window of 4, starting right after the first
    limit_4_start_4 = _prj_query(fastapi,
                                 CREATOR_AUTH,
                                 prj_id,
                                 start=4,
                                 size=4)
    assert len(limit_4_start_4) == 4

    # Pagination windows must not overlap
    assert set(limit_4).isdisjoint(set(limit_4_start_4))
def test_taxotree_query(config, database, fastapi, caplog):
    """Taxonomy search; this depends on the DB which has a subset of the production one."""
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import
    prj_id = test_import(config, database, caplog, "Test taxo search")

    # Unauthenticated call with an empty query: the endpoint is public,
    # so no security barrier is expected.
    rsp = fastapi.get(TAXA_SEARCH_URL.format(project_id=prj_id, query=""),
                      json={})
    assert rsp.status_code == status.HTTP_200_OK

    # Unauthenticated call, this time with a '<'-separated filtering query
    search_url = TAXA_SEARCH_URL.format(project_id=prj_id,
                                        query=quote_plus("cyano<living"))
    rsp = fastapi.get(search_url, json={})
    # Expected matches as (id, display text) pairs, all with 0 preset rank
    hits = [(233, 'Cyanobacteria<Bacteria'),
            (849, 'Cyanobacteria<Proteobacteria'),
            (2396, 'Cyanophora'),
            (1680, 'Cyanophyceae'),
            (2395, 'Cyanoptyche')]
    assert rsp.json() == [{'id': an_id, 'pr': 0, 'text': a_text}
                          for an_id, a_text in hits]
def test_classif(config, database, fastapi, caplog):
    """End-to-end classification scenario: revert, reset, auto & manual classify.

    Walks a freshly imported project (8 objects, all 'predicted') through:
    revert of import classifications, reset-to-predicted, ML-style auto
    classification, manual validation, history checks, then deletion of a
    subset of objects. Statement order matters: each step's expected stats
    depend on the previous steps.
    """
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import
    prj_id = test_import(config, database, caplog, "Test Classify/Validate")

    obj_ids = _prj_query(fastapi, CREATOR_AUTH, prj_id)
    assert len(obj_ids) == 8

    copepod_id = 25828
    entomobryomorpha_id = 25835
    crustacea = 12846
    # See if the taxa we are going to use are OK
    rsp = fastapi.get(
        TAXA_SET_QUERY_URL.format(taxa_ids="%d+%d" %
                                  (copepod_id, entomobryomorpha_id)))
    # Note: There is no real lineage in test DB
    assert rsp.json() == [{
        'children': [84964],
        'display_name':
        'Copepoda',
        'id':
        25828,
        'id_lineage': [25828, 16621, 12846, 11517, 2367, 382, 8, 2, 1],
        'lineage': [
            'Copepoda', 'Maxillopoda', 'Crustacea', 'Arthropoda', 'Metazoa',
            'Holozoa', 'Opisthokonta', 'Eukaryota', 'living'
        ],
        'name':
        'Copepoda',
        'nb_children_objects':
        0,
        'nb_objects':
        0
    }, {
        'children': [],
        'display_name':
        'Entomobryomorpha',
        'id':
        25835,
        'id_lineage': [25835, 16630, 12845, 11517, 2367, 382, 8, 2, 1],
        'lineage': [
            'Entomobryomorpha', 'Collembola', 'Hexapoda', 'Arthropoda',
            'Metazoa', 'Holozoa', 'Opisthokonta', 'Eukaryota', 'living'
        ],
        'name':
        'Entomobryomorpha',
        'nb_children_objects':
        0,
        'nb_objects':
        0
    }]

    # Initial stats just after load
    def get_stats():
        # Per-project classification counters, as admin
        stats_url = PROJECT_CLASSIF_STATS_URL.format(prj_ids="%s" % prj_id)
        stats_rsp = fastapi.get(stats_url, headers=ADMIN_AUTH)
        assert stats_rsp.status_code == status.HTTP_200_OK
        return stats_rsp.json()[0]

    def get_object_set_stats():
        # Object-set summary with an empty (catch-all) filter
        stats_url = OBJECT_SET_SUMMARY_URL.format(project_id=prj_id)
        filters = ProjectFilters()
        stats_rsp = fastapi.post(stats_url, headers=ADMIN_AUTH, json=filters)
        assert stats_rsp.status_code == status.HTTP_200_OK
        return stats_rsp.json()

    # All is predicted, see source archive
    assert get_stats() == {
        'nb_dubious': 0,
        'nb_predicted': 8,
        'nb_unclassified': 0,
        'nb_validated': 0,
        'projid': prj_id,
        'used_taxa': [45072, 78418, 84963, 85011, 85012, 85078]
    }

    # Try a revert on a fresh project
    url = OBJECT_SET_REVERT_URL.format(project_id=prj_id,
                                       dry_run=True,
                                       tgt_usr="******" +
                                       str(ORDINARY_USER2_USER_ID))
    rsp = fastapi.post(url, headers=CREATOR_AUTH, json={})
    # Security barrier
    assert rsp.status_code == status.HTTP_403_FORBIDDEN

    # Working revert, erase all from import - dry first
    url = OBJECT_SET_REVERT_URL.format(project_id=prj_id,
                                       dry_run=True,
                                       tgt_usr="")
    rsp = fastapi.post(url, headers=ADMIN_AUTH, json={})
    assert rsp.status_code == status.HTTP_200_OK
    stats = rsp.json()
    assert len(stats['classif_info']) == 6
    assert len(stats['last_entries']) == 8
    # Working revert, erase all from import
    url = OBJECT_SET_REVERT_URL.format(project_id=prj_id,
                                       dry_run=False,
                                       tgt_usr="")
    rsp = fastapi.post(url, headers=ADMIN_AUTH, json={})
    assert rsp.status_code == status.HTTP_200_OK
    stats = rsp.json()
    # assert stats == {'classif_info': {}, 'last_entries': []}

    # Same stats
    assert get_stats() == {
        'nb_dubious': 0,
        'nb_predicted': 0,
        'nb_unclassified': 8,
        'nb_validated': 0,
        'projid': prj_id,
        'used_taxa': [-1]
    }

    obj_stats = get_object_set_stats()
    assert obj_stats == {
        'dubious_objects': 0,
        'predicted_objects': 0,
        'total_objects': 8,
        'validated_objects': 0
    }

    # Reset all to predicted
    url = OBJECT_SET_RESET_PREDICTED_URL.format(project_id=prj_id)
    rsp = fastapi.post(url, headers=ADMIN_AUTH, json={})
    assert rsp.status_code == status.HTTP_200_OK

    # Super ML result, 4 first objects are crustacea
    def classify_auto_all(classif_id):
        # Auto-classify the 4 first objects with a fixed 0.52 score
        url = OBJECT_SET_CLASSIFY_AUTO_URL
        classifications = [classif_id for _obj in obj_ids[:4]]
        scores = [0.52 for _obj in obj_ids[:4]]
        rsp = fastapi.post(url,
                           headers=ADMIN_AUTH,
                           json={
                               "target_ids": obj_ids[:4],
                               "classifications": classifications,
                               "scores": scores,
                               "keep_log": True
                           })
        assert rsp.status_code == status.HTTP_200_OK

    classify_auto_all(crustacea)

    assert get_stats() == {
        'nb_dubious': 0,
        'nb_predicted': 4,
        'nb_unclassified': 4,
        'nb_validated': 0,
        'projid': prj_id,
        'used_taxa': [-1, crustacea]
    }

    # Admin (me!) thinks that all is a copepod :)
    def classify_all(classif_id):
        # Manually validate every object as classif_id
        url = OBJECT_SET_CLASSIFY_URL
        classifications = [classif_id for _obj in obj_ids]
        rsp = fastapi.post(url,
                           headers=ADMIN_AUTH,
                           json={
                               "target_ids": obj_ids,
                               "classifications": classifications,
                               "wanted_qualification": "V"
                           })
        assert rsp.status_code == status.HTTP_200_OK

    classify_all(copepod_id)

    # Same stats
    assert get_stats() == {
        'nb_dubious': 0,
        'nb_predicted': 0,
        'nb_unclassified': 0,
        'nb_validated': 8,
        'projid': prj_id,
        'used_taxa': [25828]
    }  # No more Unclassified and Copepod is in +

    # No history yet as the object was just created
    def classif_history():
        # Classification history of the first object
        url = OBJECT_HISTORY_QUERY_URL.format(object_id=obj_ids[0])
        response = fastapi.get(url, headers=ADMIN_AUTH)
        assert response.status_code == status.HTTP_200_OK
        return response.json()

    classif = classif_history()
    assert classif is not None
    assert len(classif) == 0

    # Not a copepod :(
    classify_all(entomobryomorpha_id)

    def classify_all_no_change(classif_id):
        # -1 means "keep current classification", only the qualification moves
        url = OBJECT_SET_CLASSIFY_URL
        classifications = [-1 for _obj in obj_ids]
        rsp = fastapi.post(url,
                           headers=ADMIN_AUTH,
                           json={
                               "target_ids": obj_ids,
                               "classifications": classifications,
                               "wanted_qualification": "V"
                           })
        assert rsp.status_code == status.HTTP_200_OK

    classify_all_no_change(entomobryomorpha_id)

    classif2 = classif_history()
    assert classif2 is not None
    # Date is not predictable
    classif2[0]['classif_date'] = 'hopefully just now'
    # nor object_id
    classif2[0]['objid'] = 1
    assert classif2 == [{
        'classif_date': 'hopefully just now',
        'classif_id': 25828,
        'classif_qual': 'V',
        'classif_score': None,
        'classif_type': 'M',
        'classif_who': 1,
        'objid': 1,
        'taxon_name': 'Copepoda',
        'user_name': 'Application Administrator'
    }]

    # There should be 0 predicted
    obj_ids = _prj_query(fastapi, CREATOR_AUTH, prj_id, statusfilter='P')
    assert len(obj_ids) == 0
    # There should be 8 validated
    obj_ids = _prj_query(fastapi, CREATOR_AUTH, prj_id, statusfilter='V')
    assert len(obj_ids) == 8

    url = PROJECT_CLASSIF_STATS_URL.format(prj_ids="%s" % prj_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    assert rsp.json() == [
        {
            'nb_dubious': 0,
            'nb_predicted': 0,
            'nb_unclassified': 0,
            'nb_validated': 8,
            'projid': prj_id,
            'used_taxa': [25835]
        }
    ]  # <- copepod is gone, unclassified as well, replaced with entomobryomorpha

    # Delete some object via API, why not?
    rsp = fastapi.delete(OBJECT_SET_DELETE_URL,
                         headers=ADMIN_AUTH,
                         json=obj_ids[:4])
    assert rsp.status_code == status.HTTP_200_OK

    # Ensure they are gone
    rsp = fastapi.post(OBJECT_SET_PARENTS_URL,
                       headers=ADMIN_AUTH,
                       json=obj_ids)
    assert rsp.status_code == status.HTTP_200_OK
    resp = rsp.json()
    assert len(resp['acquisition_ids']) == 4
    for prj in resp['project_ids']:
        assert prj == prj_id
    assert resp['total_ids'] == 4
def test_create_collection(config, database, fastapi, caplog):
    """Collection lifecycle: create, read (with auth checks), update, search, delete.

    Also verifies that updating the project list of a collection raises,
    and that a deleted collection returns 404.
    """
    caplog.set_level(logging.FATAL)

    # Admin imports the project
    from tests.test_import import test_import
    prj_id = test_import(config, database, caplog, "Collection project 1")

    # And creates a collection with it
    url = COLLECTION_CREATE_URL
    rsp = fastapi.post(url,
                       headers=ADMIN_AUTH,
                       json={
                           "title": "Test collection",
                           "project_ids": [prj_id]
                       })
    assert rsp.status_code == status.HTTP_200_OK
    coll_id = rsp.json()

    # Faulty re-read
    url = COLLECTION_QUERY_URL.format(collection_id=-1)
    rsp = fastapi.get(url)
    assert rsp.status_code == status.HTTP_403_FORBIDDEN

    # Re-read
    url = COLLECTION_QUERY_URL.format(collection_id=coll_id)
    rsp = fastapi.get(url)
    # No admin, error
    assert rsp.status_code == status.HTTP_403_FORBIDDEN
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    the_coll = rsp.json()
    # A fresh collection has only title and project list filled
    assert the_coll == {
        'abstract': None,
        'associate_organisations': [],
        'associate_users': [],
        'citation': None,
        'contact_user': None,
        'creator_organisations': [],
        'creator_users': [],
        'description': None,
        'id': coll_id,
        'license': '',
        'project_ids': [prj_id],
        'provider_user': None,
        'title': 'Test collection'
    }

    # Update the abstract
    url = COLLECTION_UPDATE_URL.format(collection_id=coll_id)
    the_coll['abstract'] = """
    A bit less abstract...
    """
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=the_coll)
    assert rsp.status_code == status.HTTP_200_OK

    # Fail updating the project list
    url = COLLECTION_UPDATE_URL.format(collection_id=coll_id)
    the_coll["project_ids"] = [1, 5, 6]
    with pytest.raises(Exception):
        rsp = fastapi.put(url, headers=ADMIN_AUTH, json=the_coll)

    # Search for it
    url = COLLECTION_SEARCH_URL.format(title="%coll%")
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    # Only the abstract changed compared to the creation-time read
    assert rsp.json() == [{
        'abstract': """
    A bit less abstract...
    """,
        'associate_organisations': [],
        'associate_users': [],
        'citation': None,
        'contact_user': None,
        'creator_organisations': [],
        'creator_users': [],
        'description': None,
        'id': coll_id,
        'license': '',
        'project_ids': [prj_id],
        'provider_user': None,
        'title': 'Test collection'
    }]

    # Empty search test
    url = COLLECTION_SEARCH_URL.format(title="coll%")
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    assert rsp.json() == []

    # Delete the collection
    url = COLLECTION_DELETE_URL.format(collection_id=coll_id)
    rsp = fastapi.delete(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK

    # Ensure it's gone
    url = COLLECTION_QUERY_URL.format(collection_id=coll_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_404_NOT_FOUND
# Beispiel #10 ("Example #10" separator artifact from the code-collection source)
# 0
def test_emodnet_export(config, database, fastapi, caplog):
    """EMODnet export of a collection, job-based API variant.

    First export attempt fails (missing metadata), then project license,
    contact, concentration data and collection metadata are filled in and
    three export flavours (computed, with zeroes, raw) are run and their
    zips compared to reference archives.
    """
    caplog.set_level(logging.FATAL)

    # Admin imports the project
    from tests.test_import import test_import, test_import_a_bit_more_skipping
    prj_id = test_import(config, database, caplog, "EMODNET project")
    # Add a sample spanning 2 days
    test_import_a_bit_more_skipping(config, database, caplog,
                                    "EMODNET project")

    # Get the project for update
    url = PROJECT_QUERY_URL.format(project_id=prj_id, manage=True)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    prj_json = rsp.json()

    coll_title = "EMODNET test collection"
    # Create a minimal collection with only this project
    url = COLLECTION_CREATE_URL
    rsp = fastapi.post(url,
                       headers=ADMIN_AUTH,
                       json={
                           "title": coll_title,
                           "project_ids": [prj_id]
                       })
    assert rsp.status_code == status.HTTP_200_OK
    coll_id = rsp.json()

    caplog.set_level(logging.DEBUG)

    # Admin exports it
    # First attempt with LOTS of missing data
    url = COLLECTION_EXPORT_EMODNET_URL.format(collection_id=coll_id,
                                               dry=False,
                                               zeroes=True,
                                               comp=True,
                                               morph=True)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    job_id = rsp.json()["job_id"]
    # Block until the job reaches a stable state, then expect failure
    job = wait_for_stable(job_id)
    api_check_job_failed(fastapi, job_id, '5 error(s) during run')
    # TODO: Errors text
    # assert rsp.json()["errors"] == ['No valid data creator (user or organisation) found for EML metadata.',
    #                                 'No valid contact user found for EML metadata.',
    #                                 "No valid metadata provider user found for EML metadata.",
    #                                 "Collection 'abstract' field is empty",
    #                                 "Collection license should be one of [<LicenseEnum.CC0: 'CC0 1.0'>, "
    #                                 "<LicenseEnum.CC_BY: 'CC BY 4.0'>, <LicenseEnum.CC_BY_NC: 'CC BY-NC 4.0'>] to be "
    #                                 "accepted, not ."]
    # assert rsp.json()["warnings"] == []

    # Validate everything, otherwise no export.
    obj_ids = _prj_query(fastapi, CREATOR_AUTH, prj_id)
    assert len(obj_ids) == 11
    url = OBJECT_SET_CLASSIFY_URL
    classifications = [-1 for _obj in obj_ids]  # Keep current
    rsp = fastapi.post(url,
                       headers=ADMIN_AUTH,
                       json={
                           "target_ids": obj_ids,
                           "classifications": classifications,
                           "wanted_qualification": "V"
                       })
    assert rsp.status_code == status.HTTP_200_OK

    # Update underlying project license
    url = PROJECT_UPDATE_URL.format(project_id=prj_id)
    prj_json["license"] = "CC BY 4.0"
    # And give a contact who is now mandatory
    prj_json["contact"] = prj_json["managers"][0]
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=prj_json)
    assert rsp.status_code == status.HTTP_200_OK

    add_concentration_data(fastapi, prj_id)

    # Update the collection to fill in missing data
    url = COLLECTION_QUERY_URL.format(collection_id=coll_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    the_coll = rsp.json()
    url = COLLECTION_UPDATE_URL.format(collection_id=coll_id)
    the_coll['abstract'] = """
This series is part of the long term planktonic monitoring of
    # Villefranche-sur-mer, which is one of the oldest and richest in the world.
    # The data collection and processing has been funded by several projects
    # over its lifetime. It is currently supported directly by the Institut de la Mer
    # de Villefranche (IMEV), as part of its long term monitoring effort.
    """
    the_coll[
        'license'] = "CC BY 4.0"  # Would do nothing as the license comes from the underlying project
    user_doing_all = {
        'id': REAL_USER_ID,
        # TODO: below is redundant with ID and ignored, but fails validation (http 422) if not set
        'email': 'creator',
        'name': 'User Creating Projects'
    }
    the_coll['creator_users'] = [user_doing_all]
    the_coll['contact_user'] = user_doing_all
    the_coll['provider_user'] = user_doing_all
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=the_coll)
    assert rsp.status_code == status.HTTP_200_OK

    # Second export attempt: metadata is now complete, job must succeed
    url = COLLECTION_EXPORT_EMODNET_URL.format(collection_id=coll_id,
                                               dry=False,
                                               zeroes=False,
                                               comp=True,
                                               morph=True)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    job_id = rsp.json()["job_id"]
    job = wait_for_stable(job_id)
    api_check_job_ok(fastapi, job_id)
    # warns = rsp.json()["warnings"]
    # # assert warns == []
    # assert rsp.json()["errors"] == []
    # job_id = rsp.json()["job_id"]

    # Download the result zip
    url = JOB_DOWNLOAD_URL.format(job_id=job_id)
    # Ensure it's not public
    rsp = fastapi.get(url)
    assert rsp.status_code == status.HTTP_403_FORBIDDEN
    # But the creator can get it
    # rsp = fastapi.get(url, headers=REAL_USER_AUTH)
    # assert rsp.status_code == status.HTTP_200_OK

    # Admin can get it
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    # Compare produced archive with the (date-patched) reference one
    set_dates_in_ref(ref_zip)
    unzip_and_check(rsp.content, ref_zip)

    # Same export but including zero-abundance records
    url_with_0s = COLLECTION_EXPORT_EMODNET_URL.format(collection_id=coll_id,
                                                       dry=False,
                                                       zeroes=True,
                                                       comp=True,
                                                       morph=True)
    rsp = fastapi.get(url_with_0s, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    job_id = rsp.json()["job_id"]
    job = wait_for_stable(job_id)
    api_check_job_ok(fastapi, job_id)
    dl_url = JOB_DOWNLOAD_URL.format(job_id=job_id)
    rsp = fastapi.get(dl_url, headers=ADMIN_AUTH)
    set_dates_in_ref(with_zeroes_zip)
    unzip_and_check(rsp.content, with_zeroes_zip)

    # Raw-data export: no computed (concentration) values
    url_raw_data = COLLECTION_EXPORT_EMODNET_URL.format(collection_id=coll_id,
                                                        dry=False,
                                                        zeroes=False,
                                                        comp=False,
                                                        morph=True)
    rsp = fastapi.get(url_raw_data, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    job_id = rsp.json()["job_id"]
    job = wait_for_stable(job_id)
    api_check_job_ok(fastapi, job_id)
    dl_url = JOB_DOWNLOAD_URL.format(job_id=job_id)
    rsp = fastapi.get(dl_url, headers=ADMIN_AUTH)
    set_dates_in_ref(no_computations_zip)
    unzip_and_check(rsp.content, no_computations_zip)

    # The collection can also be fetched back by its title
    url_query_back = COLLECTION_QUERY_BY_TITLE_URL.format(title=coll_title)
    rsp = fastapi.get(url_query_back)
    assert rsp.status_code == status.HTTP_200_OK
    coll_desc = rsp.json()
    assert coll_desc['title'] == coll_title
def test_emodnet_export(config, database, fastapi, caplog):
    """EMODnet export of a collection, task-based API variant.

    NOTE(review): this re-defines test_emodnet_export above, so under pytest
    only this definition is collected and the job-based version never runs.
    This one uses "task_id"/TASK_DOWNLOAD_URL and looks like an older API
    variant — confirm which of the two should be kept, and rename or drop
    the other.
    """
    caplog.set_level(logging.FATAL)

    # Admin imports the project
    from tests.test_import import test_import, test_import_a_bit_more_skipping
    prj_id = test_import(config, database, caplog, "EMODNET project")
    # Add a sample spanning 2 days
    test_import_a_bit_more_skipping(config, database, caplog,
                                    "EMODNET project")

    # Get the project for update
    url = PROJECT_QUERY_URL.format(project_id=prj_id, manage=True)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    prj_json = rsp.json()

    # Create a minimal collection with only this project
    url = COLLECTION_CREATE_URL
    rsp = fastapi.post(url,
                       headers=ADMIN_AUTH,
                       json={
                           "title": "EMODNET test collection",
                           "project_ids": [prj_id]
                       })
    assert rsp.status_code == status.HTTP_200_OK
    coll_id = rsp.json()

    caplog.set_level(logging.DEBUG)

    # Admin exports it
    # First attempt with LOTS of missing data
    url = COLLECTION_EXPORT_EMODNET_URL.format(collection_id=coll_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    # The call itself succeeds but reports every missing metadata item
    assert rsp.json()["errors"] == [
        'No valid data creator (user or organisation) found for EML metadata.',
        'No valid contact user found for EML metadata.',
        "No valid metadata provider user found for EML metadata.",
        "Collection 'abstract' field is empty",
        "Collection license should be one of [<LicenseEnum.CC0: 'CC0 1.0'>, "
        "<LicenseEnum.CC_BY: 'CC BY 4.0'>, <LicenseEnum.CC_BY_NC: 'CC BY-NC 4.0'>] to be "
        "accepted, not ."
    ]
    assert rsp.json()["warnings"] == []
    task_id = rsp.json()["task_id"]
    assert task_id == 0  # No valid task as there were errors

    # Update underlying project license
    url = PROJECT_UPDATE_URL.format(project_id=prj_id)
    prj_json["license"] = "CC BY 4.0"
    # And give a contact who is now mandatory
    prj_json["contact"] = prj_json["managers"][0]
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=prj_json)
    assert rsp.status_code == status.HTTP_200_OK

    add_concentration_data(fastapi, prj_id)

    # Update the collection to fill in missing data
    url = COLLECTION_QUERY_URL.format(collection_id=coll_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    the_coll = rsp.json()
    url = COLLECTION_UPDATE_URL.format(collection_id=coll_id)
    the_coll['abstract'] = """
This series is part of the long term planktonic monitoring of
    # Villefranche-sur-mer, which is one of the oldest and richest in the world.
    # The data collection and processing has been funded by several projects
    # over its lifetime. It is currently supported directly by the Institut de la Mer
    # de Villefranche (IMEV), as part of its long term monitoring effort.
    """
    the_coll[
        'license'] = "CC BY 4.0"  # Would do nothing as the license comes from the underlying project
    user_doing_all = {
        'id': REAL_USER_ID,
        # TODO: below is redundant with ID, ignored, but fails validation (http 422) if not set
        'email': 'creator',
        'name': 'User Creating Projects'
    }
    the_coll['creator_users'] = [user_doing_all]
    the_coll['contact_user'] = user_doing_all
    the_coll['provider_user'] = user_doing_all
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=the_coll)
    assert rsp.status_code == status.HTTP_200_OK

    # Second export attempt: metadata is complete, a real task is produced
    url = COLLECTION_EXPORT_EMODNET_URL.format(collection_id=coll_id)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    warns = rsp.json()["warnings"]
    #assert warns == []
    assert rsp.json()["errors"] == []
    task_id = rsp.json()["task_id"]

    # Download the result zip
    url = TASK_DOWNLOAD_URL.format(task_id=task_id)
    # Ensure it's not public
    rsp = fastapi.get(url)
    assert rsp.status_code == status.HTTP_403_FORBIDDEN
    # But the creator can get it
    # rsp = fastapi.get(url, headers=REAL_USER_AUTH)
    # assert rsp.status_code == status.HTTP_200_OK
    # Admin can get it
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK