def test_admin_images(config, database, fastapi, caplog):
    """Digest endpoint: forbidden for plain users, works for admin, and the
    computed md5 digests are persisted (second run finds nothing to do)."""
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import_uvp6
    prj_id = test_import_uvp6(config, database, caplog, "Test Project Admin")
    url = PROJECT_DIGEST_URL.format(project_id=prj_id)
    # Simple user cannot
    rsp = fastapi.get(url, headers=USER_AUTH)
    assert rsp.status_code == status.HTTP_403_FORBIDDEN
    # Admin can
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    assert rsp.json() == "Digest for 30 images done."
    # TODO: some common error cases
    # md5 is persisted, so a re-run has no image left to digest
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.status_code == status.HTTP_200_OK
    assert rsp.json() == "Digest for 0 images done."
def test_subentities(config, database, fastapi, caplog):
    """Walk up the hierarchy (object -> sample -> acquisition -> process)
    via the single-entity query endpoints, checking both the 404 path for a
    bogus ID and the 200 path for a real one, then the public object-history
    endpoint."""
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import_uvp6
    prj_id = test_import_uvp6(config, database, caplog, "Test Subset Merge")
    check_project(prj_id)
    # Pick the first object
    qry_rsp, _total = ObjectManager().query(ADMIN_USER_ID, prj_id, filters={})
    first_obj = qry_rsp[0]
    first_objid = first_obj[0]  # obj id
    # Wrong ID
    url = OBJECT_QUERY_URL.format(object_id=-1)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_404_NOT_FOUND
    # OK ID
    url = OBJECT_QUERY_URL.format(object_id=first_objid)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_200_OK
    obj = response.json()
    assert obj is not None
    # Move up in hierarchy
    sample_id = first_obj[2]
    # Wrong ID
    url = SAMPLE_QUERY_URL.format(sample_id=-1)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_404_NOT_FOUND
    # OK ID
    url = SAMPLE_QUERY_URL.format(sample_id=sample_id)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_200_OK
    sample = response.json()
    assert sample is not None
    acquis_id = first_obj[1]
    # Wrong ID
    url = ACQUISITION_QUERY_URL.format(acquisition_id=-1)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_404_NOT_FOUND
    # OK ID
    url = ACQUISITION_QUERY_URL.format(acquisition_id=acquis_id)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_200_OK
    acquisition = response.json()
    assert acquisition is not None
    process_id = acquis_id
    # Wrong ID
    url = PROCESS_QUERY_URL.format(process_id=-1)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_404_NOT_FOUND
    # OK ID
    url = PROCESS_QUERY_URL.format(process_id=process_id)
    response = fastapi.get(url, headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_200_OK
    process = response.json()
    assert process is not None
    # Wrong ID
    url = OBJECT_HISTORY_QUERY_URL.format(object_id=-1)
    response = fastapi.get(url)
    # TODO: A 0-len history should be a not found ?
    # assert response.status_code == status.HTTP_404_NOT_FOUND
    # OK ID
    url = OBJECT_HISTORY_QUERY_URL.format(object_id=first_objid)
    response = fastapi.get(
        url
    )  # The entry point is public and project as well, no need for: , headers=ADMIN_AUTH)
    assert response.status_code == status.HTTP_200_OK
    classif = response.json()
    assert classif is not None
    assert len(classif) == 0
def test_update_prj(config, database, fastapi, caplog):
    """Legacy-style project update: fetch full project JSON, compare against a
    frozen reference, modify a few fields and PUT it back. Also covers the
    contact-related error cases of ecotaxa/ecotaxa_dev#596 and #602."""
    caplog.set_level(logging.ERROR)
    from tests.test_import import test_import_uvp6
    prj_id = test_import_uvp6(config, database, caplog, "Test Project Updates")
    # Do like in legacy app, i.e. fetch/modify/resend
    url = PROJECT_QUERY_URL.format(project_id=prj_id, manage=True)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    ref_json = {'acquisition_free_cols': {'aa': 't03', 'exp': 't04', 'gain': 't06',
                                          'pixel': 't05', 'ratio': 't10',
                                          'smbase': 't08', 'smzoo': 't09',
                                          'sn': 't01', 'threshold': 't07',
                                          'volimage': 't02'},
                'annotators': [],
                'highest_right': 'Manage',
                'classiffieldlist': None,
                'classifsettings': None,
                'cnn_network_id': None,
                'comments': None,
                'contact': None,
                'instrument': 'uvp6',
                'init_classif_list': [],
                'license': '',
                'managers': [{'active': True,
                              'country': None,
                              'email': 'admin',
                              'id': 1,
                              'name': 'Application Administrator',
                              'organisation': None,
                              'usercreationdate': '2020-05-12T08:59:48.701060',
                              'usercreationreason': None}],
                'obj_free_cols': {'%area': 'n23', 'angle': 'n16', 'area': 'n01',
                                  'area_exc': 'n24', 'circ.': 'n17',
                                  'circex': 'n51', 'convarea': 'n39',
                                  'convarea_area': 'n55', 'convperim': 'n38',
                                  'convperim_perim': 'n54', 'cv': 'n45',
                                  'elongation': 'n42', 'fcons': 'n40',
                                  'feret': 'n18', 'feretareaexc': 'n48',
                                  'fractal': 'n25', 'height': 'n13',
                                  'histcum1': 'n28', 'histcum2': 'n29',
                                  'histcum3': 'n30', 'intden': 'n19',
                                  'kurt': 'n22', 'kurt_mean': 'n52',
                                  'major': 'n14', 'max': 'n06', 'mean': 'n02',
                                  'meanpos': 'n44', 'median': 'n20',
                                  'median_mean': 'n64',
                                  'median_mean_range': 'n65', 'min': 'n05',
                                  'minor': 'n15', 'mode': 'n04', 'nb1': 'n31',
                                  'nb1_area': 'n58', 'nb1_range': 'n61',
                                  'nb2': 'n32', 'nb2_area': 'n59',
                                  'nb2_range': 'n62', 'nb3': 'n33',
                                  'nb3_area': 'n60', 'nb3_range': 'n63',
                                  'perim.': 'n11', 'perimareaexc': 'n47',
                                  'perimferet': 'n49', 'perimmajor': 'n50',
                                  'range': 'n43', 'skelarea': 'n26',
                                  'skeleton_area': 'n66', 'skew': 'n21',
                                  'skew_mean': 'n53', 'slope': 'n27',
                                  'sr': 'n46', 'stddev': 'n03',
                                  'symetrieh': 'n34', 'symetrieh_area': 'n56',
                                  'symetriehc': 'n36', 'symetriev': 'n35',
                                  'symetriev_area': 'n57', 'symetrievc': 'n37',
                                  'thickr': 'n41', 'width': 'n12', 'x': 'n07',
                                  'xm': 'n09', 'y': 'n08', 'ym': 'n10'},
                'objcount': 15.0,
                # 'owner': {'active': True,
                #           'country': None,
                #           'email': 'admin',
                #           'id': 1,
                #           'name': 'Application Administrator',
                #           'organisation': None,
                #           'usercreationdate': '2020-05-12T08:59:48.701060',
                #           'usercreationreason': None},
                # 'owner_id': 0,
                'pctclassified': None,
                'pctvalidated': 0.0,
                'popoverfieldlist': None,
                'process_free_cols': {'date': 't02', 'first_img': 't04',
                                      'fontcolor': 't10', 'fontheight_px': 't11',
                                      'footerheight_px': 't12', 'gamma': 't06',
                                      'invert': 't07', 'keeporiginal': 't09',
                                      'last_img': 't05', 'scale': 't13',
                                      'scalebarsize_mm': 't08',
                                      'software': 't01', 'time': 't03'},
                'projid': prj_id,
                'projtype': None,
                'rf_models_used': None,
                'sample_free_cols': {'argoid': 't17', 'barcode': 't14',
                                     'bottomdepth': 't05', 'comment': 't13',
                                     'cruise': 't02',
                                     'ctdrosettefilename': 't06', 'dn': 't07',
                                     'integrationtime': 't16',
                                     'nebuloussness': 't11', 'profileid': 't01',
                                     'sampledatetime': 't18',
                                     'sampletype': 't15', 'seastate': 't10',
                                     'ship': 't03', 'stationid': 't04',
                                     'winddir': 't08', 'windspeed': 't09',
                                     'yoyo': 't12'},
                'status': 'Annotate',
                'title': 'Test Project Updates',
                'viewers': [],
                'visible': True}
    read_json = rsp.json()
    assert read_json == ref_json
    upd_json = deepcopy(read_json)
    contact_usr = upd_json["managers"][0]
    # Attempt to reproduce ecotaxa/ecotaxa_dev#596
    del contact_usr['usercreationdate']
    contact_usr['usercreationdate'] = None
    del contact_usr['usercreationreason']
    url = PROJECT_UPDATE_URL.format(project_id=prj_id)
    upd_json["comments"] = "New comment"
    upd_json["contact"] = contact_usr
    upd_json["visible"] = False
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=upd_json)
    assert rsp.status_code == status.HTTP_200_OK
    # Re-read and check the updates were taken into account
    url = PROJECT_QUERY_URL.format(project_id=prj_id, manage=False)
    rsp = fastapi.get(url, headers=ADMIN_AUTH)
    assert rsp.json() != ref_json
    assert rsp.json()["comments"] == "New comment"
    assert rsp.json()["visible"] is False
    # For ecotaxa/ecotaxa_dev#602
    # Set no contact at all
    no_contact_upd = deepcopy(read_json)
    no_contact_upd["contact"] = None
    url = PROJECT_UPDATE_URL.format(project_id=prj_id)
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=no_contact_upd)
    assert rsp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
    assert rsp.text == '{"detail":"A valid Contact is needed."}'
    # Set a contact with wrong id (user is not a manager of the project)
    wrong_contact_upd = deepcopy(read_json)
    wrong_contact_upd["contact"] = {"id": ORDINARY_USER2_USER_ID,
                                    "name": "name",
                                    "email": "*****@*****.**"}
    url = PROJECT_UPDATE_URL.format(project_id=prj_id)
    rsp = fastapi.put(url, headers=ADMIN_AUTH, json=wrong_contact_upd)
    assert rsp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
    assert rsp.text == '{"detail":"Could not set Contact, the designated user is not in Managers list."}'
def test_updates(config, database, caplog):
    """Bulk update of samples/acquisitions/processes/objects via the services,
    then compare a project dump against the reference dump with DeepDiff and
    check that exactly the expected values changed."""
    caplog.set_level(logging.ERROR)
    prj_id = test_import_uvp6(config, database, caplog, "Test Updates")
    check_project(prj_id)
    acquis_id, process_id, sample_id = _get_ids(prj_id)
    # Typo in column name -> nothing updated
    with SamplesService() as sce:
        nb_upd = sce.update_set(ADMIN_USER_ID, [sample_id],
                                ColUpdateList([upd("chip", "sagitta4")]))
    assert nb_upd == 0
    # Update ship in the only sample, and a date to see
    upds = ColUpdateList([upd("ship", "sagitta4"),
                          upd("sampledatetime", "20200208-111218")])
    with SamplesService() as sce:
        # Duplicate ID in the list, still a single updated row
        nb_upd = sce.update_set(ADMIN_USER_ID, [sample_id, sample_id], upds)
    assert nb_upd == 1
    # Update 1st acquisition, and a float, to see
    upds = ColUpdateList([upd("orig_id", "aid5"), upd("exp", "0.6")])
    with AcquisitionsService() as sce:
        nb_upd = sce.update_set(ADMIN_USER_ID, [acquis_id], upds)
    assert nb_upd == 1
    # Update 1st process
    upds = ColUpdateList([upd("date", "20200325"), upd("invert", "n")])
    with ProcessesService() as sce:
        nb_upd = sce.update_set(ADMIN_USER_ID, [process_id], upds)
    assert nb_upd == 1
    # Update all objects
    with ObjectManager() as sce:
        objs, _details, total = sce.query(ADMIN_USER_ID, prj_id, {},
                                          order_field='objid')
    objs = [an_obj[0] for an_obj in objs]
    assert len(objs) == 15
    # Wrong column
    with ObjectManager() as sce:
        nb_upd = sce.update_set(ADMIN_USER_ID, objs,
                                ColUpdateList([upd("chip", "sagitta4")]))
    assert nb_upd == 0
    # Free column
    with ObjectManager() as sce:
        nb_upd = sce.update_set(ADMIN_USER_ID, objs,
                                ColUpdateList([upd("area", "10")]))
    assert nb_upd == 15
    # Plain column
    with ObjectManager() as sce:
        nb_upd = sce.update_set(ADMIN_USER_ID, objs,
                                ColUpdateList([upd("depth_min", "10")]))
    assert nb_upd == 15
    # Dump the project after changes
    with open(OUT_JSON_MODIF, "w") as fd:
        dump_project(ADMIN_USER_ID, prj_id, fd)
    # Special column
    # TODO: Avoiding diff on purpose, it's just to cover code.
    with ObjectManager() as sce:
        nb_upd = sce.update_set(ADMIN_USER_ID, objs,
                                ColUpdateList([upd("classif_id", "100")]))
    assert nb_upd == 15
    # Json diff
    with open(OUT_JSON_REF) as fd1:
        json_src = json.load(fd1)
    with open(OUT_JSON_MODIF) as fd2:
        json_subset = json.load(fd2)
    diffs = DeepDiff(json_src, json_subset)
    # Validate by removing all know differences b/w source and subset
    assert 'iterable_item_added' not in diffs
    assert 'iterable_item_removed' not in diffs
    assert 'dictionary_item_added' not in diffs
    assert 'dictionary_item_removed' not in diffs
    changed_values = diffs['values_changed']
    assert changed_values == {
        "root['samples'][0]['acquisitions'][0]['aid']": {
            'new_value': 'aid5', 'old_value': 'b_da_19'
        },
        "root['samples'][0]['acquisitions'][0]['exp']": {
            'new_value': '0.6', 'old_value': '1.257'
        },
        "root['samples'][0]['acquisitions'][0]['processings'][0]['date']": {
            'new_value': '20200325', 'old_value': '20200317'
        },
        "root['samples'][0]['acquisitions'][0]['processings'][0]['invert']": {
            'new_value': 'n', 'old_value': 'y'
        },
        "root['samples'][0]['acquisitions'][0]['objects'][0]['area']": {
            'new_value': 10.0, 'old_value': 207.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][0]['depth_min']": {
            'new_value': 10.0, 'old_value': 194.63
        },
        "root['samples'][0]['acquisitions'][0]['objects'][10]['area']": {
            'new_value': 10.0, 'old_value': 119.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][10]['depth_min']": {
            'new_value': 10.0, 'old_value': 215.76
        },
        "root['samples'][0]['acquisitions'][0]['objects'][11]['area']": {
            'new_value': 10.0, 'old_value': 137.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][11]['depth_min']": {
            'new_value': 10.0, 'old_value': 224.44
        },
        "root['samples'][0]['acquisitions'][0]['objects'][12]['area']": {
            'new_value': 10.0, 'old_value': 93.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][12]['depth_min']": {
            'new_value': 10.0, 'old_value': 252.235
        },
        "root['samples'][0]['acquisitions'][0]['objects'][13]['area']": {
            'new_value': 10.0, 'old_value': 165.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][13]['depth_min']": {
            'new_value': 10.0, 'old_value': 253.615
        },
        "root['samples'][0]['acquisitions'][0]['objects'][14]['area']": {
            'new_value': 10.0, 'old_value': 360.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][14]['depth_min']": {
            'new_value': 10.0, 'old_value': 255.44
        },
        "root['samples'][0]['acquisitions'][0]['objects'][1]['area']": {
            'new_value': 10.0, 'old_value': 107.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][1]['depth_min']": {
            'new_value': 10.0, 'old_value': 195.36
        },
        "root['samples'][0]['acquisitions'][0]['objects'][2]['area']": {
            'new_value': 10.0, 'old_value': 122.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][2]['depth_min']": {
            'new_value': 10.0, 'old_value': 195.68
        },
        "root['samples'][0]['acquisitions'][0]['objects'][3]['area']": {
            'new_value': 10.0, 'old_value': 94.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][3]['depth_min']": {
            'new_value': 10.0, 'old_value': 195.68
        },
        "root['samples'][0]['acquisitions'][0]['objects'][4]['area']": {
            'new_value': 10.0, 'old_value': 199.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][4]['depth_min']": {
            'new_value': 10.0, 'old_value': 195.68
        },
        "root['samples'][0]['acquisitions'][0]['objects'][5]['area']": {
            'new_value': 10.0, 'old_value': 176.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][5]['depth_min']": {
            'new_value': 10.0, 'old_value': 212.62
        },
        "root['samples'][0]['acquisitions'][0]['objects'][6]['area']": {
            'new_value': 10.0, 'old_value': 151.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][6]['depth_min']": {
            'new_value': 10.0, 'old_value': 213.525
        },
        "root['samples'][0]['acquisitions'][0]['objects'][7]['area']": {
            'new_value': 10.0, 'old_value': 90.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][7]['depth_min']": {
            'new_value': 10.0, 'old_value': 214.165
        },
        "root['samples'][0]['acquisitions'][0]['objects'][8]['area']": {
            'new_value': 10.0, 'old_value': 158.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][8]['depth_min']": {
            'new_value': 10.0, 'old_value': 215.415
        },
        "root['samples'][0]['acquisitions'][0]['objects'][9]['area']": {
            'new_value': 10.0, 'old_value': 163.0
        },
        "root['samples'][0]['acquisitions'][0]['objects'][9]['depth_min']": {
            'new_value': 10.0, 'old_value': 215.76
        },
        "root['samples'][0]['sampledatetime']": {
            'new_value': '20200208-111218', 'old_value': '20200205-111218'
        },
        "root['samples'][0]['ship']": {
            'new_value': 'sagitta4', 'old_value': 'sagitta3'
        }
    }