def test_candidate_list_saved_to_any_accessible_groups(
    upload_data_token_two_groups,
    view_only_token_two_groups,
    public_filter,
    public_group,
    public_group2,
):
    """Candidates saved to any group the user can access are returned by the
    "savedToAnyAccessible" saved-status filter, even when the saving group is
    not among the queried group IDs."""
    # Post two candidates passing a filter that belongs to public_group.
    saved_obj_id = str(uuid.uuid4())
    unsaved_obj_id = str(uuid.uuid4())
    for candidate_id in (saved_obj_id, unsaved_obj_id):
        status, data = api(
            "POST",
            "candidates",
            data={
                "id": candidate_id,
                "ra": 234.22,
                "dec": -22.33,
                "redshift": 3,
                "transient": False,
                "ra_dis": 2.3,
                "filter_ids": [public_filter.id],
                "passed_at": str(datetime.datetime.utcnow()),
            },
            token=upload_data_token_two_groups,
        )
        assert status == 200

    # Save the first candidate as a source — but only to public_group2.
    status, data = api(
        "POST",
        "sources",
        data={"id": saved_obj_id, "group_ids": [public_group2.id]},
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data["data"]["id"] == saved_obj_id

    # Query candidates passing public_filter (owned by public_group).
    # With "savedToAnyAccessible" the saved candidate should come back even
    # though it was saved only to public_group2; the never-saved candidate
    # should not.
    status, data = api(
        "GET",
        "candidates",
        params={
            "groupIDs": f"{public_group.id}",
            "savedStatus": "savedToAnyAccessible",
        },
        token=view_only_token_two_groups,
    )
    assert status == 200
    assert len(data["data"]["candidates"]) == 1
    assert data["data"]["candidates"][0]["id"] == saved_obj_id
def test_add_and_retrieve_annotation_group_access(
    annotation_token_two_groups,
    public_source_two_groups,
    public_group2,
    public_group,
    annotation_token,
):
    """Annotations are visible only to tokens belonging to at least one of
    the groups the annotation was posted to; posting to both groups makes
    the annotation visible to both tokens."""
    # Post an annotation visible only to public_group2.
    status, data = api(
        'POST',
        f'sources/{public_source_two_groups.id}/annotations',
        data={
            'origin': 'kowalski',
            'data': {'offset_from_host_galaxy': 1.5},
            'group_ids': [public_group2.id],
        },
        token=annotation_token_two_groups,
    )
    assert status == 200
    annotation_id = data['data']['annotation_id']

    # This token belongs to public_group2
    status, data = api(
        'GET',
        f'sources/{public_source_two_groups.id}/annotations/{annotation_id}',
        token=annotation_token_two_groups,
    )
    assert status == 200
    assert data['data']['data'] == {'offset_from_host_galaxy': 1.5}
    assert data['data']['origin'] == 'kowalski'

    # This token does not belong to public_group2
    status, data = api(
        'GET',
        f'sources/{public_source_two_groups.id}/annotations/{annotation_id}',
        token=annotation_token,
    )
    assert status == 403

    # Both tokens should be able to view this annotation
    status, data = api(
        'POST',
        f'sources/{public_source_two_groups.id}/annotations',
        data={
            'origin': 'GAIA',
            'data': {'offset_from_host_galaxy': 1.5},
            'group_ids': [public_group.id, public_group2.id],
        },
        token=annotation_token_two_groups,
    )
    assert status == 200
    annotation_id = data['data']['annotation_id']

    status, data = api(
        'GET',
        f'sources/{public_source_two_groups.id}/annotations/{annotation_id}',
        token=annotation_token_two_groups,
    )
    assert status == 200
    assert data['data']['data'] == {'offset_from_host_galaxy': 1.5}
    assert data['data']['origin'] == 'GAIA'

    status, data = api(
        'GET',
        f'sources/{public_source_two_groups.id}/annotations/{annotation_id}',
        token=annotation_token,
    )
    assert status == 200
    assert data['data']['data'] == {'offset_from_host_galaxy': 1.5}
def test_candidate_list(view_only_token, public_candidate):
    """A read-only token can list candidates successfully."""
    response_status, response = api("GET", "candidates", token=view_only_token)
    assert response_status == 200
    assert response["status"] == "success"
def test_add_sources_two_groups(
    driver,
    super_admin_user_two_groups,
    public_group,
    public_group2,
    upload_data_token_two_groups,
    taxonomy_token_two_groups,
    classification_token_two_groups,
):
    """End-to-end check that a source saved to two groups shows both saved-at
    times and per-group classifications on the sources page.

    NOTE(review): assumes the host clock and the API agree on UTC and that the
    sources table renders saved-at timestamps in ISO format — confirm if this
    test becomes flaky.
    """
    obj_id = str(uuid.uuid4())
    t1 = datetime.now(timezone.utc)
    # upload a new source, saved to the public group
    status, data = api(
        'POST',
        'sources',
        data={
            'id': f'{obj_id}',
            'ra': 234.22,
            'dec': -22.33,
            'redshift': 0.153,
            'altdata': {'simbad': {'class': 'RRLyr'}},
            'transient': False,
            'ra_dis': 2.3,
            'group_ids': [public_group.id],
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data['data']['id'] == f'{obj_id}'

    driver.get(f"/become_user/{super_admin_user_two_groups.id}"
               )  # TODO decorator/context manager?
    assert 'localhost' in driver.current_url
    driver.get('/sources')

    # filter on the object id
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    obj_button = driver.wait_for_xpath("//input[@name='sourceID']")
    obj_button.clear()
    obj_button.send_keys(obj_id)
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )

    # find the name of the newly added source
    driver.wait_for_xpath(f"//a[contains(@href, '/source/{obj_id}')]")

    # find the date it was saved
    saved_at_element = driver.wait_for_xpath(
        f"//*[text()[contains(., '{t1.strftime('%Y-%m-%dT%H:%M')}')]]")
    saved_group1 = parser.parse(saved_at_element.text + " UTC")
    assert abs(saved_group1 - t1) < timedelta(seconds=30)

    # check the redshift shows up
    driver.wait_for_xpath(f"//*[text()[contains(., '{'0.153'}')]]")

    # little triangle you push to expand the table
    driver.click_xpath(
        "//tr[@data-testid='MUIDataTableBodyRow-0']//*[@id='expandable-button']"
    )

    # make sure the div containing the individual source appears
    driver.wait_for_xpath(f'//tr[@data-testid="groupSourceExpand_{obj_id}"]')

    # post a taxonomy and classification
    status, data = api(
        'POST',
        'taxonomy',
        data={
            'name': "test taxonomy" + str(uuid.uuid4()),
            'hierarchy': taxonomy,
            'group_ids': [public_group.id, public_group2.id],
            'provenance': f"tdtax_{__version__}",
            'version': __version__,
            'isLatest': True,
        },
        token=taxonomy_token_two_groups,
    )
    assert status == 200
    taxonomy_id = data['data']['taxonomy_id']

    status, data = api(
        'POST',
        'classification',
        data={
            'obj_id': obj_id,
            'classification': 'Algol',
            'taxonomy_id': taxonomy_id,
            'probability': 1.0,
            'group_ids': [public_group.id],
        },
        token=classification_token_two_groups,
    )
    assert status == 200

    # check the classification doesn't shows up (it should not show up without a page refresh!)
    driver.wait_for_xpath_to_disappear(
        f"//*[text()[contains(., '{'Algol'}')]]", timeout=1)

    # filter on the object id (page refresh, but still filtering on this object)
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    obj_button = driver.wait_for_xpath("//input[@name='sourceID']")
    obj_button.clear()
    obj_button.send_keys(obj_id)
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )

    # check the classification does show up after a refresh
    driver.wait_for_xpath(f"//*[text()[contains(., '{'Algol'}')]]")

    # add this source to another group
    t2 = datetime.now(timezone.utc)
    status, data = api(
        'POST',
        'sources',
        data={
            'id': f'{obj_id}',
            'ra': 234.22,
            'dec': -22.33,
            'redshift': 0.153,
            'altdata': {'simbad': {'class': 'RRLyr'}},
            'transient': False,
            'ra_dis': 2.3,
            'group_ids': [public_group2.id],
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data['status'] == 'success'

    # post another classification, by another group
    status, data = api(
        'POST',
        'classification',
        data={
            'obj_id': obj_id,
            'classification': 'RS CVn',
            'taxonomy_id': taxonomy_id,
            'probability': 1.0,
            'group_ids': [public_group2.id],
        },
        token=classification_token_two_groups,
    )
    assert status == 200

    # filter on the object id (page refresh, but still filtering on this object)
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    obj_button = driver.wait_for_xpath("//input[@name='sourceID']")
    obj_button.clear()
    obj_button.send_keys(obj_id)
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )

    # make sure the new classification, made to group 2, shows up
    driver.wait_for_xpath(f"//*[text()[contains(., '{'RS CVn'}')]]")

    # find the date it was saved to group2
    saved_at_element = driver.wait_for_xpath(
        f"//*[text()[contains(., '{t2.strftime('%Y-%m-%dT%H:%M')}')]]")
    saved_group2 = parser.parse(saved_at_element.text + " UTC")
    assert abs(saved_group2 - t2) < timedelta(seconds=2)

    # the new group must have been saved later!
    assert saved_group2 > saved_group1
def test_filter_by_alias_and_origin(
    driver,
    user,
    public_group,
    upload_data_token,
    taxonomy_token,
    classification_token,
):
    """The sources table can be filtered by alias and by origin; a
    non-matching filter value hides the source again."""
    # Post an object carrying a random alias and origin.
    source_id = str(uuid.uuid4())
    alias = str(uuid.uuid4())
    origin = str(uuid.uuid4())
    status, data = api(
        "POST",
        "sources",
        data={
            "id": source_id,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "group_ids": [public_group.id],
            "alias": [alias],
            "origin": origin,
        },
        token=upload_data_token,
    )
    assert status == 200

    driver.get(f"/become_user/{user.id}")
    driver.get("/sources")

    # Filter on the matching alias: the source should appear.
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    filter_input = driver.wait_for_xpath(
        "//*[@data-testid='alias-text']//input",
    )
    filter_input.send_keys(alias)
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']",
        scroll_parent=True,
    )
    driver.wait_for_xpath(f'//a[@data-testid="{source_id}"]')

    # Filter on a random, non-matching alias: the source should vanish.
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    filter_input = driver.wait_for_xpath("//*[@data-testid='alias-text']//input")
    filter_input.send_keys(str(uuid.uuid4()))
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )
    driver.wait_for_xpath_to_disappear(f'//a[@data-testid="{source_id}"]')

    # Filter on the matching origin: the source should appear.
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    filter_input = driver.wait_for_xpath(
        "//*[@data-testid='origin-text']//input",
    )
    filter_input.send_keys(origin)
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )
    driver.wait_for_xpath(f'//a[@data-testid="{source_id}"]')

    # Filter on a random, non-matching origin: the source should vanish.
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    filter_input = driver.wait_for_xpath(
        "//*[@data-testid='origin-text']//input",
    )
    filter_input.send_keys(str(uuid.uuid4()))
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )
    driver.wait_for_xpath_to_disappear(f'//a[@data-testid="{source_id}"]')
def test_post_instrument_fov(super_admin_token):
    """Posting an instrument with a circular FOV eventually yields the
    expected DS9 region string once the instrument fields populate.

    Fix: removed a leftover debug ``print(data['data'])`` statement.
    """
    # Create a telescope to attach the instrument to.
    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    # Create an instrument with a 3-degree circular field of view.
    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['f110w'],
            'telescope_id': telescope_id,
            'field_fov_type': 'circle',
            'field_fov_attributes': 3.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    params = {'includeRegion': True}

    # The instrument fields populate asynchronously on the server, so poll
    # until the GET returns the expected data (give up after 5 attempts).
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                token=super_admin_token,
                params=params,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'NIR'
            fields_loaded = True
        except AssertionError:
            nretries += 1
            time.sleep(3)

    assert status == 200
    assert data['status'] == 'success'
    assert (
        data['data']['region'] ==
        '# Region file format: DS9 astropy/regions\nfk5\ncircle(0.000006,0.000003,3.000000)\n'
    )
def test_observation(super_admin_token, view_only_token):
    """Upload executed observations for a GCN event, query them against a
    localization, then delete one and confirm the statistics update.

    NOTE(review): the fixed sleeps assume the server ingests fields and
    observations within 15 s — confirm if this test flakes under load.
    """
    # Post the GW190425 GCN event from its VOEvent XML.
    datafile = f'{os.path.dirname(__file__)}/../data/GW190425_initial.xml'
    with open(datafile, 'rb') as fid:
        payload = fid.read()
    data = {'xml': payload}

    status, data = api('POST', 'gcn_event', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    # Create a telescope and a ZTF-like instrument with real field/region data.
    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # wait for the fields to populate
    time.sleep(15)

    # Upload the sample executed-observation data for this instrument.
    datafile = f'{os.path.dirname(__file__)}/../../../data/sample_observation_data.csv'
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'observationData': pd.read_csv(datafile).to_dict(orient='list'),
    }

    status, data = api('POST', 'observation', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    # wait for the executed observations to populate
    time.sleep(15)

    # Query observations against the event localization with statistics.
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'startDate': "2019-04-25 08:18:05",
        'endDate': "2019-04-28 08:18:05",
        'localizationDateobs': "2019-04-25T08:18:05",
        'localizationName': "bayestar.fits.gz",
        'localizationCumprob': 1.01,
        'returnStatistics': True,
    }
    status, data = api('GET', 'observation', params=data, token=super_admin_token)
    assert status == 200
    data = data["data"]
    assert len(data['observations']) == 10
    assert np.isclose(data['probability'], 2.927898964006069e-05)
    assert any(
        [
            d['obstime'] == '2019-04-25T08:18:18.002909'
            and d['observation_id'] == 84434604
            for d in data['observations']
        ]
    )

    # Find the internal ID of one specific observation and delete it.
    for d in data['observations']:
        if d['observation_id'] == 84434604:
            observation_id = d['id']
            break

    status, data = api(
        'DELETE', f'observation/{observation_id}', token=super_admin_token
    )
    assert status == 200

    # Re-query: one fewer observation, and the deleted one must be gone.
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'startDate': "2019-04-25 08:18:05",
        'endDate': "2019-04-28 08:18:05",
        'localizationDateobs': "2019-04-25T08:18:05",
        'localizationName': "bayestar.fits.gz",
        'localizationCumprob': 1.01,
        'returnStatistics': True,
    }
    status, data = api('GET', 'observation', params=data, token=super_admin_token)
    assert status == 200
    data = data["data"]
    assert len(data['observations']) == 9
    assert not any(
        [
            d['obstime'] == '2019-04-25T08:18:18.002909'
            and d['observation_id'] == 84434604
            for d in data['observations']
        ]
    )
def test_exclude_by_outdated_annotations(annotation_token, view_only_token,
                                         public_group, public_candidate,
                                         public_candidate2):
    """Candidates with an annotation from a given origin are excluded by
    "annotationExcludeOrigin", unless the annotation predates the supplied
    "annotationExcludeOutdatedDate" cutoff (then they are included again)."""
    # Baseline: how many candidates does the group currently have?
    status, data = api(
        "GET",
        "candidates",
        params={"groupIDs": f"{public_group.id}"},
        token=view_only_token,
    )
    assert status == 200
    num_candidates = len(data["data"]["candidates"])

    origin = str(uuid.uuid4())

    t0 = datetime.datetime.utcnow()  # recall when it was created
    time_offset = (datetime.datetime.utcnow() -
                   datetime.datetime.now()) / datetime.timedelta(hours=1)
    t0 += datetime.timedelta(
        hours=time_offset)  # adjust for time zone of PC running the tests
    t0 += datetime.timedelta(seconds=60)  # give some extra time

    # add an annotation from this origin
    status, data = api(
        "POST",
        f"sources/{public_candidate.id}/annotations",
        data={
            "obj_id": public_candidate.id,
            "origin": origin,
            "data": {'value1': 1}
        },
        token=annotation_token,
    )
    assert status == 200

    # Excluding by origin alone should drop the annotated candidate.
    status, data = api(
        "GET",
        "candidates",
        params={
            "groupIDs": f"{public_group.id}",
            "annotationExcludeOrigin": origin
        },
        token=view_only_token,
    )
    assert status == 200
    assert (num_candidates == len(data["data"]["candidates"]) + 1
            )  # should have one less candidate

    # With a cutoff after the annotation time, the exclusion is "outdated"
    # and the candidate comes back.
    status, data = api(
        "GET",
        "candidates",
        params={
            "groupIDs": f"{public_group.id}",
            "annotationExcludeOrigin": origin,
            "annotationExcludeOutdatedDate": str(t0),
        },
        token=view_only_token,
    )
    assert status == 200
    assert num_candidates == len(
        data["data"]
        ["candidates"])  # should now have all the original candidates
def test_candidate_list_saved_to_any_selected_groups(
    upload_data_token_two_groups,
    view_only_token_two_groups,
    public_filter,
    public_group,
    public_group2,
):
    """With savedStatus="savedToAnySelected", only candidates saved as a
    source to at least one of the queried groups are returned."""
    # Post three candidates passing the same filter.
    obj_id1, obj_id2, obj_id3 = (str(uuid.uuid4()) for _ in range(3))
    for candidate_id in (obj_id1, obj_id2, obj_id3):
        status, data = api(
            "POST",
            "candidates",
            data={
                "id": candidate_id,
                "ra": 234.22,
                "dec": -22.33,
                "redshift": 3,
                "transient": False,
                "ra_dis": 2.3,
                "filter_ids": [public_filter.id],
                "passed_at": str(datetime.datetime.utcnow()),
            },
            token=upload_data_token_two_groups,
        )
        assert status == 200

    # Save two of them as sources: obj_id1 to public_group2 only and
    # obj_id2 to public_group only; obj_id3 stays unsaved.
    for candidate_id, group in ((obj_id1, public_group2), (obj_id2, public_group)):
        status, data = api(
            "POST",
            "sources",
            data={"id": candidate_id, "group_ids": [group.id]},
            token=upload_data_token_two_groups,
        )
        assert status == 200
        assert data["data"]["id"] == candidate_id

    # Query candidates saved to any of the two groups. The unsaved obj_id3
    # must be absent and both saved candidates must be present.
    status, data = api(
        "GET",
        "candidates",
        params={
            "groupIDs": f"{public_group.id},{public_group2.id}",
            "savedStatus": "savedToAnySelected",
        },
        token=view_only_token_two_groups,
    )
    assert status == 200
    assert len(data["data"]["candidates"]) == 2
    returned_ids = {c["id"] for c in data["data"]["candidates"]}
    assert {obj_id1, obj_id2} <= returned_ids
def test_candidate_list_pagination(
    view_only_token,
    upload_data_token,
    public_group,
    public_filter,
):
    """Paginated candidate queries: default descending passed_at ordering,
    queryID-based cache reuse, cache expiry, and out-of-range page errors.

    NOTE(review): assumes the server's candidates query cache expires within
    ~3 seconds in the test configuration — confirm if this flakes.
    """
    # Upload two candidates with known passed_at order
    obj_id1 = str(uuid.uuid4())
    obj_id2 = str(uuid.uuid4())
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id1,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at": str(datetime.datetime.utcnow()),
        },
        token=upload_data_token,
    )
    assert status == 200
    # Second candidate passes a day later, so it sorts first.
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id2,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at":
            str(datetime.datetime.utcnow() + datetime.timedelta(days=1)),
        },
        token=upload_data_token,
    )
    assert status == 200

    # Default order is descending passed_at
    status, data = api(
        "GET",
        "candidates",
        params={
            "numPerPage": 1,
            "pageNumber": 2,
            "groupIDs": f"{public_group.id}"
        },
        token=view_only_token,
    )
    assert status == 200
    assert data["data"]["candidates"][0]["id"] == obj_id1
    assert "queryID" in data["data"]
    query_id = data["data"]["queryID"]

    # Re-using the queryID should hit the cached query.
    status, data = api(
        "GET",
        "candidates",
        params={
            "pageNumber": 1,
            "queryID": query_id
        },
        token=view_only_token,
    )
    assert status == 200
    assert data["data"]["queryID"] == query_id

    # Wait until cache is expired
    time.sleep(3)

    # Submit new request, which will create new (unrelated) cache, triggering
    # cleanup of expired cache files
    status, data = api(
        "GET",
        "candidates",
        token=view_only_token,
    )
    assert status == 200

    # Cache should now be removed, so we expect a new query ID
    status, data = api(
        "GET",
        "candidates",
        params={
            "pageNumber": 1,
            "queryID": query_id
        },
        token=view_only_token,
    )
    assert status == 200
    assert data["data"]["queryID"] != query_id

    # Invalid page
    status, data = api(
        "GET",
        "candidates",
        params={
            "numPerPage": 1,
            "pageNumber": 4
        },
        token=view_only_token,
    )
    assert status == 400
    assert "Page number out of range" in data["message"]
def test_candidate_list_classifications(
    upload_data_token,
    taxonomy_token,
    classification_token,
    view_only_token,
    public_filter,
    public_group,
):
    """Filtering the candidate list by classification returns only the
    candidate that actually carries that classification."""
    # Post two candidates; only the first will later be classified.
    obj_id1 = str(uuid.uuid4())
    obj_id2 = str(uuid.uuid4())
    for candidate_id in (obj_id1, obj_id2):
        status, data = api(
            "POST",
            "candidates",
            data={
                "id": candidate_id,
                "ra": 234.22,
                "dec": -22.33,
                "redshift": 3,
                "transient": False,
                "ra_dis": 2.3,
                "filter_ids": [public_filter.id],
                "passed_at": str(datetime.datetime.utcnow()),
            },
            token=upload_data_token,
        )
        assert status == 200

    # Save the first candidate as a source so it can be classified.
    status, data = api(
        "POST",
        "sources",
        data={"id": obj_id1},
        token=upload_data_token,
    )
    assert status == 200

    # Create a taxonomy to classify against.
    status, data = api(
        'POST',
        'taxonomy',
        data={
            'name': "test taxonomy" + str(uuid.uuid4()),
            'hierarchy': taxonomy,
            'group_ids': [public_group.id],
            'provenance': f"tdtax_{__version__}",
            'version': __version__,
            'isLatest': True,
        },
        token=taxonomy_token,
    )
    assert status == 200
    taxonomy_id = data['data']['taxonomy_id']

    # Classify obj_id1 as 'Algol'.
    status, data = api(
        'POST',
        'classification',
        data={
            'obj_id': obj_id1,
            'classification': 'Algol',
            'taxonomy_id': taxonomy_id,
            'probability': 1.0,
            'group_ids': [public_group.id],
        },
        token=classification_token,
    )
    assert status == 200

    # Filter for candidates with classification 'Algol' - should only get obj_id1 back
    status, data = api(
        "GET",
        "candidates",
        params={
            "classifications": "Algol",
            "groupIDs": f"{public_group.id}",
        },
        token=view_only_token,
    )
    assert status == 200
    matches = data["data"]["candidates"]
    assert len(matches) == 1
    assert matches[0]["id"] == obj_id1
def test_candidates_hidden_photometry_not_leaked( public_candidate, ztf_camera, public_group, public_group2, view_only_token, upload_data_token_two_groups, ): obj_id = str(public_candidate.id) # Post photometry to the object belonging to a different group status, data = api( 'POST', 'photometry', data={ 'obj_id': obj_id, 'mjd': 58000.0, 'instrument_id': ztf_camera.id, 'flux': 12.24, 'fluxerr': 0.031, 'zp': 25.0, 'magsys': 'ab', 'filter': 'ztfg', 'group_ids': [public_group2.id], 'altdata': { 'some_key': 'some_value' }, }, token=upload_data_token_two_groups, ) assert status == 200 assert data['status'] == 'success' photometry_id = data['data']['ids'][0] # Check the photometry sent back with the candidate status, data = api( "GET", "candidates", params={ "groupIDs": f"{public_group.id}", "includePhotometry": "true" }, token=view_only_token, ) assert status == 200 assert len(data["data"]["candidates"]) == 1 assert data["data"]["candidates"][0]["id"] == obj_id assert len(public_candidate.photometry) - 1 == len( data["data"]["candidates"][0]["photometry"]) assert photometry_id not in map( lambda x: x["id"], data["data"]["candidates"][0]["photometry"]) # Check for single GET call as well status, data = api( "GET", f"candidates/{obj_id}", params={"includePhotometry": "true"}, token=view_only_token, ) assert status == 200 assert data["data"]["id"] == obj_id assert len(public_candidate.photometry) - 1 == len( data["data"]["photometry"]) assert photometry_id not in map(lambda x: x["id"], data["data"]["photometry"])
def test_candidate_list_not_saved_to_all_selected_groups(
    upload_data_token_two_groups,
    view_only_token_two_groups,
    public_filter,
    public_group,
    public_group2,
):
    """With savedStatus="notSavedToAllSelected", candidates saved to every
    selected group are excluded; unsaved or partially-saved ones remain."""
    # Post three candidates for the same filter
    obj_id1 = str(uuid.uuid4())
    obj_id2 = str(uuid.uuid4())
    obj_id3 = str(uuid.uuid4())
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id1,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at": str(datetime.datetime.utcnow()),
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id2,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at": str(datetime.datetime.utcnow()),
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id3,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at": str(datetime.datetime.utcnow()),
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200

    # Obj_id1 is saved to both groups
    status, data = api(
        "POST",
        "sources",
        data={
            "id": obj_id1,
            "group_ids": [public_group.id, public_group2.id]
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data["data"]["id"] == obj_id1

    # Obj_id3 is saved to public_group
    status, data = api(
        "POST",
        "sources",
        data={
            "id": obj_id3,
            "group_ids": [public_group.id]
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data["data"]["id"] == obj_id3

    # Select for candidates using public_group and public_group2
    # Should not get back obj_id1 since it is saved to both selected groups
    # Should get back obj_id2 since it is not saved at all
    # Should get back obj_id3 since it is saved to only public_group
    status, data = api(
        "GET",
        "candidates",
        params={
            "groupIDs": f"{public_group.id},{public_group2.id}",
            "savedStatus": "notSavedToAllSelected",
        },
        token=view_only_token_two_groups,
    )
    if status != 200:
        # Surface the server error message before the assertion fails.
        print(data['message'])
    assert status == 200
    # Should get obj_id2 and obj_id3 back
    assert len(data["data"]["candidates"]) == 2
    assert (len({obj_id2, obj_id3}.difference(
        map(lambda x: x["id"], data["data"]["candidates"]))) == 0)
def test_candidate_list_not_saved_to_any_accessible_groups(
    upload_data_token_two_groups,
    view_only_token,
    public_filter,
    public_group,
    public_group2,
):
    """With savedStatus="notSavedToAnyAccessible", a candidate saved only to a
    group the requesting user cannot access still counts as "not saved"."""
    # Post three candidates for the same filter
    obj_id1 = str(uuid.uuid4())
    obj_id2 = str(uuid.uuid4())
    obj_id3 = str(uuid.uuid4())
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id1,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at": str(datetime.datetime.utcnow()),
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id2,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at": str(datetime.datetime.utcnow()),
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    status, data = api(
        "POST",
        "candidates",
        data={
            "id": obj_id3,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "filter_ids": [public_filter.id],
            "passed_at": str(datetime.datetime.utcnow()),
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200

    # Obj_id1 is saved to public_group2
    status, data = api(
        "POST",
        "sources",
        data={
            "id": obj_id1,
            "group_ids": [public_group2.id]
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data["data"]["id"] == obj_id1

    # Obj_id3 is saved to public_group
    status, data = api(
        "POST",
        "sources",
        data={
            "id": obj_id3,
            "group_ids": [public_group.id]
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data["data"]["id"] == obj_id3

    # Select for candidates passing public_filter, which belongs to public_group
    # Since we set "notSavedToAnyAccessible", should get back obj_id1 even though
    # it is saved, since view_only_user doesn't have public_group2 access
    # Should also get back obj_id2 since it is not saved at all
    # Should not get back obj_id3 since it is saved to public_group
    status, data = api(
        "GET",
        "candidates",
        params={
            "groupIDs": f"{public_group.id}",
            "savedStatus": "notSavedToAnyAccessible",
        },
        token=view_only_token,
    )
    assert status == 200
    # Should get obj_id1 and obj_id2 back
    assert len(data["data"]["candidates"]) == 2
    assert (len({obj_id1, obj_id2}.difference(
        map(lambda x: x["id"], data["data"]["candidates"]))) == 0)
def post(endpoint, data, token=admin_token):
    """POST ``data`` to ``endpoint`` on the configured host.

    Returns the ``(status, data)`` pair from the underlying ``api`` call.
    Note: the default ``token`` is bound to ``admin_token`` at import time.
    """
    return api("POST", endpoint, data=data, token=token, host=env.host)
def test_patching_listing(user, user2, public_candidate, public_candidate2):
    """Listing items can be PATCHed to a new object/list name by their owner,
    but cannot be reassigned to another user without sufficient permission.

    Fix: removed a leftover debug ``print(data["data"])`` statement.
    """
    token_id = create_token(ACLs=["Upload data"], user_id=user.id,
                            name=str(uuid.uuid4()))

    # Create two listing items under the same (random) list name.
    list1 = str(uuid.uuid4())
    status, data = api(
        'POST',
        'listing',
        data={
            'user_id': user.id,
            'obj_id': public_candidate.id,
            'list_name': list1,
        },
        token=token_id,
    )
    assert status == 200
    item1 = data["data"]["id"]  # get the list item ID

    status, data = api(
        'POST',
        'listing',
        data={
            'user_id': user.id,
            'obj_id': public_candidate2.id,
            'list_name': list1,
        },
        token=token_id,
    )
    assert status == 200
    item2 = data["data"]["id"]  # get the list item ID

    # Move the second item into a new list.
    list2 = str(uuid.uuid4())
    status, data = api(
        'PATCH',
        f'listing/{item2}',
        data={
            'user_id': user.id,
            'obj_id': public_candidate2.id,
            'list_name': list2,
        },
        token=token_id,
    )
    assert status == 200

    # get the list back, should include only one item that matches list2
    status, data = api('GET', f'listing/{user.id}?listName={list2}',
                       token=token_id)
    assert status == 200
    new_list = data["data"]
    assert len(new_list) == 1
    assert new_list[0]['id'] == item2  # the listing ID is the same
    assert new_list[0]['user_id'] == user.id  # user stays the same
    assert new_list[0]['obj_id'] == public_candidate2.id  # obj id is new
    assert new_list[0]['list_name'] == list2  # list name is new

    # try to patch with an invalid user id
    status, data = api(
        'PATCH',
        f'listing/{item1}',
        data={
            'user_id': user2.id,
            'obj_id': public_candidate2.id,
            'list_name': list2,
        },
        token=token_id,
    )
    assert status == 400
    assert 'Insufficient permission' in data['message']
def test_token_user_update_instrument(
    super_admin_token, manage_sources_token, view_only_token
):
    """Only a super-admin token may rename an instrument; a manage-sources
    token is rejected with 401, and a read-only token sees the new name."""
    # Set up a telescope for the instrument to hang off of.
    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    # Create the instrument itself.
    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['f110w'],
            'telescope_id': telescope_id,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    # Sanity-check the instrument is retrievable.
    status, data = api('GET', f'instrument/{instrument_id}',
                       token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['band'] == 'NIR'

    new_name = f'Gattini2_{uuid.uuid4()}'

    # A manage-sources token must not be allowed to rename the instrument.
    status, data = api(
        'PUT',
        f'instrument/{instrument_id}',
        data={
            'name': new_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['f110w'],
            'telescope_id': telescope_id,
        },
        token=manage_sources_token,
    )
    assert status == 401
    assert data['status'] == 'error'

    # The super-admin token can perform the same rename.
    status, data = api(
        'PUT',
        f'instrument/{instrument_id}',
        data={
            'name': new_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['f110w'],
            'telescope_id': telescope_id,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # A read-only token now sees the updated name.
    status, data = api('GET', f'instrument/{instrument_id}',
                       token=view_only_token)
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['name'] == new_name
def test_listings_user_permissions(
    user,
    user2,
    super_admin_user,
    super_admin_token,
    upload_data_token,
    public_candidate,
    public_candidate2,
):
    """Ownership rules for listings: a regular token may only create/modify its
    own user's listings; a super-admin token may act on behalf of any user.
    Also verifies that PATCH accepts partial payloads (one field at a time)."""
    status, data = api(
        'POST',
        'listing',
        data={
            'user_id': user.id,
            'obj_id': public_candidate.id,
            'list_name': 'favorites',
        },
        token=upload_data_token,
    )
    assert status == 200
    item1 = data["data"]["id"]  # get the list item ID

    # try to transfer ownership to a different user
    status, data = api(
        'PATCH',
        f'listing/{item1}',
        data={
            'user_id': user2.id,
            'obj_id': public_candidate.id,
            'list_name': 'favorites',
        },
        token=upload_data_token,
    )
    assert status == 400
    assert 'Insufficient permissions' in data['message']

    # try to post to a different user
    status, data = api(
        'POST',
        'listing',
        data={
            'user_id': user2.id,
            'obj_id': public_candidate.id,
            'list_name': 'favorites',
        },
        token=upload_data_token,
    )
    assert status == 400

    # try to add this to a different user, but with super admin privileges
    status, data = api(
        'PATCH',
        f'listing/{item1}',
        data={
            'user_id': user2.id,
            'obj_id': public_candidate.id,
            'list_name': 'favorites',
        },
        token=super_admin_token,
    )
    assert status == 200

    # get the list back, should include only one item that matches user2
    status, data = api(
        'GET', f'listing/{user2.id}?listName=favorites', token=super_admin_token
    )
    assert status == 200
    new_list = data["data"]
    assert len(new_list) == 1
    assert new_list[0]['id'] == item1  # the listing ID is the same
    assert new_list[0]['obj_id'] == public_candidate.id  # obj stays the same

    # try to patch with only partial data inputs
    # bring this listing back to first user with super token permission
    status, data = api(
        'PATCH',
        f'listing/{item1}',
        data={'user_id': user.id},
        token=super_admin_token,
    )
    assert status == 200

    # change the object id only
    status, data = api(
        'PATCH',
        f'listing/{item1}',
        data={'obj_id': public_candidate2.id},
        token=upload_data_token,
    )
    assert status == 200

    # change the list name only
    status, data = api(
        'PATCH',
        f'listing/{item1}',
        data={'list_name': 'new_listing'},
        token=upload_data_token,
    )
    assert status == 200

    # get the list back, should include only one item that matches user2
    status, data = api(
        'GET', f'listing/{user.id}?listName=new_listing', token=super_admin_token
    )
    assert status == 200
    new_list = data["data"]
    assert len(new_list) == 1
    assert new_list[0]['id'] == item1  # the listing ID is the same
    assert new_list[0]['obj_id'] == public_candidate2.id  # obj was updated
    assert new_list[0]['user_id'] == user.id  # user was returned to original
    assert new_list[0]['list_name'] == 'new_listing'  # new listing name
def test_token_user_post_get_instrument(super_admin_token):
    """Posting an instrument with field data/region populates its fields, and
    GET returns GeoJSON contours for them.

    Field ingestion happens asynchronously on the server, so the first GET is
    retried up to 5 times with a 3 s pause between attempts.
    """
    name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': name,
            'nickname': name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    # Sample ZTF field definitions and footprint region shipped with the repo
    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['f110w'],
            'telescope_id': telescope_id,
            # only the first 5 fields, to keep ingestion fast
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    params = {'includeGeoJSON': True}

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET',
                f'instrument/{instrument_id}',
                params=params,
                token=super_admin_token,
            )
            assert status == 200
            assert data['status'] == 'success'
            assert data['data']['band'] == 'NIR'
            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    # Full GeoJSON contours: spot-check a known vertex coordinate of field 1
    params = {'includeGeoJSON': True}
    instrument_id = data['data']['id']
    status, data = api(
        'GET', f'instrument/{instrument_id}', params=params, token=super_admin_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['band'] == 'NIR'
    assert len(data['data']['fields']) == 5
    assert any(
        [
            d['field_id'] == 1
            and d['contour']['features'][0]['geometry']['coordinates'][0][0]
            == [110.84784299030288, -87.01522509948724]
            for d in data['data']['fields']
        ]
    )

    # Summary GeoJSON contours: spot-check the same field's summary geometry
    params = {'includeGeoJSONSummary': True}
    instrument_id = data['data']['id']
    status, data = api(
        'GET', f'instrument/{instrument_id}', params=params, token=super_admin_token
    )
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['band'] == 'NIR'
    assert len(data['data']['fields']) == 5
    assert any(
        [
            d['field_id'] == 1
            and d['contour_summary']['features'][0]['geometry']['coordinates'][0]
            == [1.0239199794587863, -89.93778080237439]
            for d in data['data']['fields']
        ]
    )
def test_invalid_listing_name_fails(user, upload_data_token, public_candidate):
    """List names must begin with an alphanumeric/underscore character; this is
    enforced on both POST (create) and PATCH (rename)."""
    # Each of these names is invalid: empty, leading space, leading dash.
    for bad_name in ('', ' ', '-'):
        status, data = api(
            'POST',
            'listing',
            data={
                'user_id': user.id,
                'obj_id': public_candidate.id,
                'list_name': bad_name,
            },
            token=upload_data_token,
        )
        assert status == 400
        assert 'must begin with alphanumeric/underscore' in data["message"]

    # A conventional name is accepted.
    status, data = api(
        'POST',
        'listing',
        data={
            'user_id': user.id,
            'obj_id': public_candidate.id,
            'list_name': 'favorites',
        },
        token=upload_data_token,
    )
    assert status == 200
    listing_id = data["data"]["id"]

    # The same validation applies when renaming an existing listing via PATCH.
    status, data = api(
        'PATCH',
        f'listing/{listing_id}',
        data={
            'user_id': user.id,
            'obj_id': public_candidate.id,
            'list_name': '',
        },
        token=upload_data_token,
    )
    assert status == 400
    assert 'must begin with alphanumeric/underscore' in data["message"]
def test_observation_isot(super_admin_token, view_only_token):
    """Observations posted with ISOT-formatted timestamps are ingested and can
    be queried back by date range.

    Creates a telescope plus a ZTF-like optical instrument (with field data),
    uploads executed observations from a CSV whose times are ISOT strings,
    then retrieves them and checks one known observation.
    """
    telescope_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': telescope_name,
            'nickname': telescope_name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    fielddatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Fields.csv'
    regionsdatafile = f'{os.path.dirname(__file__)}/../../../data/ZTF_Region.reg'

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'Optical',
            'filters': ['ztfr'],
            'telescope_id': telescope_id,
            'field_data': pd.read_csv(fielddatafile)[:5].to_dict(orient='list'),
            'field_region': Regions.read(regionsdatafile).serialize(format='ds9'),
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    # wait for the fields to populate
    nretries = 0
    fields_loaded = False
    while not fields_loaded and nretries < 5:
        try:
            status, data = api(
                'GET', f'instrument/{instrument_id}', token=super_admin_token
            )
            assert status == 200
            assert data['status'] == 'success'
            # BUGFIX: the instrument above is created with band 'Optical', not
            # 'NIR'; the previous assertion of 'NIR' could never pass, so this
            # loop always burned all 5 retries (~15 s of sleeps) and never
            # actually confirmed the fields had loaded.
            assert data['data']['band'] == 'Optical'
            assert len(data['data']['fields']) == 5
            fields_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    datafile = (
        f'{os.path.dirname(__file__)}/../../../data/sample_observation_data_isot.csv'
    )
    data = {
        'telescopeName': telescope_name,
        'instrumentName': instrument_name,
        'observationData': pd.read_csv(datafile).to_dict(orient='list'),
    }

    status, data = api('POST', 'observation', data=data, token=super_admin_token)
    assert status == 200
    assert data['status'] == 'success'

    params = {
        'startDate': "2019-04-25 08:18:05",
        'endDate': "2019-04-28 08:18:05",
    }

    # wait for the executed observations to populate
    nretries = 0
    observations_loaded = False
    while not observations_loaded and nretries < 5:
        try:
            status, data = api(
                'GET', 'observation', params=params, token=super_admin_token
            )
            assert status == 200
            data = data["data"]
            assert len(data) == 10
            observations_loaded = True
        except AssertionError:
            nretries = nretries + 1
            time.sleep(3)

    # One known row from the sample CSV must be present with its ISOT obstime.
    assert any(
        [
            d['obstime'] == '2019-04-25T08:18:18' and d['observation_id'] == 94434604
            for d in data['observations']
        ]
    )
def test_add_objects_to_list(user, public_candidate, public_candidate2):
    """Two objects can be added to a named list; unknown objects are rejected."""
    token_id = create_token(
        ACLs=["Upload data"], user_id=user.id, name=str(uuid.uuid4())
    )

    # Add both candidates to the same 'favorites' list, keeping each listing ID.
    listing_ids = []
    for candidate in (public_candidate, public_candidate2):
        status, data = api(
            'POST',
            'listing',
            data={
                'user_id': user.id,
                'obj_id': candidate.id,
                'list_name': 'favorites',
            },
            token=token_id,
        )
        assert status == 200
        listing_ids.append(data["data"]["id"])

    # Fetching the list back must return exactly the two items just created.
    status, data = api('GET', f'listing/{user.id}?listName=favorites', token=token_id)
    assert status == 200
    returned_ids = [item["id"] for item in data["data"]]
    assert set(returned_ids) == set(listing_ids)

    # Posting a listing for a non-existent object must fail.
    fake_obj_id = str(uuid.uuid4())
    status, data = api(
        'POST',
        'listing',
        data={'user_id': user.id, 'obj_id': fake_obj_id, 'list_name': 'favorites'},
        token=token_id,
    )
    assert status == 400
def test_filter_by_classification(
    driver,
    user,
    public_group,
    upload_data_token,
    taxonomy_token,
    classification_token,
):
    """Front-end test: the sources-table classification filter shows a source
    classified with the selected label and hides it for a different label."""
    # Post an object with a classification
    source_id = str(uuid.uuid4())
    status, data = api(
        "POST",
        "sources",
        data={
            "id": source_id,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "group_ids": [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200

    # Fresh taxonomy with a random name so its dropdown entries are unique
    taxonomy_name = "test taxonomy" + str(uuid.uuid4())
    status, data = api(
        'POST',
        'taxonomy',
        data={
            'name': taxonomy_name,
            'hierarchy': taxonomy,
            'group_ids': [public_group.id],
            'provenance': f"tdtax_{__version__}",
            'version': __version__,
            'isLatest': True,
        },
        token=taxonomy_token,
    )
    assert status == 200
    taxonomy_id = data['data']['taxonomy_id']

    # Classify the source as 'Algol' within that taxonomy
    status, data = api(
        'POST',
        'classification',
        data={
            'obj_id': source_id,
            'classification': 'Algol',
            'taxonomy_id': taxonomy_id,
            'probability': 1.0,
            'group_ids': [public_group.id],
        },
        token=classification_token,
    )
    assert status == 200

    driver.get(f"/become_user/{user.id}")
    driver.get("/sources")

    # Filter for classification
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    driver.click_xpath(
        "//div[@data-testid='classifications-select']",
        scroll_parent=True,
    )
    driver.click_xpath(
        f"//li[@data-value='{taxonomy_name}: Algol']", scroll_parent=True
    )
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )

    # Should see the posted source
    driver.wait_for_xpath(f'//a[@data-testid="{source_id}"]')

    # Now search for a different classification
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    driver.click_xpath(
        "//div[@data-testid='classifications-select']",
        scroll_parent=True,
    )
    driver.click_xpath(
        f"//li[@data-value='{taxonomy_name}: AGN']", scroll_parent=True
    )
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )

    # Should no longer see the source
    driver.wait_for_xpath_to_disappear(f'//a[@data-testid="{source_id}"]')
def test_filter_list(view_only_token, public_filter):
    """Listing filters succeeds and each entry carries the expected keys."""
    status, data = api("GET", "filters", token=view_only_token)
    assert status == 200
    assert data["status"] == "success"
    first_filter = data["data"][0]
    for key in ("name", "group_id", "stream_id"):
        assert key in first_filter
def test_hr_diagram(
    driver,
    user,
    public_group,
    upload_data_token,
    annotation_token,
):
    """Front-end test: a source annotated with Gaia magnitudes and parallax
    shows an HR-diagram element in its expanded row on the sources table."""
    # Post an object with Gaia data
    source_id = str(uuid.uuid4())
    status, data = api(
        "POST",
        "sources",
        data={
            "id": source_id,
            "ra": 234.22,
            "dec": -22.33,
            "redshift": 3,
            "transient": False,
            "ra_dis": 2.3,
            "group_ids": [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200

    driver.get(f"/become_user/{user.id}")

    # Gaia magnitudes + parallax: the inputs the HR diagram is built from
    status, data = api(
        'POST',
        f'sources/{source_id}/annotations',
        data={
            'obj_id': source_id,
            'origin': 'cross_match1',
            'data': {
                'gaia': {'Mag_G': 11.3, 'Mag_Bp': 11.8, 'Mag_Rp': 11.0, 'Plx': 20},
            },
        },
        token=annotation_token,
    )
    assert status == 200

    driver.get("/sources")

    # Narrow the table down to this one source via the source-ID filter
    driver.click_xpath("//button[@data-testid='Filter Table-iconButton']")
    obj_button = driver.wait_for_xpath("//input[@name='sourceID']")
    obj_button.clear()
    obj_button.send_keys(source_id)
    driver.click_xpath(
        "//div[contains(@class, 'MUIDataTableFilter-root')]//span[text()='Submit']"
    )

    # find the name of the newly added source
    driver.wait_for_xpath(f"//a[contains(@href, '/source/{source_id}')]")

    # little triangle you push to expand the table
    driver.click_xpath("//*[@id='expandable-button']")

    # make sure the div containing the individual source appears
    driver.wait_for_xpath(f'//tr[@data-testid="groupSourceExpand_{source_id}"]')

    driver.wait_for_xpath(f'//div[@data-testid="hr_diagram_{source_id}"]')
def test_sources_include_detection_stats(
    upload_data_token,
    super_admin_token,
    public_group,
    public_group2,
    upload_data_token_two_groups,
    view_only_token,
):
    """Detection stats returned by GET /sources respect photometry group access.

    Three photometry points are posted:
      * mjd 99999 — a non-detection (mag is None), visible to the viewer;
      * mjd 99900 — a point in a group the view-only user cannot access;
      * mjd 90000 — an actual detection (has flux), visible to the viewer.
    Only the last one should drive ``last_detected_mjd``.
    """
    obj_id = str(uuid.uuid4())
    status, data = api(
        "POST",
        "sources",
        data={
            "id": obj_id,
            "ra": 234.22,
            "dec": -22.33,
            "group_ids": [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data["data"]["id"] == obj_id

    name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'telescope',
        data={
            'name': name,
            'nickname': name,
            'lat': 0.0,
            'lon': 0.0,
            'elevation': 0.0,
            'diameter': 10.0,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    telescope_id = data['data']['id']

    instrument_name = str(uuid.uuid4())
    status, data = api(
        'POST',
        'instrument',
        data={
            'name': instrument_name,
            'type': 'imager',
            'band': 'NIR',
            'filters': ['ztfg'],
            'telescope_id': telescope_id,
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    instrument_id = data['data']['id']

    # Some very high mjd to make this the latest point
    # This is not a detection though
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': obj_id,
            'mjd': 99999.0,
            'instrument_id': instrument_id,
            'mag': None,
            'magerr': None,
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # Another high mjd, but this time a photometry point not visible to the user
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': obj_id,
            'mjd': 99900.0,
            'instrument_id': instrument_id,
            'mag': None,
            'magerr': None,
            'limiting_mag': 22.3,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group2.id],
        },
        token=upload_data_token_two_groups,
    )
    assert status == 200
    assert data['status'] == 'success'

    # let the phot_stats table update
    time.sleep(10)

    # A high mjd, but lower than the first point
    # Since this is a detection, it should be returned as "last_detected"
    status, data = api(
        'POST',
        'photometry',
        data={
            'obj_id': obj_id,
            'mjd': 90000.0,
            'instrument_id': instrument_id,
            'flux': 12.24,
            'fluxerr': 0.031,
            'zp': 25.0,
            'magsys': 'ab',
            'filter': 'ztfg',
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    # Force a refresh of the per-source photometry statistics
    status, data = api(
        'PUT',
        f'sources/{obj_id}/phot_stat',
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'

    status, data = api(
        "GET",
        "sources",
        params={"includeDetectionStats": "true"},
        token=view_only_token,
    )
    assert status == 200
    assert data["status"] == "success"

    # Note: 40_587 is the MJD of UNIX time 1970-01-01
    # Because arrow.get views dates as seconds since UNIX time,
    # s["peak_detected_at"]` is the MJD of 90000 in isodate format.
    # In summary: arrow.get("1970-01-01") - datetime.timedelta(40587) =>
    # <Arrow [1858-11-17T00:00:00+00:00]>
    assert any(
        [
            arrow.get(
                Time(s["photstats"][-1]["last_detected_mjd"], format="mjd").datetime
            )
            == arrow.get((90000.0 - 40_587) * 86400.0)
            for s in data["data"]["sources"]
        ]
    )
def test_update_annotation_group_list(
    annotation_token_two_groups,
    public_source_two_groups,
    public_group2,
    public_group,
    annotation_token,
):
    """Widening an annotation's group list via PUT makes it visible to tokens
    that previously received a 403."""
    base_url = f'sources/{public_source_two_groups.id}/annotations'

    status, data = api(
        'POST',
        base_url,
        data={
            'origin': 'kowalski',
            'data': {'offset_from_host_galaxy': 1.5},
            'group_ids': [public_group2.id],
        },
        token=annotation_token_two_groups,
    )
    assert status == 200
    annotation_id = data['data']['annotation_id']

    # This token belongs to public_group2
    status, data = api(
        'GET', f'{base_url}/{annotation_id}', token=annotation_token_two_groups
    )
    assert status == 200
    assert data['data']['origin'] == 'kowalski'
    assert data['data']['data'] == {'offset_from_host_galaxy': 1.5}

    # This token does not belong to public_group2
    status, data = api('GET', f'{base_url}/{annotation_id}', token=annotation_token)
    assert status == 403

    # Both tokens should be able to view annotation after updating group list
    status, data = api(
        'PUT',
        f'{base_url}/{annotation_id}',
        data={
            'data': {'offset_from_host_galaxy': 1.7},
            'group_ids': [public_group.id, public_group2.id],
        },
        token=annotation_token_two_groups,
    )
    assert status == 200

    status, data = api(
        'GET', f'{base_url}/{annotation_id}', token=annotation_token_two_groups
    )
    assert status == 200
    assert data['data']['data'] == {'offset_from_host_galaxy': 1.7}

    status, data = api('GET', f'{base_url}/{annotation_id}', token=annotation_token)
    assert status == 200
    assert data['data']['data'] == {'offset_from_host_galaxy': 1.7}
def get(endpoint, token=admin_token):
    """Convenience wrapper: issue a GET against *endpoint* on the test host.

    Returns the ``(status, data)`` pair produced by :func:`api`.
    """
    return api("GET", endpoint, token=token, host=env.host)
def test_token_user_post_get_thumbnail_phot_id(upload_data_token, public_group):
    """Posting a thumbnail keyed by a photometry ID attaches it to that
    photometry's source and increments the source's thumbnail count.

    Uses the legacy photometry payload shape (observed_at/time_format/e_mag/…)
    and verifies linkage with direct DBSession queries.
    """
    source_id = str(uuid.uuid4())
    status, data = api(
        'POST',
        'sources',
        data={
            'id': source_id,
            'ra': 234.22,
            'dec': -22.33,
            'redshift': 3,
            'transient': False,
            'ra_dis': 2.3,
            'group_ids': [public_group.id],
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['data']['id'] == source_id

    # NOTE(review): instrument_id=1 presumes a pre-seeded instrument in the
    # test DB — confirm against the fixture setup.
    status, data = api(
        'POST',
        'photometry',
        data={
            'source_id': source_id,
            'observed_at': str(datetime.datetime.now()),
            'time_format': 'iso',
            'time_scale': 'utc',
            'instrument_id': 1,
            'mag': 12.24,
            'e_mag': 0.031,
            'lim_mag': 14.1,
            'filter': 'V',
        },
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    photometry_id = data['data']['ids'][0]

    # Baseline thumbnail count before the upload
    orig_source_thumbnail_count = len(
        DBSession.query(Source).filter(Source.id == source_id).first().thumbnails
    )
    # base64-encode the PNG bytes for the thumbnail payload
    data = base64.b64encode(
        open(os.path.abspath('skyportal/tests/data/14gqr_new.png'), 'rb').read()
    )
    ttype = 'new'
    status, data = api(
        'POST',
        'thumbnail',
        data={'photometry_id': photometry_id, 'data': data, 'ttype': ttype},
        token=upload_data_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    thumbnail_id = data['data']['id']
    assert isinstance(thumbnail_id, int)

    status, data = api('GET', f'thumbnail/{thumbnail_id}', token=upload_data_token)
    assert status == 200
    assert data['status'] == 'success'
    assert data['data']['thumbnail']['type'] == 'new'

    # The thumbnail links back to the source, and the count grew by exactly one
    assert (
        DBSession.query(Thumbnail).filter(Thumbnail.id == thumbnail_id).first().source.id
    ) == source_id
    assert (
        len(DBSession.query(Source).filter(Source.id == source_id).first().thumbnails)
        == orig_source_thumbnail_count + 1
    )
def test_source_list(view_only_token):
    """A view-only token can list sources successfully."""
    response_status, payload = api('GET', 'sources', token=view_only_token)
    assert response_status == 200
    assert payload['status'] == 'success'