def test_11_get_cleaned_data(self, client):
    """Fetch the mapping for table-1a and verify the 'cleaned' layer entries."""
    url = url_builder('/api/mapping', self.files_dir,
                      "homicide_report_total_and_sex.xlsx", "table-1a",
                      "t2wml/table-1a.yaml")
    response = client.get(url)
    payload = json.loads(response.data.decode("utf-8"))
    entries = payload['layers']['cleaned']['entries']
    assert len(entries) == 3
    assert entries[1] == {'cleaned': '200', 'indices': [[6, 3]], 'original': '4'}
def test_12_change_sheet(self, client):
    """Switch to Sheet4 and compare the table response to the fixture."""
    # GET /api/data/{project_folder}/<sheet_name>
    url = url_builder('/api/table', project_folder, self.data_file, "Sheet4")
    response = client.get(url)
    sheet_data = get_data(response.data.decode("utf-8"))
    # the 'project' entry is environment-specific; drop it before comparing
    sheet_data.pop('project', None)
    self.results_dict['change_sheet'] = sheet_data
    self.compare_jsons(sheet_data, 'change_sheet')
def test_09_get_cell(self, client):
    """Fetch a single cell (G4) and compare the response to the fixture."""
    # GET '/api/data/{project_folder}/cell/<col>/<row>'
    col, row = "G", 4
    url = f'/api/data/cell/{col}/{row}?project_folder={project_folder}'
    response = client.get(url)
    cell_data = json.loads(response.data.decode("utf-8"))
    self.results_dict['get_cell'] = cell_data
    self.compare_jsons(cell_data, 'get_cell')
def test_11_get_download(self, client):
    """Download the project as TSV and compare against the expected file.

    Fix: the URL literal carried an f-prefix with no placeholders (ruff F541);
    it has been dropped — the string value is unchanged.
    """
    # GET '/api/project/{project_folder}/download/<filetype>'
    url = url_builder('/api/project/download/tsv', project_folder,
                      self.data_file, self.sheet_name, self.yaml_file)
    response = client.get(url)
    data = json.loads(response.data.decode("utf-8"))["data"]
    with open(os.path.join(self.files_dir, "download.tsv"), 'r') as f:
        expected = f.read()
    assert expected == data
def test_11_get_loaded_yaml_files(self, client):
    """Load dataset.xlsx/Sheet3 with test.yaml and compare to the fixture."""
    url = url_builder('/api/table', self.files_dir, "dataset.xlsx", "Sheet3",
                      "test.yaml")
    response = client.get(url)
    table_data = get_data(response.data.decode("utf-8"))
    # the 'project' entry is environment-specific; drop it before comparing
    table_data.pop('project', None)
    self.results_dict['load_from_path'] = table_data
    # To regenerate the expected-results fixture, uncomment:
    # with open(self.expected_results_path, 'w') as f:
    #     json.dump(self.results_dict, f, sort_keys=False, indent=4)
    self.compare_jsons(table_data, 'load_from_path')
def test_11_get_download(self, client):
    """Download the project as TSV and compare against the expected file."""
    # GET '/api/project/{project_folder}/download/<filetype>'
    filetype = "tsv"
    url = f'/api/project/download/{filetype}?project_folder={project_folder}'
    response = client.get(url)
    downloaded = json.loads(response.data.decode("utf-8"))["data"]
    with open(os.path.join(self.files_dir, "download.tsv"), 'r') as f:
        assert f.read() == downloaded
def test_12_change_sheet(self, client):
    """Switch to Sheet4 and compare the response to the fixture."""
    # GET /api/data/{project_folder}/<sheet_name>
    sheet_name = "Sheet4"
    url = f'/api/data/{sheet_name}?project_folder={project_folder}'
    response = client.get(url)
    result = json.loads(response.data.decode("utf-8"))
    self.results_dict['change_sheet'] = result
    # filenames contain machine-specific paths; drop them from both sides
    # before the comparison
    result['tableData'].pop('filename', None)
    self.expected_results_dict['change_sheet']['tableData'].pop(
        'filename', None)
    self.compare_jsons(result, 'change_sheet')
def test_switching_back_to_sheets(client):
    """Regression test for issue 156: yaml regions survive a sheet round-trip."""
    files_dir = os.path.join(os.path.dirname(__file__), "files_for_tests",
                             "homicide")
    path = create_project(client)
    load_data_file(client, path,
                   os.path.join(files_dir,
                                "homicide_report_total_and_sex.xlsx"))
    # load the first sheet's yaml
    response = load_yaml_file(
        client, path,
        filename=os.path.join(files_dir, "t2wml", "table-1a.yaml"))
    yaml_1_data = json.loads(response.data.decode("utf-8"))["yamlRegions"]
    # switch to the second sheet
    client.get(f'/api/data/table-1b?project_folder={path}')
    # load a different yaml on that sheet
    response = load_yaml_file(
        client, path,
        filename=os.path.join(files_dir, "t2wml", "table-1b.yaml"))
    yaml_2_data = json.loads(response.data.decode("utf-8"))["yamlRegions"]
    # switch back to the first sheet
    response = client.get(f'/api/data/table-1a?project_folder={path}')
    switch_back_data = json.loads(
        response.data.decode("utf-8"))["yamlData"]["yamlRegions"]
    # some results are sent back as unordered lists and need to be
    # compared separately
    set_keys = sanitize_highlight_region(yaml_1_data, switch_back_data)
    for key in set_keys:
        yaml_2_data.pop(key, None)
    assert yaml_1_data != yaml_2_data
    assert yaml_1_data == switch_back_data
def test_01a_clear_annotation_settings(self, client):
    """Remember the current datamart-integration flag, then turn it off."""
    # fetch the current global settings so the flag can be restored later
    response = client.get('/api/project/globalsettings')
    settings = get_data(response.data.decode("utf-8"))
    global datamart_integration_switch
    datamart_integration_switch = settings["datamart_integration"]
    # disable datamart integration for the annotation tests
    client.put('/api/project/globalsettings',
               json=dict(datamartIntegration=False))
def test_datamart_integration(client):
    """End-to-end check of the datamart-integration upload path.

    Fix: removed a dead reassignment of ``endpoint`` (the '…/datamart-api'
    literal) just before the restore call — the value was never used, since
    the restore reads the endpoint from ``old_global_settings``.
    """
    project_folder = create_project(client)
    # save the current global settings so they can be restored afterwards
    url = '/api/project/globalsettings'
    response = client.get(url)
    old_global_settings = json.loads(response.data.decode("utf-8"))
    # enable datamart integration against the test endpoint
    endpoint = 'https://*****:*****@dsbox02.isi.edu:8888/datamart-api-wm'
    client.put(url, json=dict(datamartApi=endpoint, datamartIntegration=True))
    # upload a data file; the response carries the datamart-generated layers
    filename = os.path.join(os.path.dirname(__file__), "files_for_tests",
                            "region.xlsx")
    response = load_data_file(client, project_folder, filename)
    data = json.loads(response.data.decode("utf-8"))
    # restore the original settings before asserting, so a failing assert
    # does not leave the global state modified
    client.put(url, json=dict(
        datamartApi=old_global_settings["datamart_api"],
        datamartIntegration=old_global_settings["datamart_integration"]))
    # To regenerate the expected fixture, uncomment:
    # with open(os.path.join(os.path.dirname(__file__), "files_for_tests",
    #                        "datamart_results.json"), 'w') as f:
    #     json.dump(data["layers"], f, sort_keys=False, indent=4)
    with open(os.path.join(os.path.dirname(__file__), "files_for_tests",
                           "datamart_results.json"), 'r') as f:
        expected_layers = json.load(f)
    assert data["layers"] == expected_layers
def test_11_get_loaded_yaml_files(self, client):
    """Load a project from a path and compare the response to the fixture.

    Fix: removed the unused local ``set_keys = []`` — nothing read it
    (``sanitize_highlight_region``'s return value is intentionally ignored
    here; it normalizes both dicts in place).
    """
    url = '/api/project?project_folder={path}'.format(path=self.files_dir)
    response = client.get(url)
    data = json.loads(response.data.decode("utf-8"))
    data.pop('project')
    self.results_dict['load_from_path'] = data
    # some of the results are sent back as unordered lists and need to be
    # compared separately; sanitize both sides in place
    dict_1 = data["yamlData"]["yamlRegions"]
    dict_2 = self.expected_results_dict["load_from_path"]["yamlData"][
        "yamlRegions"]
    sanitize_highlight_region(dict_1, dict_2)
    # filenames contain machine-specific paths; drop them before comparing
    data['tableData'].pop('filename', None)
    self.expected_results_dict['load_from_path']['tableData'].pop(
        'filename', None)
    self.compare_jsons(data, 'load_from_path')
def test_14_settings(self, client):
    """Round-trip the project settings through PUT then GET.

    Fixes: compare the returned endpoint against the ``endpoint`` variable
    instead of a duplicated literal; use ``is False`` rather than
    ``== False`` (PEP 8); build the identical settings URL once.
    """
    from t2wml.settings import t2wml_settings
    # PUT then GET '/api/project/{project_folder}/settings' — same URL
    url = ' /api/project/settings?project_folder={project_folder}'.format(
        project_folder=project_folder).strip()
    endpoint = 'https://query.wikidata.org/bigdata/namespace/wdq/sparql'
    response = client.put(url, data=dict(endpoint=endpoint, warnEmpty=False))
    assert t2wml_settings.wikidata_provider.sparql_endpoint == endpoint
    response = client.get(url)
    data = json.loads(response.data.decode("utf-8"))
    assert data["endpoint"] == endpoint
    assert data["warnEmpty"] is False
def test_02_get_project(self, client):
    """Fetch the project and verify its directory matches the test folder."""
    response = client.get(
        f'/api/project?project_folder={project_folder}')
    project_data = get_data(response.data.decode("utf-8"))
    assert project_data["project"]["directory"] == project_folder