def test_biodiverse_3_fail(self):
    """Biodiverse experiment that is expected to FAIL.

    Builds the full dependency chain (SDM -> projection -> biodiverse),
    selects the "KAPPA" threshold — a bad threshold per the test's own
    comment — asserts the run completes with failure while its result
    files are still listed, then deletes all three experiments.
    """
    # Fails due to bad threshold selected
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    experiment_page = homepage.click_experiments()
    # First create an SDM experiment to work with:
    new_sdm_page = experiment_page.click_new_sdm_experiment()
    sdm_experiment_name = create_sdm(self, new_sdm_page)
    # Click on Experiments
    experiment_page = homepage.click_experiments()
    # New projection experiment built on the SDM above
    new_projection_page = experiment_page.click_new_projection_experiment()
    projection_experiment_name = create_projection(self, sdm_experiment_name, new_projection_page)
    # Click on Experiments
    experiment_page = homepage.click_experiments()
    # New biodiverse experiment
    new_biodiverse_page = experiment_page.click_new_biodiverse_experiment()
    # generate a unique identifier for the biodiverse experiment
    biodiverse_experiment_name = "biodiverse_" + generate_timestamp()
    new_biodiverse_page.enter_experiment_name(biodiverse_experiment_name)
    new_biodiverse_page.enter_experiment_description("This is a biodiverse experiment")
    # select the earlier generated projection experiment as the source
    new_biodiverse_page.select_source_projection_tab()
    new_biodiverse_page.select_projection_experiments(projection_experiment_name)
    new_biodiverse_page.select_species("Phascolarctus cinereus")
    new_biodiverse_page.select_years("2015")
    new_biodiverse_page.select_layers("proj_RCP3PD_gfdl-cm20_2015_Phascolarctus.cinereus.tif")
    # select the configuration tab
    new_biodiverse_page.select_configuration_tab()
    # "KAPPA" is the bad threshold that should make this run fail
    # NOTE(review): inferred from the test name/comment — confirm.
    new_biodiverse_page.select_threshold_value("KAPPA")
    new_biodiverse_page.select_cluster_size("20000")
    # run the experiment
    new_biodiverse_page.select_run()
    experiment_result_page = new_biodiverse_page.select_run_experiment()
    experiment_result_page.wait_for_experiment_to_complete()
    # Expected outcome is failure, not success
    self.assertTrue(experiment_result_page.has_completed_with_failure())
    # Check results: header and result files are still listed after the failure
    self.assertTrue(experiment_result_page.has_results_header(biodiverse_experiment_name))
    self.assertTrue(experiment_result_page.has_result_file("proj_RCP3PD_gfdl-cm20_2015_Phascolarctus.cinereus.tif"))
    self.assertTrue(experiment_result_page.has_result_file("biodiverse.plout"))
    self.assertTrue(experiment_result_page.has_result_file("pstats.json"))
    # Cleanup
    self.delete_experiment(sdm_experiment_name)
    self.delete_experiment(projection_experiment_name)
    self.delete_experiment(biodiverse_experiment_name)
def test_sharing_experiment(self):
    """Share an SDM experiment with logged-in users and verify visibility.

    Runs an ANN SDM to completion as the primary user, shares it with
    "Logged-in users", logs in as "testuser" and checks the experiment
    appears in that user's list, then logs back in and cleans up.
    """
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    experiment_page = homepage.click_experiments()
    new_sdm_page = experiment_page.click_new_sdm_experiment()
    # Unique name so list lookups below are unambiguous
    experiment_name = "sharing_test_" + generate_timestamp()
    new_sdm_page.enter_experiment_name(experiment_name)
    new_sdm_page.enter_experiment_description('Artificial Neural Network with Koala occurrences')
    new_sdm_page.select_configuration()
    new_sdm_page.select_sdm_algorithm('Artificial Neural Network')
    new_sdm_page.select_occurrences()
    new_sdm_page.select_occurrences_dataset('Koala - Mini occurrence dataset for Redland City')
    new_sdm_page.select_absences()
    new_sdm_page.select_absences_dataset('Koala - Mini absence dataset for Redland City')
    new_sdm_page.select_environment()
    new_sdm_page.select_current_climate_layers('30" (~1km)', 'Current climate layers for Redland City, 30" (~1km)')
    new_sdm_page.select_environmental_datasets('Current climate layers for Redland City, 30" (~1km)', 'B14 - Precipitation of Driest Month')
    new_sdm_page.select_run()
    experiment_view = new_sdm_page.select_review_start_experiment()
    # Wait until completion
    experiment_view.wait_for_experiment_to_complete()
    self.assertTrue(experiment_view.has_completed_successfully())
    # Navigate back to experiment list
    experiment_page = experiment_view.click_experiments()
    # Check it's in the list (newest experiment is expected first)
    experiments = experiment_page.get_experiment_list()
    self.assertTrue(experiment_name.lower() in experiments[0], "Could not find SDM experment")
    # Share it
    sharing_page = experiment_page.click_share_experiment(experiment_name)
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    sharing_page.select_share_save()
    # Log out and back in as the second user
    logged_out_homepage = homepage.click_logout()
    login_page = logged_out_homepage.click_login()
    # BUG FIX: the homepage returned by valid_login was discarded, so the
    # stale pre-logout page object was reused below.  Capture it instead.
    homepage = login_page.valid_login("testuser", "Pass.123")
    experiment_page = homepage.click_experiments()
    experiments = experiment_page.get_experiment_list()
    # The shared experiment must be visible to the second user
    self.assertTrue(experiment_name in experiments[0])
    # Log out so we can delete it
    logged_out_homepage = homepage.click_logout("test user")
    login_page = logged_out_homepage.click_login()
    # Same fix: keep the homepage object returned for the owner's session.
    homepage = login_page.valid_login(self.username, self.password)
    # Cleanup
    self.delete_experiment(experiment_name)
def test_upload_species_dataset(self):
    """Fill in and submit the Species Dataset upload form."""
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    upload_page = datasets_page.select_dataset_upload()
    upload_page = upload_page.select_dataset_type("Species Dataset")
    # BUG FIX: os.getcwd() + "test.csv" concatenated without a path
    # separator (producing e.g. "/home/usertest.csv"); os.path.join
    # builds the intended "<cwd>/test.csv".
    upload_page.upload_file(os.path.join(os.getcwd(), "test.csv"))
    upload_page.enter_dataset_title("species_dataset" + generate_timestamp())
    upload_page.enter_dataset_description("bleh")
    upload_page.enter_scientific_name("bleh")
    upload_page.enter_taxon_id("bluh")
    upload_page.enter_common_name("bloop")
    upload_page.agree_to_terms_and_conditions()
    upload_page.submit()
def test_upload_futureclimate_layer(self):
    """Fill in and submit the Future Climate Layer upload form."""
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    upload_page = datasets_page.select_dataset_upload()
    upload_page = upload_page.select_dataset_type("Future Climate Layer")
    # BUG FIX: os.getcwd() + "test.csv" lacked a path separator;
    # os.path.join builds the intended "<cwd>/test.csv".
    upload_page.upload_file(os.path.join(os.getcwd(), "test.csv"))
    upload_page.enter_dataset_title("environmental_layer" + generate_timestamp())
    upload_page.enter_dataset_description("blurp")
    #upload_page.select_type("continuous")
    upload_page.select_resolution("30\" (~1km)")
    upload_page.enter_start_date(1, 1, 2000, "wow")
    upload_page.enter_end_date(2, 2, 2001, "weo")
    upload_page.select_emission_scenario("RCP6")
    upload_page.select_global_climate_model("Coupled Global Climate Model (CGCM3)")
    upload_page.agree_to_terms_and_conditions()
    upload_page.submit()
def test_ensemble_SDM(self):
    """Run an ANN SDM to completion, then build and run an ensemble from it.

    The ensemble sources the SDM's projection file and must itself
    complete successfully.  Both experiments are deleted afterwards.
    """
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    experiment_page = homepage.click_experiments()
    new_sdm_page = experiment_page.click_new_sdm_experiment()
    experiment_name = "ann_" + generate_timestamp()
    new_sdm_page.enter_experiment_name(experiment_name)
    new_sdm_page.enter_experiment_description('Artificial Neural Network with Koala occurrences')
    new_sdm_page.select_configuration()
    new_sdm_page.select_sdm_algorithm('Artificial Neural Network')
    new_sdm_page.select_occurrences()
    new_sdm_page.select_occurrences_dataset('Koala - Mini occurrence dataset for Redland City')
    new_sdm_page.select_absences()
    new_sdm_page.select_absences_dataset('Koala - Mini absence dataset for Redland City')
    new_sdm_page.select_environment()
    new_sdm_page.select_current_climate_layers('30" (~1km)', 'Current climate layers for Redland City, 30" (~1km)')
    new_sdm_page.select_environmental_datasets('Current climate layers for Redland City, 30" (~1km)', 'B14 - Precipitation of Driest Month')
    new_sdm_page.select_run()
    experiment_view = new_sdm_page.select_review_start_experiment()
    # Wait until completion
    experiment_view.wait_for_experiment_to_complete()
    self.assertTrue(experiment_view.has_completed_successfully())
    experiments_page = experiment_view.click_experiments()
    new_ensemble_page = experiments_page.click_new_ensemble_experiment()
    # FIX: keep the generated name so the experiment can be cleaned up
    # (it was previously discarded, leaking the experiment).
    ensemble_experiment_name = "Ensemble_" + generate_timestamp()
    new_ensemble_page.enter_experiment_name(ensemble_experiment_name)
    new_ensemble_page.enter_experiment_description("A description goes here.")
    new_ensemble_page.click_source_data()
    new_ensemble_page.select_dataset_type("sdm")
    # Choose the SDM from before
    new_ensemble_page.select_source_experiment(experiment_name)
    # Choose a file from that SDM experiment:
    new_ensemble_page.select_available_file("proj_current_Phascolarctus.cinereus.tif")
    new_ensemble_page.click_run()
    experiment_results = new_ensemble_page.click_start_experiment()
    experiment_results.wait_for_experiment_to_complete()
    self.assertTrue(experiment_results.has_completed_successfully())
    # Cleanup: sibling tests delete what they create; this one previously
    # left both the ensemble and the SDM behind.
    self.delete_experiment(ensemble_experiment_name)
    self.delete_experiment(experiment_name)
def test_upload_species_trait(self):
    """Upload a species trait CSV and verify it appears in the dataset list."""
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    upload_page = datasets_page.select_dataset_upload()
    upload_page = upload_page.select_dataset_type("Species Trait")
    # BUG FIX: os.getcwd() + "test.csv" lacked a path separator;
    # os.path.join builds the intended "<cwd>/test.csv".
    upload_page.upload_file(os.path.join(os.getcwd(), "test.csv"))
    dataset_title = "species_trait_test_" + generate_timestamp()
    upload_page.enter_dataset_title(dataset_title)
    upload_page.enter_dataset_description("Description")
    upload_page.agree_to_terms_and_conditions()
    datasets_page = upload_page.submit()
    # Retry while the datasets page hasn't loaded — but bounded, so a
    # broken page fails the test instead of spinning forever (the
    # original looped unconditionally while the list was empty).
    datasets = datasets_page.get_dataset_list()
    for _ in range(300):
        if datasets:
            break
        datasets = datasets_page.get_dataset_list()
    self.assertTrue(dataset_title in datasets)
def test_import_ALA(self):
    """Import a koala dataset from ALA discover and verify the list updates."""
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    # Get the number of datasets before the import so we can compare after
    name_list = datasets_page.get_dataset_list()
    number_of_datasets = len(name_list)
    # import koala via the ALA discover page
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("koala")
    datasets_discover_page.click_species()
    # We get redirected back to datasets list page here
    datasets_page = datasets_discover_page.click_download_species()
    # Try generate the list of names
    name_list = datasets_page.get_dataset_list()
    # The newly imported dataset should be first — make sure it is koala
    self.assertNotEqual(name_list[0].find("koala"), -1, "Could not find koala dataset")
    # Check we have one more dataset than before.
    if number_of_datasets == 20:
        # The list is already at the cap, so the count cannot grow.
        # NOTE(review): the code caps at 20 but the original comment
        # claimed the page shows 100 — confirm the actual page size.
        self.assertEqual(len(name_list), 20, "Mismatch number of datasets")
    else:
        self.assertEqual(len(name_list), number_of_datasets + 1, "Mismatch number of datasets")
    # Wait until the first one doesn't have a spinner anymore
    datasets_page.wait_while_spinner(0)
    # Refresh the page
    datasets_page.driver.refresh()
    # See if the first one still has controls after the refresh
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
def test_sharing_dataset(self):
    """Share ALA datasets with logged-in users and verify a second user sees them.

    Imports and shares a "pig" dataset, checks "testuser" can see it,
    then repeats the whole flow with "platypus" to make sure the first
    pass did not succeed by coincidence.
    """
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("pig")
    datasets_discover_page.click_species()
    # We get redirected back to datasets list page here
    datasets_page = datasets_discover_page.click_download_species()
    # Try generate the list of names
    name_list = datasets_page.get_dataset_list()
    # The first one should be pig
    self.assertNotEqual(name_list[0].find("pig"), -1, "Could not find pig dataset")
    # Wait until the first one doesn't have a spinner anymore
    datasets_page.wait_while_spinner(0)
    # Refresh the page
    datasets_page.driver.refresh()
    # See if the first one still has controls
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share the pig dataset with all logged-in users
    sharing_page = datasets_page.click_share_dataset("pig")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    datasets_page = sharing_page.select_share_save()
    logged_in_homepage = datasets_page.click_homepage()
    # Log out and log in as the second user
    logged_out_homepage = logged_in_homepage.click_logout("admin")
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    # The shared pig dataset must be visible to testuser
    self.assertTrue("pig" in datasets[0].lower())
    # ************************************#
    # At this point, we log out of testuser, back into admin
    # to add a different ALA set to make sure the test
    # passed not because of a coincidence
    # ************************************#
    logged_out_homepage = homepage.click_logout("test user")
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("platypus")
    datasets_discover_page.click_species()
    # We get redirected back to datasets list page here
    datasets_page = datasets_discover_page.click_download_species()
    # Try generate the list of names
    name_list = datasets_page.get_dataset_list()
    # This time the first one should be platypus
    # (original comment still said "pig" — copy/paste leftover)
    self.assertNotEqual(name_list[0].find("platypus"), -1, "Could not find platypus dataset")
    # Wait until the first one doesn't have a spinner anymore
    datasets_page.wait_while_spinner(0)
    # Refresh the page
    datasets_page.driver.refresh()
    # See if the first one still has controls
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share the platypus dataset the same way
    sharing_page = datasets_page.click_share_dataset("platypus")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    sharing_page.select_share_save()
    # Log out and verify testuser also sees platypus
    logged_out_homepage = homepage.click_logout()
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    self.assertTrue("platypus" in datasets[0].lower())
def test_unsharing_dataset(self):
    """Share two datasets, unshare one, and verify what a second user sees.

    Shares "pig" and then "rat" with logged-in users (rat ends up at the
    top of the list), confirms testuser sees rat first, then unshares
    rat as admin and confirms testuser now sees pig first.
    """
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("pig")
    datasets_discover_page.click_species()
    # We get redirected back to datasets list page here
    datasets_page = datasets_discover_page.click_download_species()
    # Try generate the list of names
    name_list = datasets_page.get_dataset_list()
    # The first one should be pig
    self.assertNotEqual(name_list[0].find("pig"), -1, "Could not find pig dataset")
    # Wait until the first one doesn't have a spinner anymore
    datasets_page.wait_while_spinner(0)
    # Refresh the page
    datasets_page.driver.refresh()
    # See if the first one still has controls
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share pig with all logged-in users
    sharing_page = datasets_page.click_share_dataset("pig")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    sharing_page.select_share_save()
    # Do this again with a different dataset, so that when we unshare one, we know
    # which one should be at the top.
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("rat")
    datasets_discover_page.click_species()
    # We get redirected back to datasets list page here
    datasets_page = datasets_discover_page.click_download_species()
    name_list = datasets_page.get_dataset_list()
    # The first one should now be rat
    self.assertNotEqual(name_list[0].find("rat"), -1, "Could not find rat dataset")
    # Wait until the first one doesn't have a spinner anymore
    datasets_page.wait_while_spinner(0)
    # Refresh the page
    datasets_page.driver.refresh()
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share rat with all logged-in users
    sharing_page = datasets_page.click_share_dataset("rat")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    datasets_list = sharing_page.select_share_save()
    logged_in_homepage = datasets_list.click_homepage()
    # Log out and check what testuser sees
    logged_out_homepage = logged_in_homepage.click_logout()
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    # BUG FIX: the original used assertNotEqual(<bool>, -1, ...), which is
    # always true and could never fail.  Assert the membership itself.
    self.assertTrue("rat" in datasets[0].lower(), "Wrong dataset in dataset list")
    # ************************************#
    # At this point, we log back into admin to unshare rat
    # ************************************#
    logged_out_homepage = homepage.click_logout("test user")
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    # Toggle rat. (i.e. unshare)
    sharing_page = datasets_page.click_share_dataset("rat")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    datasets_list = sharing_page.select_share_save()
    logged_in_homepage = datasets_list.click_homepage()
    logged_out_homepage = logged_in_homepage.click_logout()
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    # Same fix as above: pig should now be the first visible dataset
    self.assertTrue("pig" in datasets[0].lower(), "Wrong dataset in the dataset list")
def test_app_login(self):
    """A registered user can log in from the BCCVL home page."""
    home = Homepage(self.driver)
    # Sanity-check we actually landed on the home page first
    self.assertEqual("BCCVL Home", home.title)
    home = home.click_login().valid_login(self.username, self.password)
def test_knowledgebase_page(self):
    """Navigating to the Knowledge Base lands on the expected page."""
    page = Homepage(self.driver)
    page = page.click_login().valid_login(self.username, self.password)
    kb_page = page.click_knowledge_base()
    self.assertEqual("BCCVL Knowledge Base", kb_page.title)
def test_experiments_page(self):
    """Navigating to Experiments lands on the experiment list page."""
    page = Homepage(self.driver)
    page = page.click_login().valid_login(self.username, self.password)
    experiments = page.click_experiments()
    self.assertEqual("BCCVL Experiment List", experiments.title)
def test_datasets_page(self):
    """Navigating to Datasets lands on the datasets page."""
    page = Homepage(self.driver)
    page = page.click_login().valid_login(self.username, self.password)
    datasets = page.click_datasets()
    self.assertEqual("BCCVL Datasets", datasets.title)