def test_upload_species_dataset(self):
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    upload_page = datasets_page.select_dataset_upload()
    upload_page = upload_page.select_dataset_type("Species Dataset")
    upload_page.upload_file(os.path.join(os.getcwd(), "test.csv"))
    upload_page.enter_dataset_title("species_dataset" + generate_timestamp())
    upload_page.enter_dataset_description("bleh")
    upload_page.enter_scientific_name("bleh")
    upload_page.enter_taxon_id("bluh")
    upload_page.enter_common_name("bloop")
    upload_page.agree_to_terms_and_conditions()
    upload_page.submit()
def test_upload_futureclimate_layer(self):
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    upload_page = datasets_page.select_dataset_upload()
    upload_page = upload_page.select_dataset_type("Future Climate Layer")
    upload_page.upload_file(os.path.join(os.getcwd(), "test.csv"))
    upload_page.enter_dataset_title("environmental_layer" + generate_timestamp())
    upload_page.enter_dataset_description("blurp")
    # upload_page.select_type("continuous")
    upload_page.select_resolution("30\" (~1km)")
    upload_page.enter_start_date(1, 1, 2000, "wow")
    upload_page.enter_end_date(2, 2, 2001, "weo")
    upload_page.select_emission_scenario("RCP6")
    upload_page.select_global_climate_model("Coupled Global Climate Model (CGCM3)")
    upload_page.agree_to_terms_and_conditions()
    upload_page.submit()
def test_upload_species_trait(self):
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    upload_page = datasets_page.select_dataset_upload()
    upload_page = upload_page.select_dataset_type("Species Trait")
    upload_page.upload_file(os.path.join(os.getcwd(), "test.csv"))
    dataset_title = "species_trait_test_" + generate_timestamp()
    upload_page.enter_dataset_title(dataset_title)
    upload_page.enter_dataset_description("Description")
    upload_page.agree_to_terms_and_conditions()
    datasets_page = upload_page.submit()
    # Poll until the datasets page has loaded and lists at least one dataset.
    datasets = datasets_page.get_dataset_list()
    while len(datasets) == 0:
        datasets = datasets_page.get_dataset_list()
    self.assertTrue(dataset_title in datasets)
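
# A minimal sketch, not part of the original suite: the unbounded polling loop
# above can spin forever if the dataset list never populates. A bounded wait
# along these lines would fail with a clear message instead. The helper name,
# timeout, and interval are assumptions for illustration only; it relies solely
# on the get_dataset_list() method already used by the tests.
def _wait_for_dataset_list(self, datasets_page, timeout=60, interval=1):
    """Poll get_dataset_list() until it returns entries or the timeout expires."""
    import time
    deadline = time.time() + timeout
    datasets = datasets_page.get_dataset_list()
    while not datasets and time.time() < deadline:
        time.sleep(interval)
        datasets = datasets_page.get_dataset_list()
    self.assertTrue(datasets, "Dataset list did not load within %d seconds" % timeout)
    return datasets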
def test_import_ALA(self):
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    # Record how many datasets are listed before the import.
    name_list = datasets_page.get_dataset_list()
    number_of_datasets = len(name_list)
    # Import a koala occurrence dataset from ALA.
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("koala")
    datasets_discover_page.click_species()
    # We get redirected back to the datasets list page here.
    datasets_page = datasets_discover_page.click_download_species()
    # Regenerate the list of names; the newly imported koala dataset should be first.
    name_list = datasets_page.get_dataset_list()
    self.assertNotEqual(name_list[0].find("koala"), -1, "Could not find koala dataset")
    # Check we have one more dataset than before.
    if number_of_datasets == 20:
        # The page only shows 20 entries, so the count should still be 20.
        self.assertEqual(len(name_list), 20, "Mismatch in number of datasets")
    else:
        self.assertEqual(len(name_list), number_of_datasets + 1, "Mismatch in number of datasets")
    # Wait until the first entry no longer shows a spinner, then refresh the page.
    datasets_page.wait_while_spinner(0)
    datasets_page.driver.refresh()
    # Confirm the first entry has finished importing and shows its controls.
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
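
# A hedged convenience sketch (this helper is not in the original suite): the
# import tests repeat the same "wait for spinner, refresh, check controls"
# sequence. Something like this could consolidate it. It only calls page-object
# methods already used above; the helper name and `index` parameter are assumptions.
def _assert_import_finished(self, datasets_page, index=0):
    """Wait for the dataset at `index` to finish importing and verify its controls."""
    datasets_page.wait_while_spinner(index)
    datasets_page.driver.refresh()
    self.assertFalse(datasets_page.check_spinner(index),
                     "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(index),
                    "Dataset controls not found for this dataset entry")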
def test_sharing_dataset(self):
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    # Import a pig dataset from ALA as admin.
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("pig")
    datasets_discover_page.click_species()
    # We get redirected back to the datasets list page here.
    datasets_page = datasets_discover_page.click_download_species()
    # Regenerate the list of names; the newly imported pig dataset should be first.
    name_list = datasets_page.get_dataset_list()
    self.assertNotEqual(name_list[0].find("pig"), -1, "Could not find pig dataset")
    # Wait until the first entry no longer shows a spinner, then refresh.
    datasets_page.wait_while_spinner(0)
    datasets_page.driver.refresh()
    # Confirm the first entry has finished importing and shows its controls.
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share the pig dataset with logged-in users.
    sharing_page = datasets_page.click_share_dataset("pig")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    datasets_page = sharing_page.select_share_save()
    logged_in_homepage = datasets_page.click_homepage()
    # Log out of admin and log in as testuser to verify the shared dataset is visible.
    logged_out_homepage = logged_in_homepage.click_logout("admin")
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    self.assertTrue("pig" in datasets[0].lower())

    # ************************************#
    # At this point, we log out of testuser and back into admin to share a
    # different ALA dataset, to make sure the test didn't pass by coincidence.
    # ************************************#
    logged_out_homepage = homepage.click_logout("test user")
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("platypus")
    datasets_discover_page.click_species()
    # We get redirected back to the datasets list page here.
    datasets_page = datasets_discover_page.click_download_species()
    # Regenerate the list of names; the newly imported platypus dataset should be first.
    name_list = datasets_page.get_dataset_list()
    self.assertNotEqual(name_list[0].find("platypus"), -1, "Could not find platypus dataset")
    # Wait until the first entry no longer shows a spinner, then refresh.
    datasets_page.wait_while_spinner(0)
    datasets_page.driver.refresh()
    # Confirm the first entry has finished importing and shows its controls.
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share the platypus dataset with logged-in users.
    sharing_page = datasets_page.click_share_dataset("platypus")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    sharing_page.select_share_save()
    # Log out of admin and verify as testuser that the platypus dataset is visible.
    logged_out_homepage = homepage.click_logout()
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    self.assertTrue("platypus" in datasets[0].lower())
def test_unsharing_dataset(self):
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    # Import a pig dataset from ALA as admin.
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("pig")
    datasets_discover_page.click_species()
    # We get redirected back to the datasets list page here.
    datasets_page = datasets_discover_page.click_download_species()
    # Regenerate the list of names; the newly imported pig dataset should be first.
    name_list = datasets_page.get_dataset_list()
    self.assertNotEqual(name_list[0].find("pig"), -1, "Could not find pig dataset")
    # Wait until the first entry no longer shows a spinner, then refresh.
    datasets_page.wait_while_spinner(0)
    datasets_page.driver.refresh()
    # Confirm the first entry has finished importing and shows its controls.
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share the pig dataset with logged-in users.
    sharing_page = datasets_page.click_share_dataset("pig")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    sharing_page.select_share_save()

    # Do this again with a different dataset, so that when we unshare one,
    # we know which one should be at the top.
    datasets_discover_page = datasets_page.select_dataset_discover()
    datasets_discover_page.enter_find_species("rat")
    datasets_discover_page.click_species()
    # We get redirected back to the datasets list page here.
    datasets_page = datasets_discover_page.click_download_species()
    # Regenerate the list of names; the newly imported rat dataset should be first.
    name_list = datasets_page.get_dataset_list()
    self.assertNotEqual(name_list[0].find("rat"), -1, "Could not find rat dataset")
    # Wait until the first entry no longer shows a spinner, then refresh.
    datasets_page.wait_while_spinner(0)
    datasets_page.driver.refresh()
    # Confirm the first entry has finished importing and shows its controls.
    self.assertFalse(datasets_page.check_spinner(0), "Spinner still found when it shouldn't have been!")
    self.assertTrue(datasets_page.check_controls_exist(0), "Dataset controls not found for this dataset entry")
    # Share the rat dataset with logged-in users.
    sharing_page = datasets_page.click_share_dataset("rat")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    datasets_list = sharing_page.select_share_save()
    logged_in_homepage = datasets_list.click_homepage()
    # Log out of admin and log in as testuser; the rat dataset should be listed first.
    logged_out_homepage = logged_in_homepage.click_logout()
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    self.assertTrue("rat" in datasets[0].lower(), "Wrong dataset in dataset list")

    # ************************************#
    # At this point, we log back into admin to unshare rat.
    # ************************************#
    logged_out_homepage = homepage.click_logout("test user")
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    # Toggle sharing on rat (i.e. unshare it).
    sharing_page = datasets_page.click_share_dataset("rat")
    sharing_page.check_can_view("Logged-in users")
    sharing_page.agree_to_terms_and_conditions()
    datasets_list = sharing_page.select_share_save()
    logged_in_homepage = datasets_list.click_homepage()
    # Log out of admin and log in as testuser again; with rat unshared,
    # the pig dataset should now be listed first.
    logged_out_homepage = logged_in_homepage.click_logout()
    login_page = logged_out_homepage.click_login()
    homepage = login_page.valid_login("testuser", "Pass.123")
    datasets_page = homepage.click_datasets()
    datasets = datasets_page.get_dataset_list()
    self.assertTrue("pig" in datasets[0].lower(), "Wrong dataset in the dataset list")
def test_datasets_page(self):
    homepage = Homepage(self.driver)
    login_page = homepage.click_login()
    homepage = login_page.valid_login(self.username, self.password)
    datasets_page = homepage.click_datasets()
    self.assertEqual("BCCVL Datasets", datasets_page.title)