def test_create_a_measure_as_editor(driver, live_server, government_departments, frequencies_of_release):
    rdu_user = UserFactory(user_type=TypeOfUser.RDU_USER, active=True)
    admin_user = UserFactory(user_type=TypeOfUser.ADMIN_USER, active=True)
    approved_measure_version = MeasureVersionFactory(
        status="APPROVED",
        data_sources__publisher=random.choice(government_departments),
        data_sources__frequency_of_release=random.choice(frequencies_of_release),
    )
    sample_measure_version = MeasureVersionFactory.build(
        version="1.1", data_sources=[], update_corrects_data_mistake=True
    )
    sample_data_source = DataSourceFactory.build(
        publisher__name=random.choice(government_departments).name,
        frequency_of_release__description=random.choice(frequencies_of_release).description,
    )

    # GIVEN a setup with Topic and Subtopic
    driver_login(driver, live_server, rdu_user)
    navigate_to_topic_page(driver, live_server, approved_measure_version.measure.subtopic.topic)

    # WHEN an editor creates and saves a new measure page
    measure_edit_page, page = create_measure_starting_at_topic_page(
        driver,
        live_server,
        approved_measure_version.measure.subtopic.topic,
        approved_measure_version.measure.subtopic,
        sample_measure_version,
        sample_data_source,
    )

    # THEN the status should be draft
    assert measure_edit_page.is_current()
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["draft"]

    # WHEN we save and send the page to internal review
    measure_edit_page.click_save_and_send_to_review()

    # THEN the status should be internal review
    assert measure_edit_page.is_current()
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["internal_review"]

    # WHEN we send the page to department review
    measure_edit_page.click_department_review()

    # THEN the status should be department review
    driver.implicitly_wait(2)
    assert measure_edit_page.is_current()
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["department_review"]

    # AND the approve button should not be on the page
    assert measure_edit_page.approved_is_visible() is False

    # GIVEN the department review link
    review_link = measure_edit_page.get_review_link()

    # WHEN we log out and visit the review link
    measure_edit_page.log_out()
    driver.get(review_link)

    # THEN the preview page should contain the measure content
    assert page.title in driver.page_source

    # GIVEN the admin user
    driver_login(driver, live_server, admin_user)

    # WHEN we go to the edit page
    navigate_to_edit_page(
        driver,
        live_server,
        approved_measure_version.measure.subtopic.topic,
        approved_measure_version.measure.subtopic,
        page,
    )

    # THEN the approve button is visible
    assert measure_edit_page.approved_is_visible() is True

    # WHEN the admin user clicks approve
    measure_edit_page.click_approved()

    # THEN the status should be published
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["published"]

    # WHEN we create a minor update
    measure_edit_page.click_update()
    measure_create_version_page = MeasureCreateVersionPage(driver)
    measure_create_version_page.click_minor_update()
    measure_create_version_page.click_create()

    # THEN we are on the 1.1 measure version edit page
    assert driver.current_url.endswith("/1.1/edit")

    # WHEN we try to submit the minor update immediately
    measure_edit_page.click_save_and_send_to_review()

    # THEN we get validation errors (corrections radio and edit summary)
    assert "There is a problem" in driver.page_source

    # WHEN we fill in the required data
    measure_edit_page.fill_measure_page_minor_edit_fields(sample_measure_version)

    # THEN we can publish the new version
    measure_edit_page.click_save_and_send_to_review()
    measure_edit_page.click_department_review()
    measure_edit_page.click_approved()

    # THEN the status should be published
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["published"]

    # WHEN we create a major update
    measure_edit_page.click_update()
    measure_create_version_page = MeasureCreateVersionPage(driver)
    measure_create_version_page.click_major_update()
    measure_create_version_page.click_create()

    # THEN we are on the 2.0 measure version edit page
    assert driver.current_url.endswith("/2.0/edit")

    # WHEN we try to submit the major update immediately
    measure_edit_page.click_save_and_send_to_review()

    # THEN we get validation errors (edit summary)
    assert "There is a problem" in driver.page_source

    # WHEN we add an upload file
    measure_edit_page.click_add_source_data()
    add_source_data_page = AddSourceDataPage(driver)
    add_source_data_page.fill_source_data_page(sample_measure_version.uploads[0])
    add_source_data_page.click_save()

    # AND provide an edit summary
    measure_edit_page.fill_measure_page_major_edit_fields(sample_measure_version)
    measure_edit_page.click_save()

    # AND add a new primary data source
    add_primary_data_source_to_measure(driver, sample_data_source)

    # AND approve the major edit
    measure_edit_page.click_save_and_send_to_review()
    measure_edit_page.click_department_review()
    measure_edit_page.click_approved()

    # THEN the status should be published
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["published"]

    measure_edit_page.log_out()
def test_can_reject_a_measure_in_review_as_editor(driver, live_server, government_departments, frequencies_of_release):
    rdu_user = UserFactory(user_type=TypeOfUser.RDU_USER, active=True)
    approved_measure_version = MeasureVersionFactory(
        status="APPROVED",
        data_sources__publisher=random.choice(government_departments),
        data_sources__frequency_of_release=random.choice(frequencies_of_release),
    )
    sample_measure_version = MeasureVersionFactory.build(data_sources=[])
    sample_data_source = DataSourceFactory.build(
        publisher__name=random.choice(government_departments).name,
        frequency_of_release__description=random.choice(frequencies_of_release).description,
    )

    # GIVEN a setup with Topic and Subtopic
    driver_login(driver, live_server, rdu_user)
    navigate_to_topic_page(driver, live_server, approved_measure_version.measure.subtopic.topic)

    # WHEN an editor creates and saves a new measure page
    measure_edit_page, page = create_measure_starting_at_topic_page(
        driver,
        live_server,
        approved_measure_version.measure.subtopic.topic,
        approved_measure_version.measure.subtopic,
        sample_measure_version,
        sample_data_source,
    )

    # THEN the status should be draft
    assert measure_edit_page.is_current()
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["draft"]

    # WHEN we save and send it to internal review
    measure_edit_page.click_save_and_send_to_review()

    # THEN the status should be internal review
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["internal_review"]

    # WHEN we reject the page
    measure_edit_page.click_reject()

    # THEN the status should be rejected
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["rejected"]

    # WHEN we send it back to a draft
    measure_edit_page.click_send_back_to_draft()

    # THEN the status should be draft
    driver.implicitly_wait(2)
    assert measure_edit_page.is_current()
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["draft"]

    # WHEN we save and send it for department review
    measure_edit_page.click_save_and_send_to_review()
    measure_edit_page.click_department_review()

    # THEN the status should be department review
    driver.implicitly_wait(2)
    assert measure_edit_page.is_current()
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["department_review"]

    # WHEN we reject the measure again
    measure_edit_page.click_reject()

    # THEN the status should be rejected
    driver.implicitly_wait(2)
    assert measure_edit_page.is_current()
    assert measure_edit_page.get_status() == EXPECTED_STATUSES["rejected"]

    measure_edit_page.log_out()
def test_view_edit_measure_page(test_app_client, logged_in_rdu_user, stub_measure_data):
    data_source = DataSourceFactory.build(
        title="DWP Stats",
        type_of_data=["SURVEY"],
        source_url="http://dwp.gov.uk",
        publication_date="15th May 2017",
        note_on_corrections_or_updates="Note on corrections or updates",
        purpose="Purpose of data source",
    )
    measure_version = MeasureVersionFactory(status="DRAFT", data_sources=[data_source], **stub_measure_data)

    response = test_app_client.get(
        url_for(
            "cms.edit_measure_version",
            topic_slug=measure_version.measure.subtopic.topic.slug,
            subtopic_slug=measure_version.measure.subtopic.slug,
            measure_slug=measure_version.measure.slug,
            version=measure_version.version,
        )
    )

    assert response.status_code == 200

    page = BeautifulSoup(response.data.decode("utf-8"), "html.parser")
    assert page.h1.text.strip() == "Edit page"

    title = page.find("input", attrs={"id": "title"})
    assert title
    assert title.attrs.get("value") == "Test Measure Page"

    subtopic = page.find("select", attrs={"id": "subtopic"})
    assert subtopic
    assert int(subtopic.find("option", selected=True).attrs.get("value")) == measure_version.measure.subtopic.id

    time_covered = page.find("input", attrs={"id": "time_covered"})
    assert time_covered
    assert time_covered.attrs.get("value") == "4 months"

    assert len(page.find_all("input", class_="country")) == 5

    # TODO lowest level of geography

    methodology_label = page.find("label", attrs={"for": "methodology"})
    methodology = page.find("textarea", attrs={"id": "methodology"})
    assert methodology_label.text.strip() == "Methodology"
    assert methodology.text == "how we measure unemployment"

    suppression_and_disclosure_label = page.find("label", attrs={"for": "suppression_and_disclosure"})
    suppression_and_disclosure = page.find("textarea", attrs={"id": "suppression_and_disclosure"})
    assert suppression_and_disclosure_label.text.strip() == "Suppression rules and disclosure control (optional)"
    assert suppression_and_disclosure.text == "Suppression rules and disclosure control"

    rounding_label = page.find("label", attrs={"for": "estimation"})
    rounding = page.find("textarea", attrs={"id": "estimation"})
    assert rounding_label.text.strip() == "Rounding (optional)"
    assert rounding.text == "X people are unemployed"

    # TODO publisher/dept source
    # TODO frequency of release

    summary = page.find("textarea", attrs={"id": "summary"})
    assert summary
    assert summary.text == stub_measure_data["summary"]

    need_to_know = page.find("textarea", attrs={"id": "need_to_know"})
    assert need_to_know
    assert need_to_know.text == "Need to know this"

    measure_summary = page.find("textarea", attrs={"id": "measure_summary"})
    assert measure_summary
    assert measure_summary.text == "Unemployment measure summary"

    ethnicity_definition_summary = page.find("textarea", attrs={"id": "ethnicity_definition_summary"})
    assert ethnicity_definition_summary
    assert ethnicity_definition_summary.text == "This is a summary of ethnicity definitions"

    methodology = page.find("textarea", attrs={"id": "methodology"})
    assert methodology
    assert methodology.text == "how we measure unemployment"

    estimation = page.find("textarea", attrs={"id": "estimation"})
    assert estimation
    assert estimation.text == "X people are unemployed"

    related_publications = page.find("textarea", attrs={"id": "related_publications"})
    assert related_publications
    assert related_publications.text == "Related publications"

    qmi_url = page.find("input", attrs={"id": "qmi_url"})
    assert qmi_url
    assert qmi_url.attrs.get("value") == "http://www.quality-street.gov.uk"

    further_technical_information = page.find("textarea", attrs={"id": "further_technical_information"})
    assert further_technical_information
    assert further_technical_information.text == "Further technical information"