def test_websites_content_publish_sorting(drf_client, global_admin_user, published):
    """should be able to filter to just published or not"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create(published=True)
    # Created after the site's publish date, so these count as unpublished.
    unpublished_content = WebsiteContentFactory.create_batch(
        3,
        website=website,
        created_on=website.publish_date + datetime.timedelta(days=2),
    )
    published_content = WebsiteContentFactory.create_batch(3, website=website)
    for content in published_content:
        # created_on is auto-set at creation time, so backdate it explicitly
        # to place these before the site's publish date.
        content.created_on = website.publish_date - datetime.timedelta(days=2)
        content.save()
    api_url = reverse(
        "websites_content_api-list",
        kwargs={
            "parent_lookup_website": website.name,
        },
    )
    # BUG FIX: the original rebound the parametrized `published` boolean to the
    # list of published content, so the query param was always a truthy list of
    # model objects and the unpublished branch was never tested. Keep the
    # parameter intact and name the batches distinctly.
    resp = drf_client.get(api_url, {"published": published})
    expected_content = published_content if published else unpublished_content
    expected_ids = sorted(c.text_id for c in expected_content)
    assert resp.data["count"] == 3
    assert expected_ids == sorted(c["text_id"] for c in resp.data["results"])
def test_websites_content_list_multiple_type(drf_client, global_admin_user):
    """The list view of WebsiteContent should be able to filter by multiple type values"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create()
    WebsiteContentFactory.create_batch(
        3,
        website=website,
        type=factory.Iterator(["page", "resource", "other"]),
    )
    api_url = reverse(
        "websites_content_api-list",
        kwargs={
            "parent_lookup_website": website.name,
        },
    )
    # Indexed `type[N]` params mirror how the frontend serializes repeated values.
    query_params = {"type[0]": "page", "type[1]": "resource"}
    resp = drf_client.get(api_url, query_params)
    assert resp.data["count"] == 2
    returned_types = {item["type"] for item in resp.data["results"]}
    assert returned_types == {"page", "resource"}
def test_unassigned_youtube_ids(mocker, is_ocw):
    """videos_with_unassigned_youtube_ids should return WebsiteContent objects for videos with no youtube ids"""
    mocker.patch("websites.api.is_ocw_site", return_value=is_ocw)
    website = WebsiteFactory.create()
    # Videos that already carry a youtube id must never be returned.
    WebsiteContentFactory.create_batch(
        4,
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_VIDEO,
            "video_metadata": {"youtube_id": "abc123"},
        },
    )
    # One video with no youtube_id key at all, plus one each with None and "".
    missing_id_variants = [{}, {"youtube_id": None}, {"youtube_id": ""}]
    videos_without_ids = [
        WebsiteContentFactory.create(
            website=website,
            metadata={
                "resourcetype": RESOURCE_TYPE_VIDEO,
                "video_metadata": video_metadata,
            },
        )
        for video_metadata in missing_id_variants
    ]
    # Non-video content should be ignored even with bogus video metadata.
    WebsiteContentFactory.create(
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_IMAGE,
            "video_metadata": {"youtube_id": "bad_data"},
        },
    )
    unassigned_content = videos_with_unassigned_youtube_ids(website)
    if is_ocw:
        assert len(unassigned_content) == 3
        for content in videos_without_ids:
            assert content in unassigned_content
    else:
        assert len(unassigned_content) == 0
def test_websitecontent_autogen_filename_unique(
    mocker, filename_base, existing_filenames, exp_result_filename, exclude_content
):
    """
    get_valid_new_filename should return a filename that obeys uniqueness constraints, adding a suffix and
    removing characters from the end of the string as necessary.
    """
    # Lower the max filename length so truncation behavior is exercised.
    mocker.patch("websites.api.CONTENT_FILENAME_MAX_LEN", 14)
    dirpath = "path/to"
    website = WebsiteFactory.create()
    contents = WebsiteContentFactory.create_batch(
        len(existing_filenames),
        website=website,
        type="page",
        dirpath=dirpath,
        filename=factory.Iterator(existing_filenames),
    )
    if exclude_content and contents:
        exclude_text_id = contents[0].text_id
    else:
        exclude_text_id = None
    result = get_valid_new_filename(
        website_pk=website.pk,
        dirpath=dirpath,
        filename_base=filename_base,
        exclude_text_id=exclude_text_id,
    )
    # When the only clash is excluded, the base name itself should come back.
    expected = filename_base if exclude_content else exp_result_filename
    assert result == expected
def test_incomplete_content_warnings(
    mocker, has_missing_ids, has_missing_captions, has_truncatable_text
):
    """incomplete_content_warnings should return expected warning messages"""
    website = WebsiteFactory.create()
    video_content = WebsiteContentFactory.create_batch(3, website=website)
    no_yt_ids = video_content[0:2] if has_missing_ids else []
    no_caps = video_content[1:3] if has_missing_captions else []
    truncatable_vids = [video_content[2]] if has_truncatable_text else []
    mocker.patch(
        "websites.api.videos_with_truncatable_text",
        return_value=truncatable_vids,
    )
    mocker.patch(
        "websites.api.videos_with_unassigned_youtube_ids",
        return_value=no_yt_ids,
    )
    mocker.patch(
        "websites.api.videos_missing_captions",
        return_value=no_caps,
    )
    warnings = incomplete_content_warnings(website)
    expected_count = 0
    if has_missing_ids:
        expected_count += 1
        # The missing-youtube-id warning is always first when present.
        for content in no_yt_ids:
            assert content.title in warnings[0]
    if has_missing_captions:
        expected_count += 1
        # The captions warning follows the missing-id warning if both exist.
        captions_idx = 1 if has_missing_ids else 0
        for content in no_caps:
            assert content.title in warnings[captions_idx]
    if has_truncatable_text:
        expected_count += 1
        assert len(warnings) == expected_count
        # The truncatable-text warning is always last.
        assert video_content[2].title in warnings[expected_count - 1]
    if not (has_missing_ids or has_missing_captions or has_truncatable_text):
        assert warnings == []
def github(settings, mocker, mock_branches):
    """ Create a github backend for a website """
    settings.GIT_TOKEN = "faketoken"
    settings.GIT_ORGANIZATION = "fake_org"
    api_mock = mocker.patch(
        "content_sync.backends.github.GithubApiWrapper",
    )
    repo_mock = api_mock.get_repo.return_value
    repo_mock.default_branch = settings.GIT_BRANCH_MAIN
    repo_mock.get_branches.return_value = [mock_branches[0]]
    website = WebsiteFactory.create()
    WebsiteContentFactory.create_batch(5, website=website)
    backend = GithubBackend(website)
    # Swap the real API wrapper for the mock so no network calls happen.
    backend.api = api_mock
    yield SimpleNamespace(
        backend=backend,
        api=api_mock,
        repo=repo_mock,
        branches=mock_branches,
    )
def test_videos_missing_captions(mocker, is_ocw):
    """videos_missing_captions should return WebsiteContent objects for videos with no captions"""
    mocker.patch("websites.api.is_ocw_site", return_value=is_ocw)
    website = WebsiteFactory.create()
    # Videos that already have a captions file must never be flagged.
    WebsiteContentFactory.create_batch(
        3,
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_VIDEO,
            "video_files": {"video_captions_file": "abc123"},
        },
    )
    # One video each with a None and an empty-string captions file.
    videos_without_captions = [
        WebsiteContentFactory.create(
            website=website,
            metadata={
                "resourcetype": RESOURCE_TYPE_VIDEO,
                "video_files": {"video_captions_file": captions},
            },
        )
        for captions in (None, "")
    ]
    # Non-video content is ignored even if it carries caption data.
    WebsiteContentFactory.create(
        website=website,
        metadata={
            "resourcetype": RESOURCE_TYPE_IMAGE,
            "video_files": {"video_captions_file": "bad_data"},
        },
    )
    unassigned_content = videos_missing_captions(website)
    if is_ocw:
        assert len(unassigned_content) == 2
        for content in videos_without_captions:
            assert content in unassigned_content
    else:
        assert len(unassigned_content) == 0
def db_data():
    """Fixture that seeds the database with data needed for this test suite"""
    users = UserFactory.create_batch(2)
    website = WebsiteFactory.create()
    # First three contents are updated by users[0], the last two by users[1].
    editors = [users[0]] * 3 + [users[1]] * 2
    website_contents = WebsiteContentFactory.create_batch(
        5,
        website=website,
        updated_by=factory.Iterator(editors),
    )
    return SimpleNamespace(
        users=users,
        website=website,
        website_contents=website_contents,
    )
def test_websites_content_list_page_content(drf_client, global_admin_user):
    """The list view of WebsiteContent should be able to filter by page content only"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create()
    # Only the first item ("type1") is marked as page content.
    WebsiteContentFactory.create_batch(
        3,
        website=website,
        type=factory.Iterator(["type1", "type2", "type3"]),
        is_page_content=factory.Iterator([True, False, False]),
    )
    api_url = reverse(
        "websites_content_api-list",
        kwargs={
            "parent_lookup_website": website.name,
        },
    )
    resp = drf_client.get(api_url, {"page_content": True})
    assert resp.data["count"] == 1
    assert resp.data["results"][0]["type"] == "type1"