def test_is_ocw_site(settings):
    """is_ocw_site() should return expected bool value for a website"""
    settings.OCW_IMPORT_STARTER_SLUG = "ocw-course"
    # A site is an OCW site iff its starter slug matches the configured slug
    ocw_site = WebsiteFactory.create(
        starter=WebsiteStarterFactory.create(slug="ocw-course")
    )
    other_site = WebsiteFactory.create(
        starter=WebsiteStarterFactory.create(slug="not-ocw-course")
    )
    assert is_ocw_site(ocw_site) is True
    assert is_ocw_site(other_site) is False
def test_fetch_website_not_found():
    """fetch_website should raise if a matching website was not found"""
    # Create a website whose uuid/title/name all differ from the lookup value
    WebsiteFactory.create(
        uuid=UUID(EXAMPLE_UUID_STR, version=4),
        title="my title",
        name="my name",
    )
    with pytest.raises(Website.DoesNotExist):
        fetch_website("bad values")
def test_website_endpoint_search(drf_client):
    """
    should limit the queryset based on the search param
    """
    superuser = UserFactory.create(is_superuser=True)
    drf_client.force_login(superuser)
    WebsiteFactory.create(title="Apple", name="Bacon", short_id="Cheese").save()
    WebsiteFactory.create(title="Xylophone", name="Yellow", short_id="Zebra").save()
    WebsiteFactory.create(
        title="U.S. Military Power",
        name="17-482-u-s-military-power-spring-2015",
        short_id="17.482-Spring-2015",
    ).save()
    WebsiteFactory.create(
        title="Biomedical Signal and Image Processing",
        name="hst-582j-biomedical-signal-and-image-processing-spring-2007",
        short_id="HST.582J-Spring-2007",
    ).save()
    # Search should match on title, name, and short_id respectively
    for word in ["Apple", "Bacon", "Cheese"]:
        resp = drf_client.get(reverse("websites_api-list"), {"search": word})
        assert [website["title"] for website in resp.data.get("results")] == ["Apple"]
    for word in ["Xylophone", "Yellow", "Zebra"]:
        resp = drf_client.get(reverse("websites_api-list"), {"search": word})
        assert [website["title"] for website in resp.data.get("results")] == ["Xylophone"]
    # Course-number style queries should match case-insensitively, with either
    # "." or "-" punctuation
    for word in ["U.S. military", "17-482", "17.482"]:
        resp = drf_client.get(reverse("websites_api-list"), {"search": word})
        assert [
            website["title"] for website in resp.data.get("results")
        ] == ["U.S. Military Power"]
    for word in ["signal and image", "HsT.582", "hSt-582"]:
        resp = drf_client.get(reverse("websites_api-list"), {"search": word})
        assert [
            website["title"] for website in resp.data.get("results")
        ] == ["Biomedical Signal and Image Processing"]
def test_upsert_mass_publish_pipeline(
    settings, pipeline_settings, mocker, mock_auth, pipeline_exists, version
):  # pylint:disable=too-many-locals,too-many-arguments
    """The mass publish pipeline should have expected configuration"""
    hugo_projects_path = "https://github.com/org/repo"
    # The root website's starter path supplies the hugo projects git url that
    # should end up in the pipeline config
    WebsiteFactory.create(
        starter=WebsiteStarterFactory.create(
            source=STARTER_SOURCE_GITHUB, path=f"{hugo_projects_path}/site"
        ),
        name=settings.ROOT_WEBSITE_NAME,
    )
    instance_vars = f'?vars={quote(json.dumps({"version": version}))}'
    url_path = f"/api/v1/teams/{settings.CONCOURSE_TEAM}/pipelines/{BaseMassPublishPipeline.PIPELINE_NAME}/config{instance_vars}"
    if not pipeline_exists:
        # Concourse raises when fetching the config of a nonexistent pipeline
        mock_get = mocker.patch(
            "content_sync.pipelines.concourse.ConcourseApi.get_with_headers",
            side_effect=HTTPError(),
        )
    else:
        mock_get = mocker.patch(
            "content_sync.pipelines.concourse.ConcourseApi.get_with_headers",
            return_value=({}, {"X-Concourse-Config-Version": "3"}),
        )
    mock_put_headers = mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.put_with_headers"
    )
    pipeline = MassPublishPipeline(version)
    pipeline.upsert_pipeline()
    mock_get.assert_any_call(url_path)
    # The config-version header is only sent when updating an existing pipeline
    mock_put_headers.assert_any_call(
        url_path,
        data=mocker.ANY,
        headers=({"X-Concourse-Config-Version": "3"} if pipeline_exists else None),
    )
    _, kwargs = mock_put_headers.call_args_list[0]
    if version == VERSION_DRAFT:
        bucket = settings.AWS_PREVIEW_BUCKET_NAME
        api_url = settings.OCW_STUDIO_DRAFT_URL
    else:
        bucket = settings.AWS_PUBLISH_BUCKET_NAME
        api_url = settings.OCW_STUDIO_LIVE_URL
    # Spot-check that the expected settings appear in the serialized config
    config_str = json.dumps(kwargs)
    assert settings.OCW_GTM_ACCOUNT_ID in config_str
    assert bucket in config_str
    assert version in config_str
    assert f"{hugo_projects_path}.git" in config_str
    assert api_url in config_str
def test_website_starter_unpublished():
    """Saving a starter should flag every site using it as having unpublished draft/live changes"""
    site = WebsiteFactory.create(
        has_unpublished_live=False, has_unpublished_draft=False
    )
    sibling_site = WebsiteFactory.create(
        has_unpublished_live=False,
        has_unpublished_draft=False,
        starter=site.starter,
    )
    # Saving the shared starter should mark both sites as unpublished
    site.starter.save()
    for affected in (site, sibling_site):
        affected.refresh_from_db()
        assert affected.has_unpublished_draft is True
        assert affected.has_unpublished_live is True
def test_update_sync_status(file_errors, site_errors, status):
    """update_sync_status should update the website sync_status field as expected"""
    now = now_in_utc()
    website = WebsiteFactory.create(
        synced_on=now,
        sync_status=WebsiteSyncStatus.PROCESSING,
        sync_errors=site_errors,
    )
    for error in file_errors:
        # A failed file carries a sync_error and no resource; a successful one
        # gets a resource and COMPLETE status.
        DriveFileFactory.create(
            website=website,
            sync_error=error,
            sync_dt=now,
            resource=(
                WebsiteContentFactory.create(type=CONTENT_TYPE_RESOURCE, website=website)
                if not error
                else None
            ),
            status=(
                DriveFileStatus.COMPLETE if error is None else DriveFileStatus.FAILED
            ),
        )
    # A file synced AFTER the `now` cutoff should not affect this sync's status
    DriveFileFactory.create(
        website=website,
        sync_dt=now_in_utc() + timedelta(seconds=10),
        resource=WebsiteContentFactory.create(
            type=CONTENT_TYPE_RESOURCE, website=website
        ),
    )
    update_sync_status(website, now)
    website.refresh_from_db()
    assert website.sync_status == status
    # Aggregated errors = non-empty file errors plus any site-level errors
    assert sorted(website.sync_errors) == sorted(
        [error for error in file_errors if error] + (site_errors or [])
    )
def test_setup_website_groups_permissions():
    """ Permissions should be assigned as expected """
    owner, admin, editor = UserFactory.create_batch(3)
    website = WebsiteFactory.create(owner=owner)
    # permissions should have all been added via signal;
    # return tuple is (groups created, groups updated, owner perms updated)
    assert setup_website_groups_permissions(website) == (0, 0, False)
    # A deleted group should be recreated
    website.admin_group.delete()
    assert setup_website_groups_permissions(website) == (1, 0, False)
    # A group with a missing permission should be updated
    remove_perm(constants.PERMISSION_VIEW, website.editor_group, website)
    assert setup_website_groups_permissions(website) == (0, 1, False)
    # A missing owner permission should be restored
    remove_perm(constants.PERMISSION_PUBLISH, website.owner, website)
    assert setup_website_groups_permissions(website) == (0, 0, True)
    admin.groups.add(website.admin_group)
    editor.groups.add(website.editor_group)
    for permission in constants.PERMISSIONS_EDITOR:
        assert editor.has_perm(permission, website) is True
    for permission in constants.PERMISSIONS_ADMIN:
        for user in [owner, admin]:
            assert user.has_perm(permission, website) is True
    # Editors must not have publish/collaborate permissions
    for permission in [
        constants.PERMISSION_PUBLISH,
        constants.PERMISSION_COLLABORATE,
    ]:
        assert editor.has_perm(permission, website) is False
def test_websites_content_create_with_textid(drf_client, global_admin_user):
    """If a text_id is added when POSTing to the WebsiteContent, we should use that instead of creating a uuid"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create()
    payload = {
        "type": "sitemetadata",
        "metadata": {
            "course_title": "a title",
        },
        "text_id": "sitemetadata",
    }
    resp = drf_client.post(
        reverse(
            "websites_content_api-list",
            kwargs={
                "parent_lookup_website": website.name,
            },
        ),
        data=payload,
    )
    assert resp.status_code == 201
    content = website.websitecontent_set.get()
    assert content.type == payload["type"]
    assert resp.data["text_id"] == str(content.text_id)
    # The supplied text_id was used verbatim instead of a generated uuid
    assert content.text_id == "sitemetadata"
def test_websites_content_publish_sorting(drf_client, global_admin_user, published):
    """should be able to filter to just published or not"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create(published=True)
    # Content created after the site's publish date is not yet published
    unpublished_content = WebsiteContentFactory.create_batch(
        3,
        website=website,
        created_on=website.publish_date + datetime.timedelta(days=2),
    )
    published_content = WebsiteContentFactory.create_batch(
        3,
        website=website,
    )
    for content in published_content:
        # created_on is auto-set on save, so backdate it explicitly to place
        # this content before the site's publish date
        content.created_on = website.publish_date - datetime.timedelta(days=2)
        content.save()
    api_url = reverse(
        "websites_content_api-list",
        kwargs={
            "parent_lookup_website": website.name,
        },
    )
    # BUG FIX: previously a local list named `published` shadowed the
    # parametrized `published` fixture argument, so the filter always received
    # a truthy list and the published=False case was never exercised.
    resp = drf_client.get(api_url, {"published": published})
    expected_content = published_content if published else unpublished_content
    expected_ids = sorted([c.text_id for c in expected_content])
    assert resp.data["count"] == 3
    assert expected_ids == sorted([c["text_id"] for c in resp.data["results"]])
def test_websites_content_list_multiple_type(drf_client, global_admin_user):
    """The list view of WebsiteContent should be able to filter by multiple type values"""
    drf_client.force_login(global_admin_user)
    website = WebsiteFactory.create()
    # One content item each of type "page", "resource", "other"
    WebsiteContentFactory.create_batch(
        3,
        website=website,
        type=factory.Iterator(["page", "resource", "other"]),
    )
    api_url = reverse(
        "websites_content_api-list",
        kwargs={
            "parent_lookup_website": website.name,
        },
    )
    # Indexed-bracket query params pass a list of type values
    resp = drf_client.get(
        api_url,
        {"type[0]": "page", "type[1]": "resource"},
    )
    assert resp.data["count"] == 2
    results = resp.data["results"]
    assert {result["type"] for result in results} == {"page", "resource"}
def test_content_create_page_added_context_with_slug(drf_client, global_admin_user):
    """
    POSTing to the WebsiteContent list view without a filename should add a
    generated filename based on the slug field
    """
    drf_client.force_login(global_admin_user)
    title = "My Title"
    website = WebsiteFactory.create()
    # Point the first collection's slug at the text_id field so the generated
    # filename comes from text_id
    website.starter.config["collections"][0]["slug"] = "text_id"
    website.starter.save()
    payload = {
        "title": title,
        "markdown": "some markdown",
        "type": "blog",
    }
    # "folder" path for the config item with type="blog" in basic-site-config.yml
    expected_dirpath = "content/blog"
    resp = drf_client.post(
        reverse(
            "websites_content_api-list",
            kwargs={
                "parent_lookup_website": website.name,
            },
        ),
        data=payload,
    )
    assert resp.status_code == 201
    content = website.websitecontent_set.order_by("-created_on").first()
    assert content.website == website
    # Filename was generated from the slug field (text_id)
    assert content.filename == content.text_id
    assert content.dirpath == expected_dirpath
    assert content.is_page_content is True
def test_data_file_deserialize(serializer_cls, file_content):
    """
    JsonFileSerializer and YamlFileSerializer.deserialize should create the expected content object
    from some data file contents
    """
    website = WebsiteFactory.create()
    site_config = SiteConfig(website.starter.config)
    # Pick the first config item that is backed by a data file
    file_config_item = next(
        config_item
        for config_item in site_config.iter_items()
        if "file" in config_item.item
    )
    filepath = file_config_item.item["file"]
    website_content = serializer_cls(site_config).deserialize(
        website=website,
        filepath=filepath,
        file_contents=file_content,
    )
    assert website_content.title == "Content Title"
    assert website_content.type == file_config_item.item["name"]
    # Data files use the config item name as the text_id, not a uuid
    assert website_content.text_id == file_config_item.item["name"]
    assert website_content.is_page_content is False
    assert website_content.metadata == {
        "tags": ["Design"],
        "description": "**This** is the description",
    }
def test_upsert_pipeline_public_vs_private(settings, mocker, mock_auth, is_private_repo):
    """Pipeline config should have expected course-markdown git url and private git key setting if applicable"""
    settings.CONCOURSE_IS_PRIVATE_REPO = is_private_repo
    settings.GIT_DOMAIN = "github.test.edu"
    settings.GIT_ORGANIZATION = "testorg"
    settings.OCW_STUDIO_DRAFT_URL = "https://draft.test.edu"
    settings.OCW_STUDIO_LIVE_URL = "https://live.test.edu"
    mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.get_with_headers",
        return_value=(None, {"X-Concourse-Config-Version": 1}),
    )
    mock_put_headers = mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.put_with_headers"
    )
    starter = WebsiteStarterFactory.create(
        source=STARTER_SOURCE_GITHUB, path="https://github.com/org/repo/site"
    )
    website = WebsiteFactory.create(starter=starter)
    private_key_str = "((git-private-key))"
    # Private repos should be cloned over ssh; public repos over https
    if is_private_repo:
        repo_url_str = f"git@{settings.GIT_DOMAIN}:{settings.GIT_ORGANIZATION}/{website.short_id}.git"
    else:
        repo_url_str = f"https://{settings.GIT_DOMAIN}/{settings.GIT_ORGANIZATION}/{website.short_id}.git"
    pipeline = SitePipeline(website)
    pipeline.upsert_pipeline()
    _, kwargs = mock_put_headers.call_args_list[0]
    config_str = json.dumps(kwargs)
    assert repo_url_str in config_str
    # The vaulted git key placeholder appears only for private repos
    assert (private_key_str in config_str) is is_private_repo
def test_mail_transcripts_complete_notification(settings, mocker):
    """mail_transcripts_complete_notification should send correct email to correct users"""
    website = WebsiteFactory.create()
    users = UserFactory.create_batch(4)
    # Half the users are admins, half are editors; all should be notified
    for user in users[:2]:
        user.groups.add(website.admin_group)
    for user in users[2:]:
        user.groups.add(website.editor_group)
    mock_get_message_sender = mocker.patch("videos.tasks.get_message_sender")
    # The sender is used as a context manager, so grab the __enter__ result
    mock_sender = mock_get_message_sender.return_value.__enter__.return_value
    mail_transcripts_complete_notification(website)
    mock_get_message_sender.assert_called_once_with(VideoTranscriptingCompleteMessage)
    # One message per group member plus one extra recipient — presumably the
    # site owner created by the factory; TODO(review): confirm
    assert mock_sender.build_and_send_message.call_count == len(users) + 1
    for user in users:
        mock_sender.build_and_send_message.assert_any_call(
            user,
            {
                "site": {
                    "title": website.title,
                    "url": urljoin(settings.SITE_BASE_URL, f"/sites/{website.name}"),
                },
            },
        )
def test_get_valid_s3_key():
    """get_valid_s3_key avoids dupe s3 keys"""
    site = WebsiteFactory.create()
    site_prefix = site.starter.config.get("root-url-path").rstrip("/")
    file_1 = DriveFileFactory.create(
        name="(file).PnG", website=site, mime_type="image/png", s3_key=None
    )
    file_1.s3_key = file_1.get_valid_s3_key()
    # Special characters are stripped and the name lowercased
    assert file_1.s3_key == f"{site_prefix}/{site.name}/file.png"
    file_1.save()
    file_2 = DriveFileFactory.create(
        name="File!.pNG", website=site, mime_type="image/png", s3_key=None
    )
    file_2.s3_key = file_2.get_valid_s3_key()
    # Sanitized name collides with file_1's saved key, so a suffix is appended
    assert file_2.s3_key == f"{site_prefix}/{site.name}/file2.png"
    file_2.save()
    file_3 = DriveFileFactory.create(
        name="FILE?.png", website=site, mime_type="image/png", s3_key=None
    )
    file_3.s3_key = file_3.get_valid_s3_key()
    assert file_3.s3_key == f"{site_prefix}/{site.name}/file3.png"
    # Different website
    file_4 = DriveFileFactory.create(
        name="(file).PnG", mime_type="image/png", s3_key=None
    )
    # No suffix needed: keys are only deduplicated within the same website
    assert file_4.get_valid_s3_key() == f"{site_prefix}/{file_4.website.name}/file.png"
def test_update_youtube_metadata_no_videos(mocker):
    """The YouTube API class should never be constructed when the site has no videos"""
    # Enable both gates so only the absence of videos can short-circuit the call
    mocker.patch("videos.youtube.is_youtube_enabled", return_value=True)
    mocker.patch("videos.youtube.is_ocw_site", return_value=True)
    youtube_api_mock = mocker.patch("videos.youtube.YouTubeApi")
    update_youtube_metadata(WebsiteFactory.create())
    youtube_api_mock.assert_not_called()
def test_websitecontent_autogen_filename_unique(mocker, filename_base, existing_filenames, exp_result_filename, exclude_content): """ get_valid_new_filename should return a filename that obeys uniqueness constraints, adding a suffix and removing characters from the end of the string as necessary. """ # Set a lower limit for max filename length to test that filenames are truncated appropriately mocker.patch("websites.api.CONTENT_FILENAME_MAX_LEN", 14) content_type = "page" dirpath = "path/to" website = WebsiteFactory.create() contents = WebsiteContentFactory.create_batch( len(existing_filenames), website=website, type=content_type, dirpath=dirpath, filename=factory.Iterator(existing_filenames), ) exclude_text_id = contents[ 0].text_id if exclude_content and contents else None assert (get_valid_new_filename( website_pk=website.pk, dirpath=dirpath, filename_base=filename_base, exclude_text_id=exclude_text_id, ) == (exp_result_filename if not exclude_content else filename_base))
def test_incomplete_content_warnings(
    mocker, has_missing_ids, has_missing_captions, has_truncatable_text
):
    """incomplete_content_warnings should return expected warning messages"""
    website = WebsiteFactory.create()
    video_content = WebsiteContentFactory.create_batch(3, website=website)
    no_yt_ids = video_content[0:2] if has_missing_ids else []
    no_caps = video_content[1:3] if has_missing_captions else []
    truncatable_vids = [video_content[2]] if has_truncatable_text else []
    mocker.patch(
        "websites.api.videos_with_truncatable_text", return_value=truncatable_vids
    )
    mocker.patch(
        "websites.api.videos_with_unassigned_youtube_ids",
        return_value=no_yt_ids,
    )
    mocker.patch(
        "websites.api.videos_missing_captions",
        return_value=no_caps,
    )
    warnings = incomplete_content_warnings(website)
    # Warnings appear in a fixed order: missing youtube ids, missing captions,
    # then truncatable text
    warnings_len = 0
    if has_missing_ids:
        warnings_len += 1
        for content in no_yt_ids:
            assert content.title in warnings[0]
    if has_missing_captions:
        warnings_len += 1
        for content in no_caps:
            assert content.title in warnings[1 if has_missing_ids else 0]
    if has_truncatable_text:
        warnings_len += 1
        assert len(warnings) == warnings_len
        # The truncatable-text warning is the last one in the list
        assert video_content[2].title in warnings[warnings_len - 1]
    if not has_missing_ids and not has_missing_captions and not has_truncatable_text:
        assert warnings == []
def test_create_gdrive_resource_content(mime_type, mock_get_s3_content_type):
    """create_resource_from_gdrive should create a WebsiteContent object linked to a DriveFile object"""
    # Names with special characters should be sanitized and deduplicated
    filenames = ["word.docx", "word!.docx", "(word?).docx"]
    deduped_names = ["word", "word2", "word3"]
    website = WebsiteFactory.create()
    for filename, deduped_name in zip(filenames, deduped_names):
        drive_file = DriveFileFactory.create(
            website=website,
            name=filename,
            s3_key=f"test/path/{deduped_name}.docx",
            mime_type=mime_type,
        )
        create_gdrive_resource_content(drive_file)
        content = WebsiteContent.objects.filter(
            website=website,
            title=filename,
            type="resource",
            is_page_content=True,
        ).first()
        assert content is not None
        assert content.dirpath == "content/resource"
        assert content.filename == deduped_name
        assert content.metadata["resourcetype"] == RESOURCE_TYPE_DOCUMENT
        assert content.metadata["file_type"] == mime_type
        assert content.metadata["image"] == ""
        # The drive file should now point back at the created resource
        drive_file.refresh_from_db()
        assert drive_file.resource == content
def test_trigger_publish_live(settings, mocker):
    """trigger_publish should hand the live publish off to the backend task"""
    settings.CONTENT_SYNC_BACKEND = "content_sync.backends.SampleBackend"
    publish_task = mocker.patch("content_sync.tasks.publish_website_backend_live")
    site = WebsiteFactory.create()
    api.trigger_publish(site.name, VERSION_LIVE)
    publish_task.delay.assert_called_once_with(site.name)
def test_videos_with_truncatable_text(mocker, is_ocw):
    """Videos with titles or descriptions that are too long should be returned"""
    mocker.patch("websites.api.is_ocw_site", return_value=is_ocw)
    website = WebsiteFactory.create()
    # (title, description) pairs: over-long title, over-long description, both ok
    title_descs = (
        (" ".join(["TooLongTitle" for _ in range(10)]), "desc"),
        ("title", " ".join(["TooLongDescription" for _ in range(500)])),
        ("title", "desc"),
    )
    resources = []
    for title, desc in title_descs:
        resources.append(
            WebsiteContentFactory.create(
                website=website,
                title=title,
                metadata={
                    "description": desc,
                    "resourcetype": RESOURCE_TYPE_VIDEO,
                    "video_files": {"video_captions_file": "abc123"},
                },
            )
        )
    truncatable_content = videos_with_truncatable_text(website)
    # Sanity check that the long description really exceeds the threshold
    assert len(resources[1].metadata["description"]) > 5000
    if is_ocw:
        # Only the first two resources have truncatable text
        assert len(truncatable_content) == 2
        for content in resources[0:2]:
            assert content in truncatable_content
    else:
        # Non-OCW sites are exempt from the truncation check
        assert truncatable_content == []
def test_publish_website(  # pylint:disable=redefined-outer-name,too-many-arguments
    settings,
    mocker,
    mock_api_funcs,
    prepublish,
    prepublish_actions,
    has_api,
    version,
    status,
    trigger,
):
    """Verify that the appropriate backend calls are made by the publish_website function"""
    settings.PREPUBLISH_ACTIONS = prepublish_actions
    website = WebsiteFactory.create()
    # Seed the version-specific publish status fields on the website
    setattr(website, f"{version}_publish_status", status)
    if status:
        setattr(website, f"{version}_publish_status_updated_on", now_in_utc())
    website.save()
    build_id = 123456
    pipeline_api = mocker.Mock() if has_api else None
    backend = mock_api_funcs.mock_get_backend.return_value
    pipeline = mock_api_funcs.mock_get_pipeline.return_value
    pipeline.trigger_pipeline_build.return_value = build_id
    api.publish_website(
        website.name,
        version,
        pipeline_api=pipeline_api,
        prepublish=prepublish,
        trigger_pipeline=trigger,
    )
    mock_api_funcs.mock_get_backend.assert_called_once_with(website)
    backend.sync_all_content_to_backend.assert_called_once()
    # Draft publishes merge the draft branch; live publishes merge live
    if version == VERSION_DRAFT:
        backend.merge_backend_draft.assert_called_once()
    else:
        backend.merge_backend_live.assert_called_once()
    website.refresh_from_db()
    if trigger:
        # Pipeline should be fetched, unpaused, and triggered; build id recorded
        mock_api_funcs.mock_get_pipeline.assert_called_once_with(
            website, api=pipeline_api
        )
        pipeline.trigger_pipeline_build.assert_called_once_with(version)
        pipeline.unpause_pipeline.assert_called_once_with(version)
        assert getattr(website, f"latest_build_id_{version}") == build_id
    else:
        mock_api_funcs.mock_get_pipeline.assert_not_called()
        pipeline.trigger_pipeline_build.assert_not_called()
        pipeline.unpause_pipeline.assert_not_called()
        assert getattr(website, f"latest_build_id_{version}") is None
    assert getattr(website, f"{version}_publish_status") == PUBLISH_STATUS_NOT_STARTED
    assert getattr(
        website, f"has_unpublished_{version}"
    ) is (status == PUBLISH_STATUS_NOT_STARTED)
    assert getattr(website, f"{version}_last_published_by") is None
    assert getattr(website, f"{version}_publish_status_updated_on") is not None
    # Prepublish actions run only when configured AND requested
    if len(prepublish_actions) > 0 and prepublish:
        mock_api_funcs.mock_import_string.assert_any_call("some.Action")
        mock_api_funcs.mock_import_string.return_value.assert_any_call(
            website, version=version
        )
def test_websites_endpoint_detail_methods_denied(drf_client, method, status):
    """Disallowed HTTP methods on the detail endpoint should be rejected even for superusers"""
    site = WebsiteFactory.create()
    drf_client.force_login(UserFactory.create(is_superuser=True))
    detail_url = reverse("websites_api-detail", kwargs={"name": site.name})
    # Dispatch the parametrized HTTP method dynamically
    response = getattr(drf_client, method)(detail_url)
    assert response.status_code == status
def test_assign_group_permissions_error():
    """Assigning a nonexistent permission should raise with a descriptive message"""
    site = WebsiteFactory.create()
    bad_perm = "fake_perm_website"
    with pytest.raises(Permission.DoesNotExist) as exc:
        assign_website_permissions(site.editor_group, [bad_perm], website=site)
    # The exception message should name the missing permission
    assert exc.value.args == (f"Permission '{bad_perm}' not found",)
def test_update_website_backend(settings, mocker):
    """update_website_backend should enqueue the sync task when syncing is enabled"""
    settings.CONTENT_SYNC_BACKEND = "content_sync.backends.SampleBackend"
    mocker.patch("content_sync.api.is_sync_enabled", return_value=True)
    sync_task = mocker.patch("content_sync.tasks.sync_website_content")
    site = WebsiteFactory.create()
    api.update_website_backend(site)
    sync_task.delay.assert_called_once_with(site.name)
def test_get_sync_backend(settings, mocker):
    """get_sync_backend() should import and instantiate the backend named in settings"""
    settings.CONTENT_SYNC_BACKEND = "custom.backend.Backend"
    mocked_import = mocker.patch("content_sync.api.import_string")
    site = WebsiteFactory.create()
    api.get_sync_backend(site)
    # The dotted path from settings is imported, then called with the website
    mocked_import.assert_any_call("custom.backend.Backend")
    mocked_import.return_value.assert_any_call(site)
def test_create_website_publishing_pipeline_disabled(settings, mocker):
    """No pipeline upsert task should be enqueued when the pipeline backend is unset"""
    settings.CONTENT_SYNC_PIPELINE_BACKEND = None
    upsert_task = mocker.patch(
        "content_sync.api.tasks.upsert_website_publishing_pipeline.delay"
    )
    site = WebsiteFactory.create()
    api.create_website_publishing_pipeline(site)
    upsert_task.assert_not_called()
def test_create_website_publishing_pipeline(settings, mocker):
    """The pipeline upsert task should be enqueued when a pipeline backend is configured"""
    settings.CONTENT_SYNC_PIPELINE_BACKEND = "concourse"
    upsert_task = mocker.patch(
        "content_sync.api.tasks.upsert_website_publishing_pipeline.delay"
    )
    site = WebsiteFactory.create()
    api.create_website_publishing_pipeline(site)
    upsert_task.assert_called_once_with(site.name)
def test_get_sync_pipeline(settings, mocker, pipeline_api):
    """get_sync_pipeline() should construct the pipeline class named by settings"""
    settings.CONTENT_SYNC_PIPELINE_BACKEND = "concourse"
    pipeline_cls_mock = mocker.patch("content_sync.pipelines.concourse.SitePipeline")
    site = WebsiteFactory.create()
    api.get_sync_pipeline(site, api=pipeline_api)
    # The concourse SitePipeline is instantiated with the website and api client
    pipeline_cls_mock.assert_any_call(site, api=pipeline_api)
def test_update_website_backend_disabled(settings, mocker):
    """update_website_backend should be a no-op when syncing is disabled"""
    settings.CONTENT_SYNC_BACKEND = None
    mocker.patch("content_sync.api.is_sync_enabled", return_value=False)
    sync_task = mocker.patch("content_sync.tasks.sync_website_content")
    site = WebsiteFactory.create()
    api.update_website_backend(site)
    sync_task.delay.assert_not_called()