def test_baseurl_replacer_replaces_content_in_same_course(
    website_uuid, should_markdown_change
):
    """
    Double check that if the dirpath + filename match multiple times, the
    content chosen is from the same course as the markdown being edited
    """

    markdown = R"""
    Kittens [meow]({{< baseurl >}}/resources/pets/cat) meow.
    """
    w1 = WebsiteFactory.build(uuid="website-uuid-111")
    w2 = WebsiteFactory.build(uuid="website-uuid-222")
    websites = {w.uuid: w for w in [w1, w2]}
    target_content = WebsiteContentFactory.build(markdown=markdown, website=w1)

    linkable = WebsiteContentFactory.build(
        website=websites[website_uuid],
        dirpath="content/resources/pets",
        filename="cat",
        text_id="uuid-111",
    )

    cleaner = get_markdown_cleaner([linkable])
    cleaner.update_website_content(target_content)

    is_markdown_changed = target_content.markdown != markdown
    assert is_markdown_changed == should_markdown_change
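
The parametrization that drives website_uuid and should_markdown_change is not part of the listing. A minimal sketch of what it plausibly looks like, using the two uuids built inside the test (the pairing itself is an assumption):

import pytest

# Hypothetical decorator for the test above; the cases are illustrative, not from the source.
@pytest.mark.parametrize(
    ("website_uuid", "should_markdown_change"),
    [
        ("website-uuid-111", True),   # linkable is in the same course: link gets replaced
        ("website-uuid-222", False),  # linkable is in another course: markdown stays unchanged
    ],
)
def test_baseurl_replacer_replaces_content_in_same_course(
    website_uuid, should_markdown_change
):
    ...
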
def test_website_serializer(has_starter):
    """WebsiteSerializer should serialize a Website object with the correct fields"""
    website = (WebsiteFactory.build() if has_starter else WebsiteFactory.build(
        starter=None))
    serialized_data = WebsiteSerializer(instance=website).data
    assert serialized_data["name"] == website.name
    assert serialized_data["title"] == website.title
    assert serialized_data["metadata"] == website.metadata
    assert "config" not in serialized_data
def test_url_site_relativiser(url, expected_index, expected_relative_url):
    w1 = WebsiteFactory.build(name="website_zero")
    w2 = WebsiteFactory.build(name="website_one")
    sites = [w1, w2]
    with patch_website_all(sites):
        get_site_relative_url = UrlSiteRelativiser()

        assert get_site_relative_url(url) == (
            sites[expected_index],
            expected_relative_url,
        )
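
No parametrization is shown for test_url_site_relativiser. A hedged sketch, assuming UrlSiteRelativiser maps a root-relative course URL to the owning site plus the site-relative remainder (the URL shape and the values below are assumptions):

import pytest

# Hypothetical parametrization; URL format and expected values are illustrative only.
@pytest.mark.parametrize(
    ("url", "expected_index", "expected_relative_url"),
    [
        ("/courses/website_zero/resources/lecture-1", 0, "/resources/lecture-1"),
        ("/courses/website_one/pages/assignments", 1, "/pages/assignments"),
    ],
)
def test_url_site_relativiser(url, expected_index, expected_relative_url):
    ...
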
    def get_updated_content_and_parent(update_field):
        """Run update_content_from_s3_data with test data and return content, parent"""
        website = WebsiteFactory.build()
        content = WebsiteContentFactory.build(
            markdown="original markdown",
            metadata={"title": "original title"},
            website=website,
        )
        content.save = Mock()
        # prepare the parent, but do not set content.parent_id.
        # that's one of the things we'll test
        parent = WebsiteContentFactory.build(id=123)

        s3_content_data = {
            "markdown": "s3 markdown",
            "metadata": {
                "title": "s3 title",
                "author": "s3 author",
                "parent_uid": "s3_parent_uid",
            },
            "parent": parent,
        }
        with patch("websites.models.WebsiteContent.objects") as mock:
            mock.filter.return_value.first.return_value = content
            website = content.website
            text_id = content.text_id
            update_content_from_s3_data(website, text_id, s3_content_data,
                                        update_field)

        return content, parent
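
The helper above returns the updated content and its prepared parent; a hedged usage sketch in the same scope, assuming update_field=None means every field is updated from the S3 data:

    # Hedged usage; the update_field=None semantics are an assumption, not taken from the source.
    content, parent = get_updated_content_and_parent(None)
    assert content.markdown == "s3 markdown"
    assert content.metadata["title"] == "s3 title"
    assert content.parent_id == parent.id
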
def test_website_detail_serializer(settings, has_starter, drive_folder,
                                   drive_credentials):
    """WebsiteDetailSerializer should serialize a Website object with the correct fields, including config"""
    settings.DRIVE_SERVICE_ACCOUNT_CREDS = drive_credentials
    settings.DRIVE_SHARED_ID = "abc123"
    settings.DRIVE_UPLOADS_PARENT_FOLDER_ID = None
    website = WebsiteFactory.build(
        gdrive_folder=drive_folder,
        starter=(WebsiteStarterFactory.create() if has_starter else None),
    )
    serialized_data = WebsiteDetailSerializer(instance=website).data
    assert serialized_data["name"] == website.name
    assert serialized_data["title"] == website.title
    assert serialized_data["metadata"] == website.metadata
    assert serialized_data["starter"] == (WebsiteStarterDetailSerializer(
        instance=website.starter).data if has_starter else None)
    assert serialized_data["source"] == website.source
    assert parse_date(serialized_data["publish_date"]) == website.publish_date
    assert (parse_date(
        serialized_data["draft_publish_date"]) == website.draft_publish_date)
    assert serialized_data["live_url"] == website.get_url("live")
    assert serialized_data["draft_url"] == website.get_url("draft")
    assert serialized_data[
        "has_unpublished_live"] == website.has_unpublished_live
    assert serialized_data[
        "has_unpublished_draft"] == website.has_unpublished_draft
    assert serialized_data["gdrive_url"] == (
        f"https://drive.google.com/drive/folders/abc123/{website.gdrive_folder}"
        if drive_credentials is not None and drive_folder is not None else
        None)
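
The fixtures/parametrization behind has_starter, drive_folder, and drive_credentials are not included. A plausible sketch with purely illustrative values:

import pytest

# Hypothetical parametrization; the folder id and credentials dict are illustrative only.
@pytest.mark.parametrize("has_starter", [True, False])
@pytest.mark.parametrize(
    ("drive_folder", "drive_credentials"),
    [("website-gdrive-folder", {"key": "value"}), (None, None)],
)
def test_website_detail_serializer(settings, has_starter, drive_folder,
                                   drive_credentials):
    ...
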
def test_website_status_serializer(mocker, settings, drive_folder, warnings):
    """WebsiteStatusSerializer should serialize a Website object with the correct status fields"""
    mocker.patch("websites.serializers.incomplete_content_warnings",
                 return_value=warnings)
    settings.DRIVE_UPLOADS_PARENT_FOLDER_ID = "dfg789"
    settings.DRIVE_SERVICE_ACCOUNT_CREDS = {"key": "value"}
    settings.DRIVE_SHARED_ID = "abc123"
    values = {
        "publish_date": "2021-11-01T00:00:00Z",
        "draft_publish_date": "2021-11-02T00:00:00Z",
        "has_unpublished_live": True,
        "has_unpublished_draft": False,
        "live_publish_status": "succeeded",
        "live_publish_status_updated_on": "2021-11-03T00:00:00Z",
        "draft_publish_status": "errored",
        "draft_publish_status_updated_on": "2021-11-04T00:00:00Z",
        "sync_status": "Complete",
        "sync_errors": ["error1"],
        "synced_on": "2021-11-05T00:00:00Z",
    }
    website = WebsiteFactory.build(gdrive_folder=drive_folder, **values)
    serialized_data = WebsiteStatusSerializer(instance=website).data
    assert serialized_data["gdrive_url"] == (
        f"https://drive.google.com/drive/folders/{settings.DRIVE_UPLOADS_PARENT_FOLDER_ID}/{website.gdrive_folder}"
        if drive_folder is not None else None)
    assert sorted(serialized_data["content_warnings"]) == sorted(warnings)
    for (key, value) in values.items():
        assert serialized_data.get(key) == value
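
As above, the parametrization for drive_folder and warnings is not shown; a hedged sketch with illustrative values:

import pytest

# Hypothetical parametrization; the warning strings are illustrative only.
@pytest.mark.parametrize("drive_folder", ["website-gdrive-folder", None])
@pytest.mark.parametrize("warnings", [[], ["Missing captions: video-1"]])
def test_website_status_serializer(mocker, settings, drive_folder, warnings):
    ...
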
def test_resolveuid_conversion_cross_site(markdown, expected):
    """Check shortcodes are used within same site."""
    target_content = WebsiteContentFactory.build(
        markdown=markdown, website=WebsiteFactory.build()
    )
    linked_content = WebsiteContentFactory.build(
        text_id="5cf754b2-b97b-4ac1-8dab-deed1201de94",
        dirpath="content/pages/path/to",
        filename="thing",
        website=WebsiteFactory.build(name="other-site-name"),
    )

    cleaner = get_markdown_cleaner([target_content, linked_content])
    cleaner.update_website_content(target_content)

    assert target_content.markdown == expected
def test_baseurl_replacer_handle_specific_url_replacements(
    url, content_relative_dirpath, filename
):
    """
    Test specific replacements

    This test could perhaps be dropped. It was written before ContentLookup was
    moved to a separate module, and the functionality is now tested there, too.
    """
    website_uuid = "website-uuid"
    website = WebsiteFactory.build(uuid=website_uuid)
    markdown = f"my [pets]({{{{< baseurl >}}}}{url}) are legion"
    expected_markdown = 'my {{% resource_link content-uuid "pets" %}} are legion'
    target_content = WebsiteContentFactory.build(markdown=markdown, website=website)

    linkable = WebsiteContentFactory.build(
        website=website,
        dirpath=f"content{content_relative_dirpath}",
        filename=filename,
        text_id="content-uuid",
    )

    cleaner = get_markdown_cleaner([linkable])
    cleaner.update_website_content(target_content)

    assert target_content.markdown == expected_markdown
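
The url / content_relative_dirpath / filename triples are supplied by a parametrize decorator that is not shown. A plausible sketch, with triples chosen to be self-consistent with the lookup performed above (values are assumptions):

import pytest

# Hypothetical parametrization; each triple pairs a baseurl path with the content that should match it.
@pytest.mark.parametrize(
    ("url", "content_relative_dirpath", "filename"),
    [
        ("/resources/pets/cat", "/resources/pets", "cat"),
        ("/pages/cute/pets", "/pages/cute/pets", "_index"),
    ],
)
def test_baseurl_replacer_handle_specific_url_replacements(
    url, content_relative_dirpath, filename
):
    ...
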
def test_rootrel_rule_handles_site_homepages_correctly(
    markdown, site_name, expected_markdown
):
    w1 = WebsiteFactory.build(name="site_one")
    w2 = WebsiteFactory.build(name="site_two")
    websites = {w.name: w for w in [w1, w2]}
    c1 = WebsiteContentFactory.build(website=w1,
                                     type="sitemetadata",
                                     filename="",
                                     dirpath="",
                                     text_id="uuid-1")
    content_to_clean = WebsiteContentFactory.build(website=websites[site_name],
                                                   markdown=markdown)

    cleaner = get_markdown_cleaner([w1], [c1])
    cleaner.update_website_content(content_to_clean)

    assert content_to_clean.markdown == expected_markdown
def test_legacy_shortcode_fix_one(markdown, expected_markdown):
    """Test specific replacements"""
    website = WebsiteFactory.build()
    target_content = WebsiteContentFactory.build(markdown=markdown, website=website)

    cleaner = Cleaner(LegacyShortcodeFixOne())
    cleaner.update_website_content(target_content)
    assert target_content.markdown == expected_markdown
def test_content_finder_returns_metadata_for_site(site_uuid, content_index):
    contents = [
        WebsiteContentFactory.build(
            website=WebsiteFactory.build(uuid="website_one"),
            type="sitemetadata",
            text_id="content-1",
        ),
        WebsiteContentFactory.build(
            website=WebsiteFactory.build(uuid="website_two"),
            type="sitemetadata",
            text_id="content-2",
        ),
    ]
    with patch_website_contents_all(contents):
        content_lookup = ContentLookup()
        assert (content_lookup.find_within_site(
            site_uuid, "/") == contents[content_index])
def test_rootrel_rule_only_uses_resource_lines_for_same_site(
        markdown, site_name, expected_markdown):
    w1 = WebsiteFactory.build(name="site_one")
    w2 = WebsiteFactory.build(name="site_two")
    websites = {w.name: w for w in [w1, w2]}
    c1 = WebsiteContentFactory.build(website=w1,
                                     filename="page1",
                                     dirpath="content/pages/stuff",
                                     text_id="uuid-1")

    content_to_clean = WebsiteContentFactory.build(website=websites[site_name],
                                                   markdown=markdown)

    cleaner = get_markdown_cleaner([w1], [c1])
    cleaner.update_website_content(content_to_clean)

    assert content_to_clean.markdown == expected_markdown
def test_shortcode_standardizer(text, expected):
    """Check that it removes extra args from resource shortcodes"""
    target_content = WebsiteContentFactory.build(
        markdown=text, website=WebsiteFactory.build())

    cleaner = get_markdown_cleaner()
    cleaner.update_website_content(target_content)

    assert target_content.markdown == expected
def test_resolveuid_leaves_stuff_alone_if_it_should(markdown, expected):
    """Check shortcodes are used within same site."""
    target_content = WebsiteContentFactory.build(
        markdown=markdown, website=WebsiteFactory.build()
    )

    cleaner = get_markdown_cleaner([target_content])
    cleaner.update_website_content(target_content)

    assert target_content.markdown == expected
def test_get_rootrelative_url_from_content():
    c1 = WebsiteContentFactory.build(
        website=WebsiteFactory.build(name="site-name-1"),
        dirpath="content/pages/path/to",
        filename="file1",
    )
    c2 = WebsiteContentFactory.build(
        website=WebsiteFactory.build(name="site-name-2"),
        dirpath="content/pages/assignments",
        filename="_index",
    )
    c3 = WebsiteContentFactory.build(
        website=WebsiteFactory.build(name="site-THREE"),
        dirpath="content/resources/long/path/to",
        filename="file3",
    )
    urls = [get_rootrelative_url_from_content(c) for c in [c1, c2, c3]]

    assert urls[0] == "/courses/site-name-1/pages/path/to/file1"
    assert urls[1] == "/courses/site-name-2/pages/assignments"
    assert urls[2] == "/courses/site-THREE/resources/long/path/to/file3"
def test_resolveuid_conversion_within_same_site(markdown, expected):
    """Check shortcodes are used within same site."""
    website = WebsiteFactory.build()
    target_content = WebsiteContentFactory.build(markdown=markdown, website=website)
    linked_content = WebsiteContentFactory.build(
        text_id="5cf754b2-b97b-4ac1-8dab-deed1201de94", website=website
    )

    cleaner = get_markdown_cleaner([target_content, linked_content])
    cleaner.update_website_content(target_content)

    assert target_content.markdown == expected
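
The markdown/expected pair for this test is not included. One plausible pairing, assuming the rule rewrites Plone-style ./resolveuid/<uuid> links into resource_link shortcodes; both the input form and the output form here are assumptions:

import pytest

# Hypothetical case; input and output formats are assumed, not taken from the source.
@pytest.mark.parametrize(
    ("markdown", "expected"),
    [(
        "See [the syllabus](./resolveuid/5cf754b2-b97b-4ac1-8dab-deed1201de94) for details.",
        'See {{% resource_link 5cf754b2-b97b-4ac1-8dab-deed1201de94 "the syllabus" %}} for details.',
    )],
)
def test_resolveuid_conversion_within_same_site(markdown, expected):
    ...
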
def test_rootrel_rule_uses_images_for_image(markdown, site_name,
                                            expected_markdown):
    w1 = WebsiteFactory.build(name="site_one")
    w2 = WebsiteFactory.build(name="site_two")
    websites = {w.name: w for w in [w1, w2]}
    c1 = WebsiteContentFactory.build(
        website=w1,
        text_id="uuid-1",
        file=f"only/last/part/matters/for/now/{string_uuid()}_old_image_filename123.jpg",
        # in general the new filename is the same as old,
        # possibly appended with "-1" or "-2" if there were duplicates
        filename="new_image_filename123.jpg",
        dirpath="content/resources",
    )
    content_to_clean = WebsiteContentFactory.build(website=websites[site_name],
                                                   markdown=markdown)
    cleaner = get_markdown_cleaner([w1], [c1])
    cleaner.update_website_content(content_to_clean)

    assert content_to_clean.markdown == expected_markdown
def test_content_finder_is_site_specific():
    """Test that ContentLookup is site specific"""
    content_w1 = WebsiteContentFactory.build(
        website=WebsiteFactory.build(uuid="website-uuid-1"),
        dirpath="content/resources/path/to",
        filename="file1",
        text_id="content-uuid-1",
    )
    content_w2 = WebsiteContentFactory.build(
        website=WebsiteFactory.build(uuid="website-uuid-2"),
        dirpath="content/resources/path/to",
        filename="file1",
        text_id="content-uuid-1",
    )

    with patch_website_contents_all([content_w1, content_w2]):
        content_lookup = ContentLookup()

        url = "/resources/path/to/file1"
        assert content_lookup.find_within_site(content_w1.website_id,
                                               url) == content_w1
        assert content_lookup.find_within_site(content_w2.website_id,
                                               url) == content_w2
def test_content_finder_specific_url_replacements(url,
                                                  content_relative_dirpath,
                                                  filename):
    content = WebsiteContentFactory.build(
        website=WebsiteFactory.build(uuid="website_uuid"),
        dirpath=f"content{content_relative_dirpath}",
        filename=filename,
        text_id="content-uuid",
    )

    with patch_website_contents_all([content]):
        content_lookup = ContentLookup()

        assert content_lookup.find_within_site("website_uuid", url) == content
def test_updates_multiple_metadata_fields():
    """
    Check that a single call to update_website_content modifies multiple fields
    for rules that have multiple fields associated.
    """
    assert len(MetadataRelativeUrlsFix.fields) > 1

    website = WebsiteFactory.build(name="site-1")
    wc1 = WebsiteContentFactory.build(
        filename="thing1",
        dirpath="content/resources",
        website=website,
    )
    wc2 = WebsiteContentFactory.build(filename="thing2",
                                      dirpath="content/pages/two",
                                      website=website)

    content_to_clean = WebsiteContentFactory.build(
        metadata={
            "related_resources_text": """Hello
                Change this: [to thing1](resources/thing1#fragment "And a title!") cool'
                
                Leave this alone: [wiki](https://wikipedia.org) same

                And this [course link](/courses/8-02/pages/jigawatts)
            """,
            "image_metadata": {
                "caption": "And now [thing2](pages/two/thing2)"
            },
        },
        website=website,
    )

    cleaner = get_markdown_cleaner([wc1, wc2])
    cleaner.update_website_content(content_to_clean)

    expected_related_resources = """Hello
                Change this: [to thing1](/courses/site-1/resources/thing1#fragment) cool'
                
                Leave this alone: [wiki](https://wikipedia.org) same

                And this [course link](/courses/8-02/pages/jigawatts)
            """
    expected_caption = "And now [thing2](/courses/site-1/pages/two/thing2)"
    assert (content_to_clean.metadata["related_resources_text"] ==
            expected_related_resources)
    assert content_to_clean.metadata["image_metadata"][
        "caption"] == expected_caption
def test_shortcode_standardizer():
    """Check that it replaces resource_file links as expected"""
    markdown = R"""
    Roar {{< cat uuid    "some \"text\" cool">}}

    {{< dog a     b >}}

    Hello world {{< wolf "a     b" >}}
    """
    target_content = WebsiteContentFactory.build(
        markdown=markdown, website=WebsiteFactory.build())

    cleaner = get_markdown_cleaner()
    cleaner.update_website_content(target_content)

    assert target_content.markdown == markdown
def test_baseurl_replacer_specific_title_replacements(markdown, expected_markdown):
    """Test specific replacements"""
    website_uuid = "website-uuid"
    website = WebsiteFactory.build(uuid=website_uuid)
    target_content = WebsiteContentFactory.build(markdown=markdown, website=website)

    linkable = WebsiteContentFactory.build(
        website=website,
        dirpath="content/resources/path/to",
        filename="file1",
        text_id="content-uuid-1",
    )

    cleaner = get_markdown_cleaner([linkable])
    cleaner.update_website_content(target_content)

    assert target_content.markdown == expected_markdown
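
The markdown/expected_markdown pairs are not shown; one illustrative pair, assuming the replacement follows the same pattern as the other baseurl tests in this listing (link text becomes the resource_link title):

import pytest

# Hypothetical pair; mirrors the replacement pattern used by the surrounding baseurl tests.
@pytest.mark.parametrize(
    ("markdown", "expected_markdown"),
    [(
        "Read [File One]({{< baseurl >}}/resources/path/to/file1) first.",
        'Read {{% resource_link content-uuid-1 "File One" %}} first.',
    )],
)
def test_baseurl_replacer_specific_title_replacements(markdown, expected_markdown):
    ...
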
def test_baseurl_replacer_handles_index_files():
    """Test specific replacements"""
    website_uuid = "website-uuid"
    website = WebsiteFactory.build(uuid=website_uuid)
    markdown = R"my [pets]({{< baseurl >}}/pages/cute/pets) are legion"
    expected_markdown = R'my {{% resource_link content-uuid "pets" %}} are legion'
    target_content = WebsiteContentFactory.build(markdown=markdown, website=website)

    linkable = WebsiteContentFactory.build(
        website=website,
        dirpath="content/pages/cute/pets",
        filename="_index",
        text_id="content-uuid",
    )

    cleaner = get_markdown_cleaner([linkable])
    cleaner.update_website_content(target_content)

    assert linkable.filename not in target_content.markdown
    assert target_content.markdown == expected_markdown
def test_deserialize_file_to_website_content(mocker):
    """deserialize_file_to_website_content should pick the correct serializer class and deserialize file contents"""
    mock_serializer = mocker.MagicMock(spec=BaseContentFileSerializer)
    patched_serializer_factory = mocker.patch(
        "content_sync.serializers.ContentFileSerializerFactory", autospec=True
    )
    patched_serializer_factory.for_file.return_value = mock_serializer
    website = WebsiteFactory.build()
    site_config = SiteConfig(website.starter.config)
    filepath, file_contents = "/my/file.md", "..."
    deserialized = deserialize_file_to_website_content(
        website=website,
        site_config=site_config,
        filepath=filepath,
        file_contents=file_contents,
    )

    patched_serializer_factory.for_file.assert_called_once_with(site_config, filepath)
    mock_serializer.deserialize.assert_called_once_with(
        website=website,
        filepath=filepath,
        file_contents=file_contents,
    )
    assert deserialized == mock_serializer.deserialize.return_value
def test_permissions_group_name_for_role(role, group_prefix):
    """permissions_group_for_role should return the correct group name for a website and role"""
    website = WebsiteFactory.build()
    assert (permissions_group_name_for_role(
        role, website) == f"{group_prefix}{website.uuid.hex}")
def test_permissions_group_name_for_global_admin():
    """permissions_group_for_role should return the correct group name for global admins"""
    website = WebsiteFactory.build()
    assert (permissions_group_name_for_role(constants.ROLE_GLOBAL,
                                            website) == constants.GLOBAL_ADMIN)
def test_permissions_group_for_role_invalid(role):
    """permissions_group_for_role should raise a ValueError for an invalid role"""
    website = WebsiteFactory.build()
    with pytest.raises(ValueError) as exc:
        permissions_group_name_for_role(role, website)
    assert exc.value.args == (f"Invalid role for a website group: {role}", )
def test_baseurl_replacer_replaces_baseurl_links():
    """replace_baseurl_links should replace multiple links with expected values"""

    markdown = R"""
    « [Previous]({{< baseurl >}}/pages/reducing-problem) | [Next]({{< baseurl >}}/pages/vibration-analysis) »

    ### Lecture Videos

    *   Watch [Lecture 21: Vibration Isolation]({{< baseurl >}}/resources/lecture-21)
        *   Video Chapters
            *   [Demonstration of a vibration isolation system-strobe light and vibrating beam]({{< baseurl >}}/resources/demos/vibration-isolation)
            * Euler's formula

    Wasn't [the video]({{< baseurl >}}/resources/lecture-21) fun? Yes it was!
    """

    expected = R"""
    « {{% resource_link uuid-111 "Previous" %}} | {{% resource_link uuid-222 "Next" %}} »

    ### Lecture Videos

    *   Watch {{% resource_link uuid-333 "Lecture 21: Vibration Isolation" %}}
        *   Video Chapters
            *   {{% resource_link uuid-444 "Demonstration of a vibration isolation system-strobe light and vibrating beam" %}}
            * Euler's formula

    Wasn't {{% resource_link uuid-333 "the video" %}} fun? Yes it was!
    """

    website = WebsiteFactory.build()
    target_content = WebsiteContentFactory.build(markdown=markdown, website=website)

    linked_contents = [
        WebsiteContentFactory.build(website=website, **kwargs)
        for kwargs in [
            {
                "dirpath": "content/pages",
                "filename": "reducing-problem",
                "text_id": "uuid-111",
            },
            {
                "dirpath": "content/pages",
                "filename": "vibration-analysis",
                "text_id": "uuid-222",
            },
            {
                "dirpath": "content/resources",
                "filename": "lecture-21",
                "text_id": "uuid-333",
            },
            {
                "dirpath": "content/resources/demos",
                "filename": "vibration-isolation",
                "text_id": "uuid-444",
            },
        ]
    ]

    cleaner = get_markdown_cleaner(linked_contents)
    cleaner.update_website_content(target_content)
    assert target_content.markdown == expected