Example #1
0
def test_publish_endpoint_list(settings, drf_client, version):
    """The publish API list endpoint should return OCW-imported sites plus sites published for the requested version."""
    ocw_sites = WebsiteFactory.create_batch(
        2, source=constants.WEBSITE_SOURCE_OCW_IMPORT)
    draft_published = WebsiteFactory.create_batch(
        2,
        source=constants.WEBSITE_SOURCE_STUDIO,
        draft_publish_status=constants.PUBLISH_STATUS_NOT_STARTED,
        live_publish_status=None,
    )
    live_published = WebsiteFactory.create_batch(
        2,
        source=constants.WEBSITE_SOURCE_STUDIO,
        draft_publish_status=None,
        live_publish_status=constants.PUBLISH_STATUS_SUCCEEDED,
    )
    # Which studio sites should appear depends on the version query param
    if version == VERSION_DRAFT:
        expected_sites = ocw_sites + draft_published
    else:
        expected_sites = ocw_sites + live_published
    settings.API_BEARER_TOKEN = "abc123"
    drf_client.credentials(
        HTTP_AUTHORIZATION=f"Bearer {settings.API_BEARER_TOKEN}")
    resp = drf_client.get(f'{reverse("publish_api-list")}?version={version}')
    assert resp.status_code == 200
    # Index the response payload by site name for easy lookup
    sites_by_name = {site["name"]: site for site in resp.data["sites"]}
    assert len(sites_by_name) == 4
    for expected_site in expected_sites:
        publish_site = sites_by_name.get(expected_site.name)
        assert publish_site is not None
        assert publish_site["short_id"] == expected_site.short_id
Example #2
0
def websites(course_starter):
    """Create a mix of course and non-course websites for tests."""
    courses = WebsiteFactory.create_batch(
        3, published=True, starter=course_starter)
    noncourses = WebsiteFactory.create_batch(2, published=True)
    # Extra sites (no metadata, unpublished, future publish) — presumably
    # filtered out by the code under test; not returned to callers.
    WebsiteFactory.create(
        published=True, starter=course_starter, metadata=None)
    WebsiteFactory.create(unpublished=True, starter=course_starter)
    WebsiteFactory.create(future_publish=True)
    return SimpleNamespace(courses=courses, noncourses=noncourses)
Example #3
0
def permission_groups():
    """Set up groups, users, websites, and content for permission testing."""
    users = UserFactory.create_batch(5)
    global_admin, global_author, site_owner, site_admin, site_editor = users
    websites = WebsiteFactory.create_batch(2, owner=site_owner)
    # Global roles come from named Django groups; site roles from per-site groups
    global_admin.groups.add(Group.objects.get(name=constants.GLOBAL_ADMIN))
    global_author.groups.add(Group.objects.get(name=constants.GLOBAL_AUTHOR))
    site_admin.groups.add(websites[0].admin_group)
    site_editor.groups.add(websites[0].editor_group)

    # Content on the first website, authored by its owner and by the editor
    first_site = websites[0]
    owner_content = WebsiteContentFactory.create(
        website=first_site, owner=first_site.owner
    )
    editor_content = WebsiteContentFactory.create(
        website=first_site, owner=site_editor
    )

    yield SimpleNamespace(
        global_admin=global_admin,
        global_author=global_author,
        site_admin=site_admin,
        site_editor=site_editor,
        websites=websites,
        owner_content=owner_content,
        editor_content=editor_content,
    )
Example #4
0
def test_publish_website_batch(mocker, version, prepublish, trigger):
    """publish_website_batch should make the expected function calls"""
    mock_import_string = mocker.patch("content_sync.tasks.import_string")
    mock_publish_website = mocker.patch("content_sync.api.publish_website")
    mock_throttle = mocker.patch(
        "content_sync.tasks.api.throttle_git_backend_calls")
    website_names = sorted(
        site.name for site in WebsiteFactory.create_batch(3))
    # When triggering, a pipeline API object is built via import_string
    if trigger:
        expected_api = mock_import_string.return_value.get_api.return_value
    else:
        expected_api = None
    tasks.publish_website_batch(
        website_names, version, prepublish=prepublish, trigger_pipeline=trigger
    )
    for name in website_names:
        mock_publish_website.assert_any_call(
            name,
            version,
            pipeline_api=expected_api,
            prepublish=prepublish,
            trigger_pipeline=trigger,
        )
    assert mock_throttle.call_count == len(website_names)
    # import_string runs once per website, plus once more when a pipeline
    # API is needed to trigger the pipeline
    expected_imports = len(website_names) + (1 if trigger else 0)
    assert mock_import_string.call_count == expected_imports
Example #5
0
def test_upsert_website_pipeline_batch(mocker, settings, create_backend,
                                       unpause, check_limit):
    """upsert_website_pipeline_batch should make the expected function calls"""
    settings.GITHUB_RATE_LIMIT_CHECK = check_limit
    mock_get_backend = mocker.patch("content_sync.tasks.api.get_sync_backend")
    mock_get_pipeline = mocker.patch(
        "content_sync.tasks.api.get_sync_pipeline")
    mock_throttle = mocker.patch(
        "content_sync.tasks.api.throttle_git_backend_calls")
    # Sort the website objects by name so that websites[0] is the first one
    # processed by the task (the names below are passed in sorted order).
    # Previously websites kept factory creation order, which could disagree
    # with sorted-name order and make the api=None assertion flaky.
    websites = sorted(WebsiteFactory.create_batch(2),
                      key=lambda site: site.name)
    website_names = [website.name for website in websites]
    tasks.upsert_website_pipeline_batch(website_names,
                                        create_backend=create_backend,
                                        unpause=unpause)
    # The first website is processed with no api instance yet; subsequent
    # websites reuse whatever api object the task established
    mock_get_pipeline.assert_any_call(websites[0], api=None)
    mock_get_pipeline.assert_any_call(websites[1], api=mocker.ANY)
    if create_backend:
        for website in websites:
            mock_get_backend.assert_any_call(website)
        mock_throttle.assert_any_call(mock_get_backend.return_value)
        mock_backend = mock_get_backend.return_value
        assert mock_backend.create_website_in_backend.call_count == 2
        assert mock_backend.sync_all_content_to_backend.call_count == 2
    else:
        mock_get_backend.assert_not_called()
    mock_pipeline = mock_get_pipeline.return_value
    assert mock_pipeline.upsert_pipeline.call_count == 2
    if unpause:
        # Both draft and live pipelines should be unpaused
        mock_pipeline.unpause_pipeline.assert_any_call(VERSION_DRAFT)
        mock_pipeline.unpause_pipeline.assert_any_call(VERSION_LIVE)
    else:
        mock_pipeline.unpause_pipeline.assert_not_called()
Example #6
0
def test_create_gdrive_folders_batch(mocker):
    """create_gdrive_folders should make the expected function calls"""
    mock_create = mocker.patch(
        "gdrive_sync.tasks.api.create_gdrive_folders")
    short_ids = sorted(
        site.short_id for site in WebsiteFactory.create_batch(2))
    tasks.create_gdrive_folders_batch.delay(short_ids)
    # Each short_id should produce exactly one folder-creation call
    for short_id in short_ids:
        mock_create.assert_any_call(short_id)
Example #7
0
def test_create_gdrive_folders_batch_errors(mocker, has_error):
    """create_gdrive_folders_batch should return a list of short_ids that errored, or True if no errors"""
    short_ids = sorted(
        website.short_id for website in WebsiteFactory.create_batch(2)
    )
    # When has_error, the second call (sorted-id order) raises; the first succeeds
    side_effects = (
        [None, Exception("api error")] if has_error else [None, None]
    )
    mocker.patch(
        "gdrive_sync.tasks.api.create_gdrive_folders",
        side_effect=side_effects,
    )
    # short_ids is already sorted above — no need to re-sort at the call site
    result = create_gdrive_folders_batch(short_ids)
    assert result == ([short_ids[1]] if has_error else True)
Example #8
0
def test_create_gdrive_folders_chunked(  # pylint:disable=unused-argument
        mocker, mocked_celery, chunk_size, chunks):
    """create_gdrive_folders_chunked calls create_gdrive_folders_batch with correct arguments"""
    short_ids = sorted(
        site.short_id for site in WebsiteFactory.create_batch(3))
    mock_batch = mocker.patch(
        "gdrive_sync.tasks.create_gdrive_folders_batch.s")
    # mocked_celery aborts chord execution by raising TabError once the
    # task signatures have been built
    with pytest.raises(TabError):
        tasks.create_gdrive_folders_chunked.delay(
            short_ids,
            chunk_size=chunk_size,
        )
    # The first chunk is always created; a second chunk only when needed
    mock_batch.assert_any_call(short_ids[:chunk_size])
    if chunks > 1:
        mock_batch.assert_any_call(short_ids[chunk_size:])
Example #9
0
def test_sync_unsynced_websites(api_mock, backend_exists, create_backend,
                                delete):
    """
    Test that sync_all_content_to_backend is run on all websites needing a sync
    """
    api_mock.get_sync_backend.return_value.backend_exists.return_value = backend_exists
    # A fully published website whose content checksums already match —
    # it should be skipped by the sync task
    website_synced = WebsiteFactory.create(
        has_unpublished_live=False,
        has_unpublished_draft=False,
        live_publish_status=PUBLISH_STATUS_SUCCEEDED,
        draft_publish_status=PUBLISH_STATUS_SUCCEEDED,
        latest_build_id_live=1,
        latest_build_id_draft=2,
    )
    websites_unsynced = WebsiteFactory.create_batch(2)
    with mute_signals(post_save):
        ContentSyncStateFactory.create(
            current_checksum="a1",
            synced_checksum="a1",
            content=WebsiteContentFactory.create(website=website_synced),
        )
    # Each unsynced website gets content whose sync state needs syncing
    for unsynced_site in websites_unsynced:
        ContentSyncStateFactory.create_batch(
            2, content=WebsiteContentFactory.create(website=unsynced_site)
        )

    tasks.sync_unsynced_websites.delay(create_backends=create_backend,
                                       delete=delete)
    for website in websites_unsynced:
        api_mock.get_sync_backend.assert_any_call(website)
        website.refresh_from_db()
        # Publish/build state should be reset on every unsynced website
        assert website.has_unpublished_live is True
        assert website.has_unpublished_draft is True
        assert website.live_publish_status is None
        assert website.draft_publish_status is None
        assert website.latest_build_id_live is None
        assert website.latest_build_id_draft is None
    # The already-synced website must never reach get_sync_backend
    with pytest.raises(AssertionError):
        api_mock.get_sync_backend.assert_any_call(website_synced)
    mock_backend = api_mock.get_sync_backend.return_value
    expected_syncs = 2 if (create_backend or backend_exists) else 0
    assert mock_backend.sync_all_content_to_backend.call_count == expected_syncs
    expected_deletes = (
        2 if delete and (create_backend or backend_exists) else 0
    )
    assert (mock_backend.delete_orphaned_content_in_backend.call_count
            == expected_deletes)
Example #10
0
def test_publish_websites(  # pylint:disable=unused-argument,too-many-arguments
    mocker,
    mocked_celery,
    api_mock,
    version,
    chunk_size,
    chunks,
    prepublish,
    has_mass_publish,
):
    """publish_websites calls upsert_pipeline_batch with correct arguments"""
    website_names = sorted(
        site.name for site in WebsiteFactory.create_batch(3))
    mock_batch = mocker.patch("content_sync.tasks.publish_website_batch.s")
    # With a mass-publish pipeline available, per-site pipeline triggering
    # is skipped in favor of one mass publish at the end
    mocker.patch(
        "content_sync.tasks.api.get_mass_publish_pipeline",
        return_value=(mocker.Mock() if has_mass_publish else None),
    )
    mock_mass_pub = mocker.patch("content_sync.tasks.trigger_mass_publish.si")
    # mocked_celery raises TabError to halt the chord after signatures build
    with pytest.raises(TabError):
        tasks.publish_websites.delay(website_names,
                                     version,
                                     chunk_size=chunk_size,
                                     prepublish=prepublish)
    trigger = not has_mass_publish
    mock_batch.assert_any_call(
        website_names[:chunk_size],
        version,
        prepublish=prepublish,
        trigger_pipeline=trigger,
    )
    if chunks > 1:
        mock_batch.assert_any_call(
            website_names[chunk_size:],
            version,
            prepublish=prepublish,
            trigger_pipeline=trigger,
        )
    if has_mass_publish:
        mock_mass_pub.assert_called_once_with(version)
    else:
        mock_mass_pub.assert_not_called()
Example #11
0
def test_upsert_pipelines(  # pylint:disable=too-many-arguments, unused-argument
        mocker, mocked_celery, create_backend, unpause, chunk_size, chunks):
    """upsert_pipelines calls upsert_pipeline_batch with correct arguments"""
    website_names = sorted(
        site.name for site in WebsiteFactory.create_batch(3))
    mock_batch = mocker.patch(
        "content_sync.tasks.upsert_website_pipeline_batch.s")
    # mocked_celery raises TabError to abort execution after the task
    # signatures have been constructed
    with pytest.raises(TabError):
        tasks.upsert_pipelines.delay(
            website_names,
            create_backend=create_backend,
            unpause=unpause,
            chunk_size=chunk_size,
        )
    # First chunk always present; remainder chunk only when names overflow
    mock_batch.assert_any_call(website_names[:chunk_size],
                               create_backend=create_backend,
                               unpause=unpause)
    if chunks > 1:
        mock_batch.assert_any_call(website_names[chunk_size:],
                                   create_backend=create_backend,
                                   unpause=unpause)