def test_import_all_xpro_files(settings, mocker, mocked_celery, mock_blacklist):
    """import_all_xpro_files should start chunked get_xpro_files tasks for published xPro courses only"""
    setup_s3(settings)
    get_xpro_files_mock = mocker.patch(
        "course_catalog.tasks.get_xpro_files", autospec=True
    )
    # only published xpro courses should be picked up ...
    courses = CourseFactory.create_batch(
        3, platform=PlatformType.xpro.value, published=True
    )
    # ... these unpublished non-xpro courses must be excluded
    CourseFactory.create_batch(3, platform=PlatformType.oll.value, published=False)

    # mocked_celery raises replace_exception_class when the task replaces
    # itself with the group, which ends execution at that point
    with pytest.raises(mocked_celery.replace_exception_class):
        import_all_xpro_files.delay(3)

    assert mocked_celery.group.call_count == 1
    get_xpro_files_mock.si.assert_called_once_with([course.id for course in courses])
def test_serialize_bulk_courses(mocker):
    """
    Test that serialize_bulk_courses calls serialize_course_for_bulk for every existing course
    """
    serialize_mock = mocker.patch("search.serializers.serialize_course_for_bulk")
    expected_courses = CourseFactory.create_batch(5)
    course_ids = [course.id for course in Course.objects.all()]
    # exhaust the generator so the per-course serializer actually runs
    list(serialize_bulk_courses(course_ids))
    for expected in expected_courses:
        serialize_mock.assert_any_call(expected)
def test_index_content_files(mocker):
    """
    ES should try indexing content files for all runs in a course
    """
    run_index_mock = mocker.patch(
        "search.indexing_api.index_run_content_files", autospec=True
    )
    created_courses = CourseFactory.create_batch(2)

    index_course_content_files([created.id for created in created_courses])

    # every run of every course must have been handed to the run-level indexer
    for created in created_courses:
        for course_run in created.runs.all():
            run_index_mock.assert_any_call(course_run.id)
def test_popular_content_types(client, user, mocker):
    """Test the popular content types API"""
    # Make two of each content type; only the first of each pair gets
    # interactions below, so the second must never appear in the results.
    course, _ = CourseFactory.create_batch(2)
    bootcamp, _ = BootcampFactory.create_batch(2)
    program, _ = ProgramFactory.create_batch(2)
    user_list, _ = UserListFactory.create_batch(2)
    video, _ = VideoFactory.create_batch(2)

    # give each content object a strictly increasing interaction count
    interactions = []
    for count, content in enumerate([user_list, bootcamp, video, course, program]):
        batch = ContentTypeInteractionFactory.create_batch(count + 1, content=content)
        interactions.append(batch[0])

    response = client.get(reverse("popular_content-list"))

    # the response should be ordered such that items with a higher count of
    # interactions come first — i.e. the reverse of the order in which
    # `interactions` was built above
    expected_items = [
        {
            "content_type_id": interaction.content_type_id,
            "content_id": interaction.content_id,
        }
        for interaction in reversed(interactions)
    ]
    assert response.json() == {
        "results": PopularContentSerializer(
            expected_items,
            many=True,
            context={"request": mocker.Mock(user=user)},
        ).data,
        "next": None,
        "previous": None,
        "count": len(interactions),
    }
def test_load_program(
    mock_upsert_tasks,
    program_exists,
    is_published,
    courses_exist,
    has_prices,
    has_retired_course,
):  # pylint: disable=too-many-arguments
    """Test that load_program loads the program"""
    # Build (unsaved) or create (saved) the fixtures according to the
    # parametrized existence flags.
    program = (
        ProgramFactory.create(published=is_published, runs=[])
        if program_exists
        else ProgramFactory.build(published=is_published, runs=[])
    )
    courses = (
        CourseFactory.create_batch(2, platform="fake-platform")
        if courses_exist
        else CourseFactory.build_batch(2, platform="fake-platform")
    )
    prices = CoursePriceFactory.build_batch(2) if has_prices else []

    # Expected Course row counts before and after load_program runs; only
    # created (saved) courses contribute to the "before" count.
    before_course_count = len(courses) if courses_exist else 0
    after_course_count = len(courses)

    if program_exists and has_retired_course:
        # A "retired" course: linked to the program beforehand but not
        # present in the payload handed to load_program below.
        course = CourseFactory.create(platform="fake-platform")
        before_course_count += 1
        after_course_count += 1
        ProgramItem.objects.create(
            program=program,
            content_type=ContentType.objects.get(model="course"),
            object_id=course.id,
            position=1,
        )
        assert program.items.count() == 1
    else:
        assert program.items.count() == 0

    # Sanity-check the database state before loading.
    assert Program.objects.count() == (1 if program_exists else 0)
    assert Course.objects.count() == before_course_count

    # Single run payload attached to the program; prices present only when
    # has_prices is set.
    run_data = {
        "prices": [
            {
                "price": price.price,
                "mode": price.mode,
                "upgrade_deadline": price.upgrade_deadline,
            }
            for price in prices
        ],
        "platform": PlatformType.mitx.value,
        "run_id": program.program_id,
        "enrollment_start": "2017-01-01T00:00:00Z",
        "start_date": "2017-01-20T00:00:00Z",
        "end_date": "2017-06-20T00:00:00Z",
        "best_start_date": "2017-06-20T00:00:00Z",
        "best_end_date": "2017-06-20T00:00:00Z",
    }
    result = load_program(
        {
            "program_id": program.program_id,
            "title": program.title,
            "url": program.url,
            "image_src": program.image_src,
            "published": is_published,
            "runs": [run_data],
            "courses": [
                {"course_id": course.course_id, "platform": course.platform}
                for course in courses
            ],
        },
        [],
        [],
    )

    # Which search-index task fires depends on the publish state of the
    # (pre-existing or newly loaded) program.
    if program_exists and not is_published:
        mock_upsert_tasks.delete_program.assert_called_with(result)
    elif is_published:
        mock_upsert_tasks.upsert_program.assert_called_with(result.id)
    else:
        mock_upsert_tasks.delete_program.assert_not_called()
        mock_upsert_tasks.upsert_program.assert_not_called()

    assert Program.objects.count() == 1
    assert Course.objects.count() == after_course_count

    # assert we got a program back and that each course is in a program
    assert isinstance(result, Program)
    assert result.items.count() == len(courses)
    assert result.runs.count() == 1
    assert result.runs.first().prices.count() == len(prices)
    # compare price tuples order-independently
    assert sorted(
        [
            (price.price, price.mode, price.upgrade_deadline)
            for price in result.runs.first().prices.all()
        ]
    ) == sorted(
        [(price.price, price.mode, price.upgrade_deadline) for price in prices]
    )
    assert result.runs.first().best_start_date == _parse_datetime(
        run_data["best_start_date"]
    )
    # each program item should wrap a Course matching the input payload order
    for item, data in zip(result.items.all(), courses):
        course = item.item
        assert isinstance(course, Course)
        assert course.course_id == data.course_id