def test_upcoming_courses_endpoint(client, kwargs, is_upcoming):
    """Test upcoming courses endpoint"""
    course = CourseFactory.create(runs=None)
    LearningResourceRunFactory.create(content_object=course, **kwargs)

    # this should be filtered out
    CourseFactory.create(runs=None)

    resp = client.get(reverse("courses-list") + "upcoming/")

    assert resp.data.get("count") == (1 if is_upcoming else 0)
    if is_upcoming:
        assert resp.data.get("results")[0]["id"] == course.id
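
# For context, a minimal sketch of the kind of filtering the "upcoming/" action
# above exercises (hypothetical helper, not the actual viewset code; the
# runs__best_start_date lookup is an assumption): keep only courses that have
# at least one run starting in the future.
def _sketch_upcoming_courses(queryset):
    """Filter a Course queryset to courses with a run starting in the future (sketch)."""
    from django.utils import timezone

    return queryset.filter(runs__best_start_date__gt=timezone.now()).distinct()
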
def test_es_run_serializer(has_full_name):
    """
    Test that ESRunSerializer correctly serializes a run object
    """
    learning_resource_run = (
        LearningResourceRunFactory.create()
        if has_full_name
        else LearningResourceRunFactory.create(instructors__full_name=None)
    )
    serialized = ESRunSerializer(learning_resource_run).data

    assert_json_equal(
        serialized,
        {
            "id": learning_resource_run.id,
            "run_id": learning_resource_run.run_id,
            "short_description": learning_resource_run.short_description,
            "full_description": learning_resource_run.full_description,
            "language": learning_resource_run.language,
            "semester": learning_resource_run.semester,
            "year": int(learning_resource_run.year),
            "level": learning_resource_run.level,
            "start_date": learning_resource_run.start_date.strftime(ISOFORMAT),
            "end_date": learning_resource_run.end_date.strftime(ISOFORMAT),
            "enrollment_start": learning_resource_run.enrollment_start.strftime(
                ISOFORMAT
            ),
            "enrollment_end": learning_resource_run.enrollment_end.strftime(ISOFORMAT),
            "best_start_date": learning_resource_run.best_start_date,
            "best_end_date": learning_resource_run.best_end_date,
            "title": learning_resource_run.title,
            "image_src": learning_resource_run.image_src,
            "instructors": [
                (
                    instructor.full_name
                    if has_full_name
                    else " ".join([instructor.first_name, instructor.last_name])
                )
                for instructor in learning_resource_run.instructors.all()
            ],
            "prices": [
                ESCoursePriceSerializer(price).data
                for price in learning_resource_run.prices.all()
            ],
            "published": True,
            "availability": learning_resource_run.availability,
            "offered_by": list(
                learning_resource_run.offered_by.values_list("name", flat=True)
            ),
        },
    )
def test_load_content_file_error(mocker):
    """Test that an exception in load_content_file is logged"""
    learning_resource_run = LearningResourceRunFactory.create()
    mock_log = mocker.patch("course_catalog.etl.loaders.log.exception")

    load_content_file(learning_resource_run, {"uid": "badfile", "bad": "data"})

    mock_log.assert_called_once_with(
        "ERROR syncing course file %s for run %d", "badfile", learning_resource_run.id
    )
def test_load_run(run_exists):
    """Test that load_run loads the course run"""
    course = CourseFactory.create(runs=None)
    learning_resource_run = (
        LearningResourceRunFactory.create(content_object=course)
        if run_exists
        else LearningResourceRunFactory.build()
    )
    props = model_to_dict(
        LearningResourceRunFactory.build(
            run_id=learning_resource_run.run_id,
            platform=learning_resource_run.platform,
        )
    )
    del props["content_type"]
    del props["object_id"]
    del props["id"]

    assert LearningResourceRun.objects.count() == (1 if run_exists else 0)

    result = load_run(course, props)

    assert LearningResourceRun.objects.count() == 1
    assert result.content_object == course

    # assert we got a course run back
    assert isinstance(result, LearningResourceRun)

    for key, value in props.items():
        assert getattr(result, key) == value, f"Property {key} should equal {value}"
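
# For context, a rough sketch of the upsert behavior test_load_run checks above
# (hypothetical and simplified, not the actual loader): the run is looked up by
# its natural key (run_id + platform) and updated in place, so loading the same
# run twice never creates a duplicate row.
def _sketch_load_run(course, run_props):
    """Create or update a LearningResourceRun attached to ``course`` (sketch)."""
    props = dict(run_props)
    run_id = props.pop("run_id")
    platform = props.pop("platform")
    run, _ = LearningResourceRun.objects.update_or_create(
        run_id=run_id,
        platform=platform,
        defaults={
            **props,
            "content_type": ContentType.objects.get_for_model(course),
            "object_id": course.id,
        },
    )
    return run
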
def test_sync_xpro_course_files(mock_xpro_learning_bucket, mocker):
    """Sync xPro course files from a tarball stored in S3"""
    mock_xpro_learning_bucket.bucket.put_object(
        Key="path/to/exported_courses_123.tar.gz",
        Body=open("test_json/exported_courses_12345.tar.gz", "rb").read(),
        ACL="public-read",
    )
    mock_load_content_files = mocker.patch(
        "course_catalog.api.load_content_files", autospec=True, return_value=[]
    )
    course_content_type = ContentType.objects.get_for_model(Course)
    run = LearningResourceRunFactory.create(
        platform=PlatformType.xpro.value,
        run_id="content-devops-0001",
        content_type=course_content_type,
    )
    course_id = run.object_id
    fake_data = '{"key": "data"}'
    mock_log = mocker.patch("course_catalog.api.log.exception")
    mock_transform = mocker.patch(
        "course_catalog.api.transform_content_files_xpro", return_value=fake_data
    )

    sync_xpro_course_files(ids=[course_id])

    assert mock_transform.call_count == 1
    assert mock_transform.call_args[0][0].endswith("content-devops-0001.tar.gz") is True
    mock_load_content_files.assert_called_once_with(run, fake_data)
    mock_log.assert_not_called()
def test_sync_xpro_course_files_no_courses(mock_xpro_learning_bucket, mocker):
    """If there are no matching runs for the given courses, the sync should be skipped"""
    mock_xpro_learning_bucket.bucket.put_object(
        Key="path/to/exported_courses_123.tar.gz",
        Body=open("test_json/exported_courses_12345.tar.gz", "rb").read(),
        ACL="public-read",
    )
    mock_load_content_files = mocker.patch(
        "course_catalog.api.load_content_files", autospec=True, return_value=[]
    )
    course_content_type = ContentType.objects.get_for_model(Course)
    LearningResourceRunFactory.create(
        platform=PlatformType.xpro.value,
        run_id="content-devops-0001",
        content_type=course_content_type,
    )

    sync_xpro_course_files(ids=[])

    mock_load_content_files.assert_not_called()
def test_load_instructors(instructor_exists):
    """Test that load_instructors creates and/or assigns instructors to the course run"""
    instructors = (
        CourseInstructorFactory.create_batch(3)
        if instructor_exists
        else CourseInstructorFactory.build_batch(3)
    )
    run = LearningResourceRunFactory.create(no_instructors=True)

    assert run.instructors.count() == 0

    load_instructors(
        run, [{"full_name": instructor.full_name} for instructor in instructors]
    )

    assert run.instructors.count() == len(instructors)
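
# A small sketch of the get-or-create assignment test_load_instructors expects
# (hypothetical; the CourseInstructor model name is an assumption): instructors
# are deduplicated by full name and attached to the run's instructor relation.
def _sketch_load_instructors(run, instructors_data):
    """Attach instructors to ``run``, creating them by full_name if needed (sketch)."""
    run.instructors.set(
        CourseInstructor.objects.get_or_create(full_name=data["full_name"])[0]
        for data in instructors_data
    )
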
def test_bulk_index_content_files(
    mocked_es, mocker, settings, errors, indexing_func_name, doc
):  # pylint: disable=too-many-arguments
    """
    Index functions for content files should call bulk with correct arguments
    """
    settings.ELASTICSEARCH_INDEXING_CHUNK_SIZE = 3
    course = CourseFactory.create()
    run = LearningResourceRunFactory.create(content_object=course)
    content_files = ContentFileFactory.create_batch(5, run=run)
    mock_get_aliases = mocker.patch(
        "search.indexing_api.get_active_aliases", autospec=True, return_value=["a", "b"]
    )
    bulk_mock = mocker.patch(
        "search.indexing_api.bulk", autospec=True, return_value=(0, errors)
    )
    mocker.patch(
        "search.indexing_api.serialize_content_file_for_bulk",
        autospec=True,
        return_value=doc,
    )
    mocker.patch(
        "search.indexing_api.serialize_content_file_for_bulk_deletion",
        autospec=True,
        return_value=doc,
    )

    index_func = getattr(indexing_api, indexing_func_name)
    if errors:
        with pytest.raises(ReindexException):
            index_func(run.id)
    else:
        index_func(run.id)

        for alias in mock_get_aliases.return_value:
            for chunk in chunks(
                [doc for _ in content_files],
                chunk_size=settings.ELASTICSEARCH_INDEXING_CHUNK_SIZE,
            ):
                bulk_mock.assert_any_call(
                    mocked_es.conn,
                    chunk,
                    index=alias,
                    doc_type=GLOBAL_DOC_TYPE,
                    chunk_size=settings.ELASTICSEARCH_INDEXING_CHUNK_SIZE,
                    routing=gen_course_id(course.platform, course.course_id),
                )
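
# The assertions above depend on documents being sent to Elasticsearch in
# fixed-size chunks, once per active alias. A minimal sketch of that chunking
# pattern (hypothetical; the real indexing_api also handles routing, doc types,
# and error collection), where ``bulk`` stands in for elasticsearch.helpers.bulk:
def _sketch_chunked_bulk_index(conn, documents, aliases, chunk_size):
    """Send ``documents`` to every alias in batches of ``chunk_size`` (sketch)."""
    docs = list(documents)
    for alias in aliases:
        for start in range(0, len(docs), chunk_size):
            chunk = docs[start : start + chunk_size]
            bulk(conn, chunk, index=alias, chunk_size=chunk_size)
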
def test_load_content_files(mocker, is_published):
    """Test that load_content_files calls the expected functions"""
    course_run = LearningResourceRunFactory.create(published=is_published)
    content_data = [{"a": "b"}, {"a": "c"}]

    mock_load_content_file = mocker.patch(
        "course_catalog.etl.loaders.load_content_file", autospec=True
    )
    mock_bulk_index = mocker.patch(
        "course_catalog.etl.loaders.search_task_helpers.index_run_content_files",
        autospec=True,
    )
    mock_bulk_delete = mocker.patch(
        "course_catalog.etl.loaders.search_task_helpers.delete_run_content_files",
        autospec=True,
    )

    load_content_files(course_run, content_data)

    assert mock_load_content_file.call_count == len(content_data)
    assert mock_bulk_index.call_count == (1 if is_published else 0)
    assert mock_bulk_delete.call_count == (0 if is_published else 1)
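
# A condensed sketch of the branching test_load_content_files exercises above
# (hypothetical, not the actual loader): published runs get their content files
# (re)indexed, while unpublished runs have previously indexed files deleted.
# ``search_task_helpers`` is assumed importable the way the patch paths imply.
def _sketch_load_content_files(run, content_data):
    """Load content files for ``run`` and sync the search index (sketch)."""
    files = [load_content_file(run, item) for item in content_data]
    if run.published:
        search_task_helpers.index_run_content_files(run.id)
    else:
        search_task_helpers.delete_run_content_files(run.id)
    return files
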
def test_load_prices(prices_exist):
    """Test that load_prices creates and/or assigns prices to the parent object"""
    prices = (
        CoursePriceFactory.create_batch(3)
        if prices_exist
        else CoursePriceFactory.build_batch(3)
    )
    course_run = LearningResourceRunFactory.create(no_prices=True)

    assert course_run.prices.count() == 0

    load_prices(
        course_run,
        [
            {
                "price": price.price,
                "mode": price.mode,
                "upgrade_deadline": price.upgrade_deadline,
            }
            for price in prices
        ],
    )

    assert course_run.prices.count() == len(prices)
def test_load_content_file():
    """Test that load_content_file saves a ContentFile object"""
    learning_resource_run = LearningResourceRunFactory.create()

    props = model_to_dict(ContentFileFactory.build(run_id=learning_resource_run.id))
    props.pop("run")
    props.pop("id")

    result = load_content_file(learning_resource_run, props)

    assert ContentFile.objects.count() == 1
    assert result.run == learning_resource_run

    # assert we got a content file back
    assert isinstance(result, ContentFile)

    for key, value in props.items():
        assert getattr(result, key) == value, f"Property {key} should equal {value}"
def test_get_most_relevant_run():
    """Verify that get_most_relevant_run returns the correct run"""
    most_relevant_run = LearningResourceRunFactory.create(
        availability=AvailabilityType.archived.value,
        best_start_date=datetime(2019, 10, 1, tzinfo=pytz.utc),
        run_id="1",
    )
    LearningResourceRunFactory.create(
        availability=AvailabilityType.archived.value,
        best_start_date=datetime(2018, 10, 1, tzinfo=pytz.utc),
        run_id="2",
    )

    # the archived run with the latest best_start_date wins
    assert (
        get_most_relevant_run(LearningResourceRun.objects.filter(run_id__in=["1", "2"]))
        == most_relevant_run
    )

    most_relevant_run = LearningResourceRunFactory.create(
        availability=AvailabilityType.upcoming.value,
        best_start_date=datetime(2017, 10, 1, tzinfo=pytz.utc),
        run_id="3",
    )
    LearningResourceRunFactory.create(
        availability=AvailabilityType.upcoming.value,
        best_start_date=datetime(2020, 10, 1, tzinfo=pytz.utc),
        run_id="4",
    )

    # an upcoming run beats archived runs; the earliest upcoming best_start_date wins
    assert (
        get_most_relevant_run(
            LearningResourceRun.objects.filter(run_id__in=["1", "2", "3", "4"])
        )
        == most_relevant_run
    )

    most_relevant_run = LearningResourceRunFactory.create(
        availability=AvailabilityType.current.value, run_id="5"
    )

    # a currently-available run beats everything else
    assert (
        get_most_relevant_run(
            LearningResourceRun.objects.filter(run_id__in=["1", "2", "3", "4", "5"])
        )
        == most_relevant_run
    )
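
# For reference, a minimal sketch of the ranking the assertions above imply
# (hypothetical, not the actual course_catalog implementation): a current run
# beats everything, upcoming runs beat archived ones and are tie-broken by the
# soonest best_start_date, and archived runs are tie-broken by the latest one.
def _sketch_most_relevant_run(runs):
    """Pick the most relevant run from an iterable of runs (sketch)."""
    runs = list(runs)
    current = [
        run for run in runs if run.availability == AvailabilityType.current.value
    ]
    if current:
        return current[0]
    upcoming = [
        run for run in runs if run.availability == AvailabilityType.upcoming.value
    ]
    if upcoming:
        return min(upcoming, key=lambda run: run.best_start_date)
    return max(runs, key=lambda run: run.best_start_date)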