def test_list_method_filter_method_user_ids(self, monkeypatch, enrollment_test_data):
    site = enrollment_test_data['site']
    users = enrollment_test_data['users']
    caller = self.make_caller(site, users)
    other_site = SiteFactory()
    assert site.domain != other_site.domain
    check_user = users[0]
    LearnerCourseGradeMetricsFactory(site=other_site)
    LearnerCourseGradeMetricsFactory(site=site)
    # Make sure we only show records for our site for the selected user
    LearnerCourseGradeMetricsFactory(site=other_site, user=check_user)
    user_lcgm = [
        LearnerCourseGradeMetricsFactory(site=site, user=check_user)
        for i in range(3)
    ]
    request_path = self.base_request_path + '?user_ids=' + str(check_user.id)
    response = self.make_request(request_path=request_path,
                                 monkeypatch=monkeypatch,
                                 site=site,
                                 caller=caller,
                                 action='list')
    assert response.status_code == status.HTTP_200_OK
    assert is_response_paginated(response.data)
    results = response.data['results']
    # Check that we got back exactly the expected records
    result_ids = [obj['id'] for obj in results]
    assert set(result_ids) == set([obj.id for obj in user_lcgm])
    # Spot check the first record
    obj = LearnerCourseGradeMetrics.objects.get(id=results[0]['id'])
    self.check_serialized_data(results[0], obj)

def test_completed_method(self, monkeypatch, enrollment_test_data):
    site = enrollment_test_data['site']
    users = enrollment_test_data['users']
    caller = self.make_caller(site, users)
    other_site = SiteFactory()
    assert site.domain != other_site.domain
    # Create an LCGM record for the other site
    LearnerCourseGradeMetricsFactory(site=other_site,
                                     sections_worked=1,
                                     sections_possible=1)
    # Create an LCGM record for our site that is not completed
    LearnerCourseGradeMetricsFactory(site=site,
                                     sections_worked=1,
                                     sections_possible=5)
    completed_lcgm = [
        LearnerCourseGradeMetricsFactory(site=site,
                                         sections_worked=5,
                                         sections_possible=5)
        for i in range(3)
    ]
    request_path = self.base_request_path + '/completed/'
    response = self.make_request(request_path=request_path,
                                 monkeypatch=monkeypatch,
                                 site=site,
                                 caller=caller,
                                 action='completed')
    assert response.status_code == status.HTTP_200_OK
    assert is_response_paginated(response.data)
    results = response.data['results']
    # Check keys
    result_ids = [obj['id'] for obj in results]
    assert set(result_ids) == set([obj.id for obj in completed_lcgm])
    # Spot check the first record
    obj = LearnerCourseGradeMetrics.objects.get(id=results[0]['id'])
    self.check_serialized_data(results[0], obj)

def setUp(self):
    self.site = SiteFactory()
    self.not_complete = LearnerCourseGradeMetricsFactory(site=self.site,
                                                         sections_worked=1,
                                                         sections_possible=2)
    self.complete = LearnerCourseGradeMetricsFactory(site=self.site,
                                                     sections_worked=2,
                                                     sections_possible=2)
    self.site_qs = LearnerCourseGradeMetrics.objects.filter(site=self.site)
    self.filter = EnrollmentMetricsFilter(queryset=self.site_qs)

def test_bulk_calculate_course_progress_data_happy_path(db, monkeypatch):
    """Tests 'bulk_calculate_course_progress_data' function

    The function under test iterates over a set of course enrollment records,
    so we create a couple of records to iterate over and mock the collect
    function.
    """
    course_overview = CourseOverviewFactory()
    course_enrollments = [
        CourseEnrollmentFactory(course_id=course_overview.id) for i in range(2)
    ]
    mapping = {
        ce.course_id: LearnerCourseGradeMetricsFactory(
            course_id=str(ce.course_id),
            user=ce.user,
            sections_worked=1,
            sections_possible=2)
        for ce in course_enrollments
    }

    def mock_metrics(course_enrollment, **_kwargs):
        return mapping[course_enrollment.course_id]

    monkeypatch.setattr(
        'figures.pipeline.enrollment_metrics.get_site_for_course',
        lambda val: SiteFactory())
    monkeypatch.setattr(
        'figures.pipeline.enrollment_metrics.collect_metrics_for_enrollment',
        mock_metrics)
    data = bulk_calculate_course_progress_data(course_overview.id)
    # Each mocked LCGM reports 1 of 2 sections worked, so the average is 0.5
    assert data['average_progress'] == 0.5

def test_get_progress_data(self):
    """Method should return data of the form:

    {
        'course_progress_history': [],
        'course_progress_details': {
            'sections_worked': 5,
            'points_possible': 30.0,
            'sections_possible': 10,
            'points_earned': 15.0
        },
        'course_progress': (0.5,),
        'course_completed': datetime.datetime(2018, 4, 1, 0, 0, tzinfo=<UTC>)
    }
    """
    metrics_data = dict(points_possible=1,
                        points_earned=2,
                        sections_worked=3,
                        sections_possible=4)
    lcgm = LearnerCourseGradeMetricsFactory(
        user=self.course_enrollment.user,
        course_id=str(self.course_enrollment.course_id),
        **metrics_data)
    data = self.serializer.get_progress_data(self.course_enrollment)
    details = data['course_progress_details']
    for key, val in metrics_data.items():
        assert details[key] == val
    assert data['course_progress'] == lcgm.progress_percent
    assert data['course_completed'] == self.generated_certificate.created_date

def test_no_update_has_lcgm_no_sm(self, monkeypatch):
    """We have an LCGM but no StudentModule records

    With no StudentModule records for the enrollment, the function under test
    should not return metrics.
    """
    monkeypatch.setattr(
        'figures.pipeline.enrollment_metrics.get_site_for_course',
        lambda val: self.site)
    monkeypatch.setattr(
        'figures.pipeline.enrollment_metrics._collect_progress_data',
        lambda val: self.progress_data)
    # Create a course enrollment for which we won't have student module records
    ce = CourseEnrollmentFactory(course_id=self.course_enrollment.course_id)
    if organizations_support_sites():
        UserOrganizationMappingFactory(organization=self.org, user=ce.user)
    lcgm = LearnerCourseGradeMetricsFactory(course_id=ce.course_id, user=ce.user)
    ce_sm = StudentModule.objects.filter(course_id=ce.course_id,
                                         student_id=ce.user.id)
    assert not ce_sm
    metrics = collect_metrics_for_enrollment(site=self.site,
                                             course_enrollment=ce,
                                             date_for=self.today,
                                             student_modules=ce_sm)
    assert not metrics

def test_bulk_calculate_course_progress_unlinked_course_error(db, monkeypatch):
    """Tests 'bulk_calculate_course_progress_data' function

    The function under test iterates over a set of course enrollment records,
    so we create a couple of records to iterate over and mock the collect
    function. Unlike the happy path test, 'get_site_for_course' is not mocked
    here, so the course is not linked to a site and the function should raise
    'UnlinkedCourseError'.
    """
    course_overview = CourseOverviewFactory()
    course_enrollments = [
        CourseEnrollmentFactory(course_id=course_overview.id) for i in range(2)
    ]
    mapping = {
        ce.course_id: LearnerCourseGradeMetricsFactory(
            course_id=str(ce.course_id),
            user=ce.user,
            sections_worked=1,
            sections_possible=2)
        for ce in course_enrollments
    }

    def mock_metrics(course_enrollment, **_kwargs):
        return mapping[course_enrollment.course_id]

    monkeypatch.setattr(
        'figures.pipeline.enrollment_metrics.collect_metrics_for_enrollment',
        mock_metrics)
    with pytest.raises(UnlinkedCourseError):
        bulk_calculate_course_progress_data(course_overview.id)

def test_edrec_exists_older_lcgm(self):
    ce = self.enrollments[0]
    older_date = days_from(self.date_for, -2)

    # Create existing Figures records
    EnrollmentDataFactory(site=self.site,
                          user=ce.user,
                          course_id=str(ce.course_id),
                          date_for=older_date)
    older_lcgm = LearnerCourseGradeMetricsFactory(site=self.site,
                                                  user=ce.user,
                                                  course_id=str(ce.course_id),
                                                  date_for=older_date)
    # Make sure that the LCGM we created is the most recent one
    assert LearnerCourseGradeMetrics.objects.latest_lcgm(
        ce.user, ce.course_id) == older_lcgm

    # Run our code under test
    ed, created = EnrollmentData.objects.update_metrics(self.site, ce)

    # Verify our Figures records are updated
    after_lcgm = LearnerCourseGradeMetrics.objects.latest_lcgm(
        ce.user, ce.course_id)
    after_ed = EnrollmentData.objects.get(site=self.site,
                                          user=ce.user,
                                          course_id=str(ce.course_id))
    assert after_lcgm.date_for == self.date_for
    assert after_ed.date_for == self.date_for

def test_dates_lcgm_is_future_is_false(self):
    """
    Note: This should probably be an error state
    """
    lcgm = LearnerCourseGradeMetricsFactory(
        date_for=self.student_module.modified.date() + relativedelta(days=1))
    assert not _enrollment_metrics_needs_update(lcgm, self.student_module)

def test_existence_yes_lcgm_no_sm_is_false(self, caplog):
    lcgm = LearnerCourseGradeMetricsFactory()
    assert not _enrollment_metrics_needs_update(lcgm, None)
    last_log = caplog.records[-1]
    assert last_log.message.startswith('FIGURES:PIPELINE:LCGM')
    assert lcgm.course_id in last_log.message
    assert str(lcgm.id) in last_log.message
    assert str(lcgm.user.id) in last_log.message

def create_lcgm(self, date_for):
    """Helper to create an LCGM record with the given `date_for`"""
    return LearnerCourseGradeMetricsFactory(
        course_id=str(self.course_enrollment.course_id),
        user=self.course_enrollment.user,
        date_for=date_for,
        points_possible=self.progress_data['points_possible'],
        points_earned=self.progress_data['points_earned'],
        sections_worked=self.progress_data['sections_worked'],
        sections_possible=self.progress_data['count'])

def test_most_recent_with_data(db):
    """Make sure the query works with a couple of existing models

    We create two LearnerCourseGradeMetrics models and test that the function
    retrieves the newer one.
    """
    user = UserFactory()
    first_date = as_date('2020-02-02')
    second_date = as_date('2020-04-01')
    course_overview = CourseOverviewFactory()
    older_lcgm = LearnerCourseGradeMetricsFactory(
        user=user, course_id=str(course_overview.id), date_for=first_date)
    newer_lcgm = LearnerCourseGradeMetricsFactory(
        user=user, course_id=str(course_overview.id), date_for=second_date)
    assert older_lcgm.date_for != newer_lcgm.date_for
    obj = LearnerCourseGradeMetrics.objects.most_recent_for_learner_course(
        user=user, course_id=course_overview.id)
    assert obj == newer_lcgm

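# A minimal sketch of how a manager method like `most_recent_for_learner_course`
# can be implemented (an illustration under assumptions, not necessarily the
# project's actual implementation; the class name here is hypothetical).
from django.db import models


class LearnerCourseGradeMetricsManagerSketch(models.Manager):
    """Hypothetical manager illustrating the "most recent record" lookup."""

    def most_recent_for_learner_course(self, user, course_id):
        # Latest `date_for` wins; returns None when no matching record exists
        return (self.filter(user=user, course_id=str(course_id))
                .order_by('-date_for')
                .first())
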
def test_completed_ids_method(self, monkeypatch, enrollment_test_data):
    site = enrollment_test_data['site']
    users = enrollment_test_data['users']
    caller = self.make_caller(site, users)
    other_site = SiteFactory()
    assert site.domain != other_site.domain
    # Create a completed LCGM record for the other site
    LearnerCourseGradeMetricsFactory(site=other_site,
                                     sections_worked=1,
                                     sections_possible=1)
    # Create an incomplete LCGM record for our site
    LearnerCourseGradeMetricsFactory(site=site,
                                     sections_worked=1,
                                     sections_possible=5)
    completed_lcgm = [
        LearnerCourseGradeMetricsFactory(site=site,
                                         sections_worked=5,
                                         sections_possible=5)
        for i in range(3)
    ]
    request_path = self.base_request_path + '/completed_ids/'
    response = self.make_request(request_path=request_path,
                                 monkeypatch=monkeypatch,
                                 site=site,
                                 caller=caller,
                                 action='completed_ids')
    assert response.status_code == status.HTTP_200_OK
    assert is_response_paginated(response.data)
    results = response.data['results']
    # Check that the results have our expected keys and only our expected keys
    res_keys_list = [elem.keys() for elem in results]
    results_key_set = set(
        [item for sublist in res_keys_list for item in sublist])
    assert results_key_set == set(['course_id', 'user_id'])
    # Check that we have the data we're looking for
    results_values = [elem.values() for elem in results]
    expected_values = [[obj.course_id, obj.user_id] for obj in completed_lcgm]
    assert set(map(tuple, results_values)) == set(map(tuple, expected_values))

def test_user_link(self, monkeypatch):
    """Tests the two cases of user link

    A) there is a user in the record
    B) there is not a user in the record
    """
    mock_uri = '/mock-uri-to-user-admin-page'

    def mock_reverse(*args, **kwargs):
        return mock_uri

    users = [UserFactory(), UserFactory()]
    lcg_metrics = [
        LearnerCourseGradeMetricsFactory(user=users[0]),
        LearnerCourseGradeMetricsFactory(user=users[1])
    ]
    admin_obj = figures.admin.LearnerCourseGradeMetricsAdmin(
        LearnerCourseGradeMetrics, self.admin_site)
    monkeypatch.setattr(figures.admin, 'reverse', mock_reverse)
    data = admin_obj.user_link(lcg_metrics[0])
    assert data == '<a href="{url}"></a>'.format(url=mock_uri)
    data = admin_obj.user_link(LearnerCourseGradeMetricsFactory(user=None))
    assert data == 'no user in this record'

def test_exists_no_force(self):
    ce = self.enrollments[0]
    construct_kwargs = dict(site=self.site,
                            user=ce.user,
                            course_id=str(ce.course_id),
                            date_for=self.date_for)
    before_ed = EnrollmentDataFactory(**construct_kwargs)
    LearnerCourseGradeMetricsFactory(**construct_kwargs)
    with patch('figures.models.EnrollmentProgress._get_progress') as get_prog:
        ed, created = EnrollmentData.objects.update_metrics(self.site, ce)
    assert not get_prog.called
    assert ed == before_ed

def create_sample_completed_lcgm(site, user_count, course_count):
    """Generate test data

    TODO: Make this a parametrized fixture
    https://docs.pytest.org/en/3.1.3/example/parametrize.html
    (a sketch of one possible fixture follows this helper)

    We don't create CourseEnrollment objects because we don't need them:
    Figures models try to rely on the content and context of the data in the
    LMS and not on the LMS models specifically.
    """
    users = [UserFactory() for i in range(user_count)]
    # We need just the course ids
    course_ids = [COURSE_ID_STR_TEMPLATE.format(i) for i in range(course_count)]
    # Two records for each enrollment: one shows not complete, one shows complete
    lcgm_data = [
        dict(date_for='2020-04-01',
             points_possible=40,
             points_earned=40,
             sections_possible=5,
             sections_worked=4),
        dict(date_for='2020-05-05',
             points_possible=50,
             points_earned=50,
             sections_possible=5,
             sections_worked=5)
    ]
    lcgm_list = []
    for user in users:
        for course_id in course_ids:
            for lcgm in lcgm_data:
                lcgm_list.append(LearnerCourseGradeMetricsFactory(
                    site=site,
                    user=user,
                    course_id=course_id,
                    date_for=lcgm['date_for'],
                    points_possible=lcgm['points_possible'],
                    points_earned=lcgm['points_earned'],
                    sections_possible=lcgm['sections_possible'],
                    sections_worked=lcgm['sections_worked']))
    return dict(
        lcgm_list=lcgm_list,
        users=users,
        course_ids=course_ids,
        site=site,
    )

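# One possible parametrized-fixture version of the helper above, per its TODO.
# This is a sketch only: the fixture name and the (user_count, course_count)
# parameter pairs are assumptions, not project code.
import pytest


@pytest.fixture(params=[(2, 1), (3, 2)])
def sample_completed_lcgm_data(request, db):
    """Hypothetical fixture: each param is a (user_count, course_count) pair."""
    user_count, course_count = request.param
    return create_sample_completed_lcgm(SiteFactory(), user_count, course_count)
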
def site_data(db, settings):
    """Simple fake site data"""
    if organizations_support_sites():
        settings.FEATURES['FIGURES_IS_MULTISITE'] = True
    site_data = make_site_data()
    ce = site_data['enrollments'][0]
    lcgm = [
        LearnerCourseGradeMetricsFactory(site=site_data['site'],
                                         user=ce.user,
                                         course_id=str(ce.course_id),
                                         date_for='2020-10-01'),
    ]
    site_data['lcgm'] = lcgm
    return site_data

def test_force_update(self):
    ce = self.enrollments[0]
    # Create existing Figures records.
    # We only need to assign one progress value, but we assign both the
    # possible and earned values for one measure to make sure that earned is
    # not more than possible. We arbitrarily chose points; we could have also
    # chosen sections or assigned both.
    construct_kwargs = dict(site=self.site,
                            user=ce.user,
                            course_id=str(ce.course_id),
                            date_for=self.date_for,
                            points_earned=5,
                            points_possible=10)
    EnrollmentDataFactory(**construct_kwargs)
    before_lcgm = LearnerCourseGradeMetricsFactory(**construct_kwargs)
    fake_progress = dict(points_possible=50,
                         points_earned=25,
                         sections_possible=10,
                         sections_worked=5)
    with patch('figures.models.EnrollmentProgress._get_progress',
               return_value=fake_progress):
        ed, created = EnrollmentData.objects.update_metrics(
            self.site, ce, force_update=True)

    # Verify our Figures records are updated
    lcgm = LearnerCourseGradeMetrics.objects.latest_lcgm(ce.user, ce.course_id)
    check_ed = EnrollmentData.objects.get(site=self.site,
                                          user=ce.user,
                                          course_id=str(ce.course_id))
    assert check_ed == ed
    assert not created
    assert check_ed.date_for == self.date_for
    assert check_ed.points_earned == fake_progress['points_earned']
    assert lcgm.date_for == self.date_for
    assert lcgm.id == before_lcgm.id
    # We only need to check one of the progress fields to know it was updated
    assert lcgm.points_earned == fake_progress['points_earned']

def test_existence_yes_lcgm_no_sm_is_false(self):
    path = 'figures.pipeline.enrollment_metrics.log_error'
    with mock.patch(path) as mock_log_error:
        assert not _enrollment_metrics_needs_update(
            LearnerCourseGradeMetricsFactory(), None)
        mock_log_error.assert_called()

def test_dates_lcgm_is_past_is_true(self):
    lcgm = LearnerCourseGradeMetricsFactory(
        date_for=self.student_module.modified.date() - relativedelta(days=1))
    assert _enrollment_metrics_needs_update(lcgm, self.student_module)

def test_dates_lcgm_is_current_is_false(self):
    lcgm = LearnerCourseGradeMetricsFactory(
        date_for=self.student_module.modified.date())
    assert not _enrollment_metrics_needs_update(lcgm, self.student_module)

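# The date-based tests above (future -> False, past -> True, current -> False)
# assert one rule: an update is needed only when the LCGM record predates the
# latest StudentModule activity. A minimal sketch of that comparison, as an
# illustration of the tested behavior rather than the actual implementation of
# `_enrollment_metrics_needs_update`:
def _needs_update_by_date_sketch(lcgm_date_for, student_module_modified):
    """Hypothetical helper: True when the LCGM is older than the SM activity."""
    return lcgm_date_for < student_module_modified.date()
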