def test_two_users_same_id_same_cohort(self):
    """A second wiki user sharing an editor's id, on another project,
    still shows up in the same cohort's report output."""
    # Register a wiki user that reuses the editor's id on a different
    # project, validated into the cohort under test.
    duplicate_user = WikiUserStore(
        mediawiki_userid=self.editors[0].user_id,
        mediawiki_username='******',
        project=second_mediawiki_project,
        valid=True,
        validating_cohort=self.cohort.id,
    )
    self.session.add(duplicate_user)
    self.session.commit()
    self.session.add(CohortWikiUserStore(
        cohort_id=self.cohort.id,
        wiki_user_id=duplicate_user.id,
    ))
    self.session.commit()

    response = self.client.post('/reports/create/', data={
        'responses': self.json_to_post,
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    expected_row = '{0},{1},edits,0,0,0,0'.format(
        self.editors[0].user_id, 'Editor X with same id')
    assert_true(response.data.find(expected_row) >= 0)
def test_user_in_two_projects(self):
    """An editor present on two projects gets a per-project row in the
    report CSV for the cohort."""
    # Same user name/id, different project, validated into the cohort.
    cross_project_user = WikiUserStore(
        mediawiki_userid=self.editors[0].user_id,
        mediawiki_username='******',
        project=second_mediawiki_project,
        valid=True,
        validating_cohort=self.cohort.id,
    )
    self.session.add(cross_project_user)
    self.session.commit()
    self.session.add(CohortWikiUserStore(
        cohort_id=self.cohort.id,
        wiki_user_id=cross_project_user.id,
    ))
    self.session.commit()

    response = self.client.post('/reports/create/', data={
        'responses': self.json_to_post,
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    expected_row = '{0},{1},{2},edits,0,0,0,0'.format(
        self.editors[0].user_id,
        'Editor 0 in second wiki',
        second_mediawiki_project)
    assert_true(response.data.find(expected_row) >= 0)
def test_report_create_and_result(self):
    """Submitting the program-global-metrics form redirects to the
    report page and eventually creates a celery task."""
    form_data = {
        'name': 'TestCohort2',
        'project': 'wiki',
        'centralauth': True,
        'validate_as_user_ids': False,
        'paste_ids_or_names': 'Editor test-specific-0\nEditor test-specific-1',
        'start_date': '2015-11-01 00:00:00',
        'end_date': '2015-11-30 00:00:00',
    }
    response = self.client.post('/reports/program-global-metrics',
                                data=form_data)
    assert_equal(response.status_code, 302)
    assert_true(response.data.find('/reports/') >= 0)

    # Wait for the task to get processed
    time.sleep(3)

    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)
    assert_true(task is not None)
def test_save_public_report(self):
    """Toggling a report public then private writes and removes the
    public file exactly once each."""
    stub_path = "fake_path"
    file_manager = PublicReportFileManager(self.logger,
                                           '/some/fake/absolute/path')
    # Stub out filesystem access so only the call pattern is verified.
    file_manager.write_data = Mock()
    file_manager.remove_file = Mock()
    file_manager.get_public_report_path = MagicMock(return_value=stub_path)

    with file_manager_set(app, file_manager):
        report_spec = [{
            'name': 'Edits - test',
            'cohort': {
                'id': self.cohort.id,
                'name': self.cohort.name,
            },
            'metric': {
                'name': 'NamespaceEdits',
                'timeseries': 'month',
                'namespaces': [0, 1, 2],
                'start_date': '2013-01-01 00:00:00',
                'end_date': '2013-05-01 00:00:00',
                'individualResults': True,
                'aggregateResults': True,
                'aggregateSum': False,
                'aggregateAverage': True,
                'aggregateStandardDeviation': False,
            },
        }]
        response = self.client.post('/reports/create/', data={
            'responses': json.dumps(report_spec),
        })
        # Wait a second for the task to get processed
        time.sleep(1)

        # Check that the task has been created
        response = self.client.get('/reports/list/')
        listing = json.loads(response.data)
        result_key = listing['reports'][-1]['result_key']
        task, report = get_celery_task(result_key)
        assert_true(task and report)

        # Make the report publically accessible (save it to static/public)
        response = self.client.post('/reports/set-public/{}'.format(report.id))
        assert_true(response.status_code == 200)
        assert_equal(file_manager.write_data.call_count, 1)

        # Now make the report private (remove it from static/public)
        response = self.client.post('/reports/unset-public/{}'.format(report.id))
        assert_true(response.status_code == 200)
        file_manager.remove_file.assert_called_with(stub_path)
        assert_equal(file_manager.remove_file.call_count, 1)
def test_full_report_create_and_result(self):
    """End-to-end: create a report with every aggregate enabled and
    verify status, csv and json outputs."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-06-01 00:00:00',
            'end_date': '2013-09-01 00:00:00',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': True,
            'aggregateAverage': True,
            'aggregateStandardDeviation': True,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    assert_equal(response.status_code, 200)
    assert_true(response.data.find('isRedirect') >= 0)
    assert_true(response.data.find('/reports/') >= 0)

    # Wait a second for the task to get processed
    time.sleep(1)

    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)
    assert_true(task is not None)

    # Get the result directly
    result = task.get()
    assert_true(result is not None)

    # Check the status via get
    response = self.client.get('/reports/status/{0}'.format(result_key))
    assert_true(response.data.find('SUCCESS') >= 0)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Average') >= 0)

    # Check the json result
    response = self.client.get('/reports/result/{0}.json'.format(result_key))
    assert_true(response.data.find('Average') >= 0)
def test_save_public_report(self):
    """Making a report public writes it to static/public once; making it
    private again removes that same file once."""
    stubbed_path = "fake_path"
    file_manager = PublicReportFileManager(self.logger,
                                           '/some/fake/absolute/path')
    # Replace real file operations with mocks; assert on call counts.
    file_manager.write_data = Mock()
    file_manager.remove_file = Mock()
    file_manager.get_public_report_path = MagicMock(return_value=stubbed_path)

    with file_manager_set(app, file_manager):
        report_spec = [{
            'name': 'Edits - test',
            'cohort': {
                'id': self.cohort.id,
                'name': self.cohort.name,
            },
            'metric': {
                'name': 'NamespaceEdits',
                'timeseries': 'month',
                'namespaces': [0, 1, 2],
                'start_date': '2013-01-01 00:00:00',
                'end_date': '2013-05-01 00:00:00',
                'individualResults': True,
                'aggregateResults': True,
                'aggregateSum': False,
                'aggregateAverage': True,
                'aggregateStandardDeviation': False,
            },
        }]
        response = self.client.post('/reports/create/', data={
            'responses': json.dumps(report_spec),
        })
        # Wait a second for the task to get processed
        time.sleep(1)

        # Check that the task has been created
        response = self.client.get('/reports/list/')
        listing = json.loads(response.data)
        result_key = listing['reports'][-1]['result_key']
        task, report = get_celery_task(result_key)
        assert_true(task and report)

        # Make the report publically accessible (save it to static/public)
        response = self.client.post('/reports/set-public/{}'.format(report.id))
        assert_true(response.status_code == 200)
        assert_equal(file_manager.write_data.call_count, 1)

        # Now make the report private (remove it from static/public)
        response = self.client.post('/reports/unset-public/{}'.format(report.id))
        assert_true(response.status_code == 200)
        file_manager.remove_file.assert_called_with(stubbed_path)
        assert_equal(file_manager.remove_file.call_count, 1)
def test_full_report_create_and_result(self):
    """Full report lifecycle: creation response, task result, status
    endpoint, and both csv and json result endpoints."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-06-01 00:00:00',
            'end_date': '2013-09-01 00:00:00',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': True,
            'aggregateAverage': True,
            'aggregateStandardDeviation': True,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    assert_equal(response.status_code, 200)
    assert_true(response.data.find('isRedirect') >= 0)
    assert_true(response.data.find('/reports/') >= 0)

    # Wait a second for the task to get processed
    time.sleep(1)

    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)
    assert_true(task is not None)

    # Get the result directly
    result = task.get()
    assert_true(result is not None)

    # Check the status via get
    response = self.client.get('/reports/status/{0}'.format(result_key))
    assert_true(response.data.find('SUCCESS') >= 0)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Average') >= 0)

    # Check the json result
    response = self.client.get('/reports/result/{0}.json'.format(result_key))
    assert_true(response.data.find('Average') >= 0)
def test_report_result_json(self):
    """The json result keys individual rows by
    user_name|user_id|project|cohort_id for each editor."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'timeseries': 'month',
            'namespaces': [0, 1, 2],
            'start_date': '2013-01-01 00:00:00',
            'end_date': '2013-05-01 00:00:00',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': True,
            'aggregateStandardDeviation': False,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    response = self.client.get('/reports/result/{0}.json'.format(result_key))

    # Check that user names are included
    for editor in (self.editors[0], self.editors[1]):
        user_key = '{0}|{1}|{2}|{3}'.format(
            editor.user_name,
            editor.user_id,
            mediawiki_project,
            self.cohort.id)
        assert_true(response.data.find(user_key) >= 0)
def test_report_result_average_only_csv(self):
    """With only the average aggregate enabled, the csv contains the
    Average row plus the run parameters."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-01-01 00:00:00',
            'end_date': '2013-05-01 00:00:00',
            'individualResults': False,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': True,
            'aggregateStandardDeviation': False,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Average,,,2.0') >= 0)

    # Testing to see if the parameters are also in the CSV
    # (related to Mingle 1089)
    for fragment in ('parameters', 'start_date', 'end_date', 'namespaces'):
        assert_true(response.data.find(fragment) >= 0)
    cohort_size = 'Cohort Size,{0}'.format(len(self.cohort))
    assert_true(response.data.find(cohort_size) >= 0)
def test_report_result_average_only_csv(self):
    """Average-only aggregation: csv holds the Average row and echoes
    the metric parameters."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-01-01 00:00:00',
            'end_date': '2013-05-01 00:00:00',
            'individualResults': False,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': True,
            'aggregateStandardDeviation': False,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Average,,2.0') >= 0)

    # Testing to see if the parameters are also in the CSV
    # (related to Mingle 1089)
    for fragment in ('parameters', 'start_date', 'end_date', 'namespaces'):
        assert_true(response.data.find(fragment) >= 0)
    cohort_size = 'Cohort Size,{0}'.format(len(self.cohort))
    assert_true(response.data.find(cohort_size) >= 0)
def test_report_result_json(self):
    """Individual results in the json output are keyed by
    user_name|user_id|project|cohort_id."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'timeseries': 'month',
            'namespaces': [0, 1, 2],
            'start_date': '2013-01-01 00:00:00',
            'end_date': '2013-05-01 00:00:00',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': True,
            'aggregateStandardDeviation': False,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    response = self.client.get('/reports/result/{0}.json'.format(result_key))

    # Check that user names are included
    for editor in (self.editors[0], self.editors[1]):
        user_key = '{0}|{1}|{2}|{3}'.format(
            editor.user_name,
            editor.user_id,
            mediawiki_project,
            self.cohort.id)
        assert_true(response.data.find(user_key) >= 0)
def test_report_fails_with_invalid_cohort(self):
    """Even with names that validate to no cohort, the form redirects
    and a task is still created."""
    form_data = {
        'name': 'TestCohort1',
        'project': 'wiki',
        'centralauth': True,
        'validate_as_user_ids': False,
        'paste_ids_or_names': 'test-specific-0\ntest-specific-1',
        'start_date': '2015-11-01 00:00:00',
        'end_date': '2015-11-30 00:00:00',
    }
    response = self.client.post('/reports/program-global-metrics',
                                data=form_data)
    assert_equal(response.status_code, 302)
    assert_true(response.data.find('/reports/') >= 0)

    # Wait for the task to get processed
    time.sleep(3)

    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)
    assert_true(task is not None)
def test_report_result_std_dev_only_csv(self):
    """With only standard deviation aggregation, the csv contains a
    'Standard Deviation' row."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.test_cohort_id,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-06-01',
            'end_date': '2013-09-01',
            'individualResults': False,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': False,
            'aggregateStandardDeviation': True,
        },
    }]
    response = self.app.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.app.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.app.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Standard Deviation') >= 0)
def test_report_result_std_dev_only_csv(self):
    """Standard-deviation-only aggregation produces a
    'Standard Deviation' row in the csv output."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.test_cohort_id,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-06-01',
            'end_date': '2013-09-01',
            'individualResults': False,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': False,
            'aggregateStandardDeviation': True,
        },
    }]
    response = self.app.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.app.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.app.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Standard Deviation') >= 0)
def test_full_report_create_and_result(self):
    """Full lifecycle including update_status on a stale report and the
    legacy (queue_result_key) result lookup path."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.test_cohort_id,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-06-01',
            'end_date': '2013-09-01',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': True,
            'aggregateAverage': True,
            'aggregateStandardDeviation': True,
        },
    }]
    response = self.app.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    assert_equal(response.status_code, 200)
    assert_true(response.data.find('isRedirect') >= 0)
    assert_true(response.data.find('/reports/') >= 0)

    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.app.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)
    assert_true(task is not None)

    # Get the result directly
    result = get_celery_task_result(task, report)
    assert_true(result is not None)

    # Check the status via get
    response = self.app.get('/reports/status/{0}'.format(result_key))
    assert_true(response.data.find('SUCCESS') >= 0)

    # Check the csv result
    response = self.app.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Average') >= 0)

    # Check the json result
    response = self.app.get('/reports/result/{0}.json'.format(result_key))
    assert_true(response.data.find('Average') >= 0)

    # Purposefully change the report status to make sure update_status works
    report.status = celery.states.STARTED
    self.session.add(report)
    self.session.commit()
    refreshed = self.session.query(PersistentReport).get(report.id)
    self.session.expunge(refreshed)
    refreshed.update_status()
    assert_equal(refreshed.status, celery.states.SUCCESS)

    # Change this report to look like the old style, to test that still works
    # TODO: delete this test on October 1st
    report.result_key = report.queue_result_key
    self.session.commit()
    result = get_celery_task_result(task, report)
    assert_true(result is not None)
def test_get_celery_task_no_key(self):
    """get_celery_task returns a (None, None) pair when given no key."""
    task, report = get_celery_task(None)
    assert_equal(task, None)
    assert_equal(report, None)
def test_full_report_create_and_result(self):
    """Exercise the whole report pipeline, then verify update_status
    recovers SUCCESS and old-style result keys still resolve."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.test_cohort_id,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'namespaces': [0, 1, 2],
            'start_date': '2013-06-01',
            'end_date': '2013-09-01',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': True,
            'aggregateAverage': True,
            'aggregateStandardDeviation': True,
        },
    }]
    response = self.app.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    assert_equal(response.status_code, 200)
    assert_true(response.data.find('isRedirect') >= 0)
    assert_true(response.data.find('/reports/') >= 0)

    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.app.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)
    assert_true(task is not None)

    # Get the result directly
    result = get_celery_task_result(task, report)
    assert_true(result is not None)

    # Check the status via get
    response = self.app.get('/reports/status/{0}'.format(result_key))
    assert_true(response.data.find('SUCCESS') >= 0)

    # Check the csv result
    response = self.app.get('/reports/result/{0}.csv'.format(result_key))
    assert_true(response.data.find('Average') >= 0)

    # Check the json result
    response = self.app.get('/reports/result/{0}.json'.format(result_key))
    assert_true(response.data.find('Average') >= 0)

    # Purposefully change the report status to make sure update_status works
    report.status = celery.states.STARTED
    self.session.add(report)
    self.session.commit()
    reloaded = self.session.query(PersistentReport).get(report.id)
    self.session.expunge(reloaded)
    reloaded.update_status()
    assert_equal(reloaded.status, celery.states.SUCCESS)

    # Change this report to look like the old style, to test that still works
    # TODO: delete this test on October 1st
    report.result_key = report.queue_result_key
    self.session.commit()
    result = get_celery_task_result(task, report)
    assert_true(result is not None)
def test_report_result_timeseries_csv(self):
    """A monthly timeseries report emits one dated column per month,
    per-editor rows, an Average row, and the run parameters."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'timeseries': 'month',
            'namespaces': [0, 1, 2],
            'start_date': '2013-01-01 00:00:00',
            'end_date': '2013-05-01 00:00:00',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': True,
            'aggregateStandardDeviation': False,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    '''
    TODO csv format now looks like:
    Cohort,test-specific-cohort,,,,,
    Cohort Size,4,,,,,
    Created On,2014-03-14 17:26:55,,,,,
    Metric,NamespaceEdits,,,,,
    Metric_aggregateAverage,True,,,,,
    Metric_aggregateResults,True,,,,,
    Metric_aggregateStandardDeviation,False,,,,,
    Metric_aggregateSum,False,,,,,
    Metric_end_date,2013-05-01 00:00:00,,,,,
    Metric_individualResults,True,,,,,
    Metric_namespaces,"[0, 1, 2]",,,,,
    Metric_start_date,2013-01-01 00:00:00,,,,,
    Metric_timeseries,month,,,,,
    '''
    header = ('user_id,user_name,submetric,'
              '2013-01-01 00:00:00,2013-02-01 00:00:00,'
              '2013-03-01 00:00:00,2013-04-01 00:00:00')
    assert_true(response.data.find(header) >= 0)
    for editor in (self.editors[0], self.editors[1]):
        editor_row = '{0},{1},edits,1,2,1,0'.format(
            editor.user_id, editor.user_name)
        assert_true(response.data.find(editor_row) >= 0)
    assert_true(response.data.find(
        'Average,,edits,0.5000,1.0000,0.5000,0.0000'
    ) >= 0)

    # Testing to see if the parameters are also in the CSV
    for fragment in ('parameters', 'start_date', 'end_date', 'namespaces'):
        assert_true(response.data.find(fragment) >= 0)
    cohort_size = 'Cohort Size,{0}'.format(len(self.cohort))
    assert_true(response.data.find(cohort_size) >= 0)
def test_report_result_timeseries_csv(self):
    """Monthly timeseries csv: dated header columns, a per-project row
    for each editor, an Average row, and the echoed parameters."""
    report_spec = [{
        'name': 'Edits - test',
        'cohort': {
            'id': self.cohort.id,
            'name': self.cohort.name,
        },
        'metric': {
            'name': 'NamespaceEdits',
            'timeseries': 'month',
            'namespaces': [0, 1, 2],
            'start_date': '2013-01-01 00:00:00',
            'end_date': '2013-05-01 00:00:00',
            'individualResults': True,
            'aggregateResults': True,
            'aggregateSum': False,
            'aggregateAverage': True,
            'aggregateStandardDeviation': False,
        },
    }]
    response = self.client.post('/reports/create/', data={
        'responses': json.dumps(report_spec),
    })
    # Wait a second for the task to get processed
    time.sleep(1)

    # Check that the task has been created
    response = self.client.get('/reports/list/')
    listing = json.loads(response.data)
    result_key = listing['reports'][-1]['result_key']
    task, report = get_celery_task(result_key)

    # Check the csv result
    response = self.client.get('/reports/result/{0}.csv'.format(result_key))
    '''
    TODO csv format now looks like:
    Cohort,test-specific-cohort,,,,,
    Cohort Size,4,,,,,
    Created On,2014-03-14 17:26:55,,,,,
    Metric,NamespaceEdits,,,,,
    Metric_aggregateAverage,True,,,,,
    Metric_aggregateResults,True,,,,,
    Metric_aggregateStandardDeviation,False,,,,,
    Metric_aggregateSum,False,,,,,
    Metric_end_date,2013-05-01 00:00:00,,,,,
    Metric_individualResults,True,,,,,
    Metric_namespaces,"[0, 1, 2]",,,,,
    Metric_start_date,2013-01-01 00:00:00,,,,,
    Metric_timeseries,month,,,,,
    '''
    header = ('user_id,user_name,project,submetric,'
              '2013-01-01 00:00:00,2013-02-01 00:00:00,'
              '2013-03-01 00:00:00,2013-04-01 00:00:00')
    assert_true(response.data.find(header) >= 0)
    for editor in (self.editors[0], self.editors[1]):
        editor_row = '{0},{1},{2},edits,1,2,1,0'.format(
            editor.user_id, editor.user_name, mediawiki_project)
        assert_true(response.data.find(editor_row) >= 0)
    assert_true(response.data.find(
        'Average,,,edits,0.5000,1.0000,0.5000,0.0000') >= 0)

    # Testing to see if the parameters are also in the CSV
    for fragment in ('parameters', 'start_date', 'end_date', 'namespaces'):
        assert_true(response.data.find(fragment) >= 0)
    cohort_size = 'Cohort Size,{0}'.format(len(self.cohort))
    assert_true(response.data.find(cohort_size) >= 0)