def test_bulk_timecards(self):
    """The bulk timecard CSV exposes exactly the expected columns and data.

    Streams the report, checks every row's header set and project id,
    and fails if the export produced no rows at all.
    """
    response = client().get(reverse('reports:BulkTimecardList'))
    rows = decode_streaming_csv(response)
    expected_fields = {
        'project_name',
        'project_id',
        'billable',
        'employee',
        'start_date',
        'end_date',
        'hours_spent',
        'agency',
        'flat_rate',
        'active',
        'mbnumber',
        'notes',
        'revenue_profit_loss_account',
        'revenue_profit_loss_account_name',
        'expense_profit_loss_account',
        'expense_profit_loss_account_name',
        'employee_organization',
        'project_organization',
    }
    rows_read = 0
    # enumerate keeps the running count; rows_read stays 0 for an empty CSV.
    for rows_read, row in enumerate(rows, start=1):
        self.assertEqual(set(row.keys()), expected_fields)
        self.assertEqual(row['project_id'], '1')
    self.assertNotEqual(rows_read, 0, 'no rows read, expecting 1 or more')
def test_user_reporting_period_report(self):
    """The per-user reporting-period page shows the expected utilization.

    Fetches the detail view for a fixture user/period and verifies the
    utilization, total-hours, and billable-hours context values, plus the
    rendered utilization percentage.
    """
    url = reverse(
        'reports:ReportingPeriodUserDetailView',
        kwargs={'reporting_period': '1999-12-31', 'username': '******'},
    )
    response = client(self).get(url)
    context = response.context
    self.assertEqual(context['user_utilization'], '67.5%')
    self.assertEqual(context['user_all_hours'], 40.00)
    self.assertEqual(context['user_billable_hours'], 27)
    self.assertContains(response, '67.5%')
def test_project_csv(self):
    """Test that correct fields are returned for project data CSV request.

    Previously an empty CSV skipped every assertion and the test passed
    vacuously; we now require at least one row, matching the guard used
    by the user-data CSV test.
    """
    response = client(self).get(reverse('reports:ProjectList'))
    rows = decode_streaming_csv(response)
    # Make sure we even have a response to work with.
    self.assertNotEqual(len(rows), 0)
    # The serializer's Meta.fields is the canonical column list; hoist the
    # count out of the loop instead of recomputing it per row.
    num_of_expected_fields = len(ProjectSerializer.Meta.fields)
    for row in rows:
        self.assertEqual(num_of_expected_fields, len(row))
def test_user_data_csv_field_count(self):
    """Test that correct fields are returned for user data CSV request.

    NOTE(review): renamed from ``test_user_data_csv`` — a later method in
    this suite has that exact name and, if both live in the same TestCase,
    the later definition shadows this one so it never ran. Also guards
    against a vacuous pass on an empty CSV.
    """
    response = client(self).get(reverse('reports:UserDataView'))
    rows = decode_streaming_csv(response)
    # An empty CSV must fail rather than skip every assertion.
    self.assertNotEqual(len(rows), 0)
    # Hoist the invariant expected-field count out of the loop.
    num_of_expected_fields = len(
        UserDataSerializer.__dict__['_declared_fields'])
    for row in rows:
        self.assertEqual(num_of_expected_fields, len(row))
def test_user_data_csv(self):
    """
    Test that CSV is returned and contains the correct fields
    for user data CSV request.

    Previously the field-count assertion ran only once, after the loop,
    so only the *last* row was ever validated; every row is checked now.
    """
    response = client().get(reverse('reports:UserDataView'))
    rows = decode_streaming_csv(response)
    # Make sure we even have a response to work with.
    self.assertNotEqual(len(rows), 0)
    num_of_expected_fields = len(
        UserDataSerializer.__dict__['_declared_fields'])
    for row in rows:
        # Assert inside the loop so a malformed middle row fails the test.
        self.assertEqual(num_of_expected_fields, len(row))
def test_slim_bulk_timecards(self):
    """The slim bulk timecard CSV carries only the trimmed column set.

    Every streamed row must have exactly the slim headers and the fixture
    project/billable values; zero rows is a failure.
    """
    response = client(self).get(reverse('reports:SlimBulkTimecardList'))
    rows = decode_streaming_csv(response)
    expected_fields = {
        'project_name',
        'billable',
        'employee',
        'start_date',
        'end_date',
        'hours_spent',
        'mbnumber',
    }
    rows_read = 0
    # enumerate keeps the running count; rows_read stays 0 for an empty CSV.
    for rows_read, row in enumerate(rows, start=1):
        self.assertEqual(set(row.keys()), expected_fields)
        self.assertEqual(row['project_name'], 'Out Of Office')
        self.assertEqual(row['billable'], 'False')
    self.assertNotEqual(rows_read, 0, 'no rows read, expecting 1 or more')
def test_general_snippets(self):
    """Test that snippets are returned in correct CSV format.

    Seeds a timecard entry on the 'General' project, then checks that the
    snippets CSV contains that entry and that each row has the serializer's
    full field set.

    Previously, an empty CSV left ``num_of_fields`` unbound and the test
    died with NameError instead of a clean assertion failure, and the
    field count was checked only for the last row examined.
    """
    project = projects.models.Project.objects.get_or_create(
        name='General')[0]
    tco = hours.models.TimecardObject.objects.first()
    tco.project = project
    tco.hours_spent = 40
    tco.notes = 'Some notes about things!'
    tco.save()
    response = client().get(reverse('reports:GeneralSnippetsView'))
    rows = decode_streaming_csv(response)
    # Fail cleanly if the export produced no rows at all.
    self.assertNotEqual(len(rows), 0)
    expected_num_of_fields = \
        len(GeneralSnippetsTimecardSerializer.__dict__['_declared_fields'])
    entry_found = False
    for row in rows:
        # Every row must carry the full serializer field set.
        self.assertEqual(len(row), expected_num_of_fields)
        if tco.notes in row['notes']:
            entry_found = True
            break
    self.assertTrue(entry_found)
def test_project_timeline(self):
    """The user timeline CSV contains the expected fixture row."""
    response = client().get(reverse('reports:UserTimelineView'))
    expected_row = 'aaron.snow,,2015-06-01,2015-06-08,False,20.00'
    self.assertIn(expected_row, str(response.content))