def test_unite_dates(self):
    # a single range is returned unchanged
    self.assertEqual(
        list(utils.unite_dates([
            (datetime(2012, 1, 1), datetime(2012, 1, 2))
        ])),
        [
            (datetime(2012, 1, 1), datetime(2012, 1, 2))
        ]
    )

    # disjoint ranges stay separate
    self.assertEqual(
        list(utils.unite_dates([
            (datetime(2012, 1, 1), datetime(2012, 1, 2)),
            (datetime(2012, 1, 3), datetime(2012, 1, 4))
        ])),
        [
            (datetime(2012, 1, 1), datetime(2012, 1, 2)),
            (datetime(2012, 1, 3), datetime(2012, 1, 4))
        ]
    )

    # adjacent ranges are merged, disjoint ones are kept
    self.assertEqual(
        list(utils.unite_dates([
            (datetime(2012, 1, 1), datetime(2012, 1, 2)),
            (datetime(2012, 1, 2), datetime(2012, 1, 3)),
            (datetime(2012, 1, 4), datetime(2012, 1, 5)),
        ])),
        [
            (datetime(2012, 1, 1), datetime(2012, 1, 3)),
            (datetime(2012, 1, 4), datetime(2012, 1, 5)),
        ]
    )

    # the merge may also happen at the end of the list
    self.assertEqual(
        list(utils.unite_dates([
            (datetime(2012, 1, 1), datetime(2012, 1, 2)),
            (datetime(2012, 1, 3), datetime(2012, 1, 4)),
            (datetime(2012, 1, 4), datetime(2012, 1, 5)),
        ])),
        [
            (datetime(2012, 1, 1), datetime(2012, 1, 2)),
            (datetime(2012, 1, 3), datetime(2012, 1, 5)),
        ]
    )

    # out-of-order input is handled as well
    self.assertEqual(
        list(utils.unite_dates([
            (datetime(2012, 1, 1), datetime(2012, 1, 2)),
            (datetime(2012, 1, 2), datetime(2012, 1, 3)),
            (datetime(2012, 1, 10), datetime(2012, 1, 11)),
            (datetime(2012, 1, 3), datetime(2012, 1, 4)),
            (datetime(2012, 1, 11), datetime(2012, 1, 12)),
        ])),
        [
            (datetime(2012, 1, 1), datetime(2012, 1, 4)),
            (datetime(2012, 1, 10), datetime(2012, 1, 12)),
        ]
    )
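# The test above exercises utils.unite_dates, which is not shown in this
# section. Below is a minimal sketch of an implementation that satisfies the
# expectations above; it is an assumption inferred from the test data, and
# the real utils.unite_dates may differ (it may return a generator, for
# example, which is why the test wraps the result in list()).
def unite_dates(dates):
    """ Merges overlapping or adjacent (start, end) pairs into single
    spans and returns them sorted by start date.

    """
    merged = []

    for start, end in sorted(dates):
        if merged and start <= merged[-1][1]:
            # the span touches or overlaps the previous one -> extend it
            merged[-1] = (merged[-1][0], max(merged[-1][1], end))
        else:
            merged.append((start, end))

    return merged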
def dataset(resources, language, year, month, transform_record=None,
            compact=False):
    """ Takes a list of resources and returns a tablib dataset filled with
    all reservations of these resources. The json data of the reservations
    is filled using a single column for each type (form + field).

    transform_record is called with each record before it is added to the
    dataset. This allows the record to be changed, which is hard to do
    afterwards because the records are stored as tuples in the dataset and
    are not meant to be changed.

    If compact is True, whole day group reservations spanning multiple days
    are merged into one using utils.unite_dates.

    """
    translator = Translator(language)

    reservations = fetch_records(resources, year, month)

    # create the headers
    headers = translator.translate(basic_headers())
    dataheaders = additional_headers(reservations)
    headers.extend(dataheaders)

    # use dataview for display info helper view (yep, could be nicer)
    dataview = ReservationDataView()

    # for each reservation get a record per timeslot (which is a single slot
    # for reservations targeting an allocation and n slots for a reservation
    # targeting a group)
    records = []

    for r in reservations:
        token = utils.string_uuid(r.token)
        resource = resources[utils.string_uuid(r.resource)]

        if compact:
            timespans = utils.unite_dates(r.timespans())
        else:
            timespans = r.timespans()

        for start, end in timespans:
            record = [
                get_parent_title(resource),
                resource.title,
                token,
                r.email,
                start,
                end,
                utils.whole_day(start, end),
                _(r.status.capitalize()),
                r.quota,
                r.created,
                r.modified and r.modified or None,
            ]
            record.extend(
                additional_columns(
                    r, dataheaders, dataview.display_reservation_data
                )
            )

            if callable(transform_record):
                transform_record(record)

            translator.translate(record)
            records.append(record)

    # put the results in a tablib dataset
    return generate_dataset(headers, records)
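# A hypothetical use of the transform_record hook above. The callback below
# and the column index it touches are illustrative assumptions, not part of
# this module: each record is passed to the hook as a plain list right
# before it is appended, so columns can be rewritten in place.
def hide_email(record):
    # the email address is the fourth column of the record built above
    record[3] = 'hidden'

# e.g. dataset(resources, 'en', 2012, 1, transform_record=hide_email)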
def dataset(resources, language, compact=False):
    """ Takes a list of resources and returns a tablib dataset filled with
    all reservations of these resources. The json data of the reservations
    is filled using a single column for each type (form + field).

    If compact is True, whole day group reservations spanning multiple days
    are merged into one using utils.unite_dates.

    """
    translator = Translator(language)

    reservations = fetch_records(resources)

    # create the headers
    headers = translator.translate(basic_headers())
    dataheaders = additional_headers(reservations)
    headers.extend(dataheaders)

    # use dataview for display info helper view (yep, could be nicer)
    dataview = ReservationDataView()

    # for each reservation get a record per timeslot (which is a single slot
    # for reservations targeting an allocation and n slots for a reservation
    # targeting a group)
    records = []

    for r in reservations:
        token = utils.string_uuid(r.token)
        resource = resources[utils.string_uuid(r.resource)]

        if compact:
            timespans = utils.unite_dates(r.timespans())
        else:
            timespans = r.timespans()

        datetime_format = '%Y-%m-%d %H:%M'

        for start, end in timespans:
            record = [
                get_parent_title(resource),
                resource.title,
                token,
                r.email,
                start.strftime(datetime_format),
                end.strftime(datetime_format),
                dataview.display_reservation_data(
                    utils.whole_day(start, end)
                ),
                _(r.status.capitalize()),
                r.quota,
                r.created.strftime(datetime_format),
                r.modified and r.modified.strftime(datetime_format) or None,
            ]
            record.extend(
                additional_columns(
                    r, dataheaders, dataview.display_reservation_data
                )
            )

            translator.translate(record)
            records.append(record)

    # put the results in a tablib dataset
    return generate_dataset(headers, records)
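# generate_dataset is not defined in this section. A minimal sketch of what
# it could look like, assuming only the tablib API referred to in the
# docstrings above (the actual helper may do more, e.g. set a title or
# apply formatting):
import tablib


def generate_dataset(headers, records):
    ds = tablib.Dataset()
    ds.headers = headers

    for record in records:
        ds.append(record)

    return ds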