def test_csv_export(self):
    """Ensure exported CSV data matches the source queryset.

    Exports the snapshot queryset to CSV, verifies the exporter's field
    list matches the expected model schema, then checks every cell of
    the CSV against the corresponding model attribute.
    """
    qs_filter = {"pk__in": [x.pk for x in self.snapshots]}
    qs = BuildingSnapshot.objects.filter(**qs_filter)

    export_id = str(uuid.uuid4())
    exporter = Exporter(export_id, 'test_export', 'csv')

    fields = list(Exporter.fields_from_queryset(qs))
    # Expected set of exportable model fields (order-insensitive check).
    raw = [
        'owner_address', 'owner_postal_code', 'owner_email', 'postal_code',
        'occupied_floor_area', 'custom_id_1', 'extra_data', 'state_province',
        'tax_lot_id', 'address_line_2', 'address_line_1', 'lot_number',
        'year_ending', 'property_notes', 'generation_date', 'energy_alerts',
        'space_alerts', 'site_eui_weather_normalized', 'created',
        'energy_score', 'block_number', 'building_count', 'owner',
        'source_eui', 'extra_data_sources', 'city', 'confidence', 'district',
        'best_guess_confidence', 'site_eui', 'building_certification',
        'modified', 'match_type', 'source_eui_weather_normalized', u'id',
        'property_name', 'conditioned_floor_area', 'pm_property_id',
        'use_description', 'source_type', 'year_built', 'release_date',
        'gross_floor_area', 'owner_city_state', 'owner_telephone',
        'recent_sale_date'
    ]
    self.assertItemsEqual(fields, raw)

    fields.append("canonical_building__id")
    export_filename = exporter.export_csv(qs, fields)
    self.assertTrue(os.path.exists(export_filename))

    # try/finally guarantees the handle is closed and the temporary
    # export file is deleted even when an assertion below fails
    # (the original leaked both on test failure).
    export_file = open(export_filename)
    try:
        reader = csv.reader(export_file)
        header = reader.next()
        # 'canonical_building__id' is rendered as 'ID' in the CSV header.
        self.assertEqual(header[len(fields) - 1], 'ID')

        for i in range(len(self.snapshots)):
            row = reader.next()
            for j, field in enumerate(fields):
                # Resolve double-underscore paths (e.g.
                # 'canonical_building__id') by walking attributes.
                qs_val = qs[i]
                for component in field.split("__"):
                    qs_val = getattr(qs_val, component)
                    if qs_val is None:
                        break
                # Related managers and missing values export as ''.
                if isinstance(qs_val, Manager) or qs_val is None:
                    qs_val = u''
                else:
                    qs_val = unicode(qs_val)
                self.assertEqual(qs_val, row[j])
    finally:
        export_file.close()
        os.remove(export_filename)
def test_csv_export_extra_data(self):
    """Ensure exported CSV data (including extra_data keys) matches source.

    NOTE(review): this method is shadowed by a later definition with the
    same name in this class, so it never runs — the duplicate should be
    removed. Kept here with its defects fixed (`== None` -> `is None`,
    leaked file handle / temp file on assertion failure).
    """
    qs_filter = {"pk__in": [x.pk for x in self.snapshots]}
    qs = BuildingSnapshot.objects.filter(**qs_filter)

    export_id = str(uuid.uuid4())
    exporter = Exporter(export_id, 'test_export', 'csv')

    fields = list(Exporter.fields_from_queryset(qs))
    fields.append("canonical_building__id")
    # A field absent from the model falls through to extra_data lookup.
    fields.append('my new field')

    export_filename = exporter.export_csv(qs, fields)
    export_file = open(export_filename)
    try:
        reader = csv.reader(export_file)
        header = reader.next()
        # Non-model fields keep their raw name in the CSV header.
        self.assertEqual(header[len(fields) - 1], 'my new field')

        for i in range(len(self.snapshots)):
            row = reader.next()
            for j, field in enumerate(fields):
                qs_val = qs[i]
                for component in field.split("__"):
                    try:
                        qs_val = getattr(qs_val, component)
                    except AttributeError:
                        # Not a model attribute: look it up in extra_data.
                        qs_val = qs_val.extra_data.get(component)
                    if qs_val is None:
                        break
                # Related managers and missing values export as ''.
                if isinstance(qs_val, Manager) or qs_val is None:
                    qs_val = u''
                else:
                    qs_val = unicode(qs_val)
                self.assertEqual(qs_val, row[j])
    finally:
        export_file.close()
        os.remove(export_filename)
def test_csv_export_extra_data(self):
    """Ensure exported CSV data (including extra_data keys) matches source.

    Adds a non-model field name to the export list and verifies the
    exporter resolves it through each snapshot's ``extra_data`` dict.
    """
    qs_filter = {"pk__in": [x.pk for x in self.snapshots]}
    qs = BuildingSnapshot.objects.filter(**qs_filter)

    export_id = str(uuid.uuid4())
    exporter = Exporter(export_id, 'test_export', 'csv')

    fields = list(Exporter.fields_from_queryset(qs))
    fields.append("canonical_building__id")
    # A field absent from the model falls through to extra_data lookup.
    fields.append('my new field')

    export_filename = exporter.export_csv(qs, fields)
    # try/finally guarantees the handle is closed and the temporary
    # export file is deleted even when an assertion below fails
    # (the original leaked both on test failure).
    export_file = open(export_filename)
    try:
        reader = csv.reader(export_file)
        header = reader.next()
        # Non-model fields keep their raw name in the CSV header.
        self.assertEqual(header[len(fields) - 1], 'my new field')

        for i in range(len(self.snapshots)):
            row = reader.next()
            for j, field in enumerate(fields):
                qs_val = qs[i]
                for component in field.split("__"):
                    try:
                        qs_val = getattr(qs_val, component)
                    except AttributeError:
                        # Not a model attribute: look it up in extra_data.
                        qs_val = qs_val.extra_data.get(component)
                    if qs_val is None:
                        break
                # Related managers and missing values export as ''.
                if isinstance(qs_val, Manager) or qs_val is None:
                    qs_val = u''
                else:
                    qs_val = unicode(qs_val)
                self.assertEqual(qs_val, row[j])
    finally:
        export_file.close()
        os.remove(export_filename)