def export_buildings(export_id, export_name, export_type, building_ids,
                     export_model='seed.BuildingSnapshot',
                     selected_fields=None):
    """Export the given buildings to a file, reporting progress via cache.

    :param export_id: unique id used to key the progress-cache entry.
    :param export_name: name for the generated export file.
    :param export_type: file format ('csv', 'xls', ...); validated below.
    :param building_ids: primary keys of the buildings to export.
    :param export_model: dotted ``app.Model`` path of the model to query.
    :param selected_fields: optional list of field names to include.
    """
    model = apps.get_model(*export_model.split("."))
    selected_buildings = model.objects.filter(pk__in=building_ids)

    def _row_cb(i):
        # Update the cached progress record; -1 is the error sentinel.
        data = get_cache("export_buildings__%s" % export_id)
        data['buildings_processed'] = i
        # `not total` already covers 0 (and None/empty), so the original
        # extra `== 0` test was redundant.
        if not data['total_buildings']:
            data['progress'] = 100
        else:
            data['progress'] = (i * 100) / data['total_buildings']
        set_cache("export_buildings__%s" % export_id, data['status'], data)

    exporter = Exporter(export_id, export_name, export_type)
    if not exporter.valid_export_type():
        _row_cb(-1)  # this means there was an error
        return
    exporter.export(selected_buildings, selected_fields, _row_cb)
    # file return value is not used
    _row_cb(selected_buildings.count())  # means we're done!
def test_xls_export(self):
    """Ensures exported XLS data matches source data"""
    qs_filter = {"pk__in": [x.pk for x in self.snapshots]}
    qs = BuildingSnapshot.objects.filter(**qs_filter)

    export_id = str(uuid.uuid4())
    exporter = Exporter(export_id, 'test_export', 'csv')
    fields = list(Exporter.fields_from_queryset(qs))
    fields.append("canonical_building__id")
    export_filename = exporter.export_xls(qs, fields)

    export_file = xlrd.open_workbook(export_filename)
    worksheet = export_file.sheet_by_name(export_file.sheet_names()[0])
    # The appended canonical-building column is labelled 'ID' in the header.
    self.assertEqual(worksheet.cell_value(0, len(fields) - 1), 'ID')
    for i in range(len(self.snapshots)):
        for j in range(len(fields)):
            field = fields[j]
            components = field.split("__")
            # Walk double-underscore paths (e.g. canonical_building__id)
            # attribute by attribute.
            qs_val = qs[i]
            for component in components:
                qs_val = getattr(qs_val, component)
                # PEP 8: compare against None with `is`, never `==`
                # (== can be overridden, e.g. by ORM expression objects).
                if qs_val is None:
                    break
            if isinstance(qs_val, Manager) or qs_val is None:
                qs_val = u''
            else:
                qs_val = unicode(qs_val)
            xls_val = worksheet.cell_value(i + 1, j)
            self.assertEqual(qs_val, xls_val)
def test_xls_export(self):
    """Ensures exported XLS data matches source data"""
    snapshot_pks = [snap.pk for snap in self.snapshots]
    qs = BuildingSnapshot.objects.filter(pk__in=snapshot_pks)

    exporter = Exporter(str(uuid.uuid4()), 'test_export', 'csv')
    fields = list(Exporter.fields_from_queryset(qs))
    fields.append("canonical_building__id")

    workbook = xlrd.open_workbook(exporter.export_xls(qs, fields))
    sheet = workbook.sheet_by_name(workbook.sheet_names()[0])
    # The appended canonical-building column is labelled 'ID' in the header.
    self.assertEqual(sheet.cell_value(0, len(fields) - 1), 'ID')

    for row_idx in range(len(self.snapshots)):
        for col_idx, field in enumerate(fields):
            # Resolve double-underscore paths one attribute at a time,
            # stopping early once a link in the chain is None.
            expected = qs[row_idx]
            for part in field.split("__"):
                expected = getattr(expected, part)
                if expected is None:
                    break
            if isinstance(expected, Manager) or expected is None:
                expected = u''
            else:
                expected = unicode(expected)
            self.assertEqual(expected, sheet.cell_value(row_idx + 1, col_idx))
def test_csv_export(self):
    """Ensures exported CSV data matches source data"""
    qs = BuildingSnapshot.objects.filter(
        pk__in=[snap.pk for snap in self.snapshots]
    )

    exporter = Exporter(str(uuid.uuid4()), 'test_export', 'csv')
    fields = list(Exporter.fields_from_queryset(qs))
    # Exact set of columns the exporter is expected to discover.
    expected_fields = [
        'owner_address', 'owner_postal_code', 'owner_email', 'postal_code',
        'occupied_floor_area', 'custom_id_1', 'extra_data', 'state_province',
        'tax_lot_id', 'address_line_2', 'address_line_1', 'lot_number',
        'year_ending', 'property_notes', 'generation_date', 'energy_alerts',
        'space_alerts', 'site_eui_weather_normalized', 'created',
        'energy_score', 'block_number', 'building_count', 'owner',
        'source_eui', 'extra_data_sources', 'city', 'confidence', 'district',
        'best_guess_confidence', 'site_eui', 'building_certification',
        'modified', 'match_type', 'source_eui_weather_normalized', u'id',
        'property_name', 'conditioned_floor_area', 'pm_property_id',
        'use_description', 'source_type', 'year_built', 'release_date',
        'gross_floor_area', 'owner_city_state', 'owner_telephone',
        'recent_sale_date'
    ]
    self.assertItemsEqual(fields, expected_fields)
    fields.append("canonical_building__id")

    export_filename = exporter.export_csv(qs, fields)
    self.assertTrue(os.path.exists(export_filename))

    export_file = open(export_filename)
    reader = csv.reader(export_file)
    # The appended canonical-building column is labelled 'ID'.
    header = reader.next()
    self.assertEqual(header[len(fields) - 1], 'ID')

    for row_idx in range(len(self.snapshots)):
        row = reader.next()
        for col_idx, field in enumerate(fields):
            # Resolve double-underscore attribute paths, stopping early
            # once a link in the chain is None.
            value = qs[row_idx]
            for part in field.split("__"):
                value = getattr(value, part)
                if value is None:
                    break
            if isinstance(value, Manager) or value is None:
                value = u''
            else:
                value = unicode(value)
            self.assertEqual(value, row[col_idx])

    export_file.close()
    os.remove(export_filename)
def export_buildings(export_id, export_name, export_type, building_ids,
                     export_model='seed.BuildingSnapshot',
                     selected_fields=None):
    """Export the given buildings to a file, reporting progress via cache.

    :param export_id: unique id used to key the progress-cache entry.
    :param export_name: name for the generated export file.
    :param export_type: file format ('csv', 'xls', ...); validated below.
    :param building_ids: primary keys of the buildings to export.
    :param export_model: dotted ``app.Model`` path of the model to query.
    :param selected_fields: optional list of field names to include.
    """
    model = get_model(*export_model.split("."))
    selected_buildings = model.objects.filter(pk__in=building_ids)

    def _row_cb(i):
        # Progress is stored as a raw row count; -1 is the error sentinel.
        set_cache_raw("export_buildings__%s" % export_id, i)

    exporter = Exporter(export_id, export_name, export_type)
    if not exporter.valid_export_type():
        _row_cb(-1)  # this means there was an error
        return
    # Discard the returned file path: binding it to `file` shadowed the
    # Python 2 builtin and the variable was never read.
    exporter.export(selected_buildings, selected_fields, _row_cb)
    _row_cb(selected_buildings.count())  # means we're done!
def test_csv_export_extra_data(self):
    """Ensures exported CSV data matches source data"""
    qs_filter = {"pk__in": [x.pk for x in self.snapshots]}
    qs = BuildingSnapshot.objects.filter(**qs_filter)

    export_id = str(uuid.uuid4())
    exporter = Exporter(export_id, 'test_export', 'csv')
    fields = list(Exporter.fields_from_queryset(qs))
    fields.append("canonical_building__id")
    fields.append('my new field')
    export_filename = exporter.export_csv(qs, fields)

    export_file = open(export_filename)
    reader = csv.reader(export_file)
    # The extra-data column keeps its raw name in the header.
    header = reader.next()
    self.assertEqual(header[len(fields) - 1], 'my new field')
    for i in range(len(self.snapshots)):
        row = reader.next()
        for j in range(len(fields)):
            field = fields[j]
            components = field.split("__")
            qs_val = qs[i]
            for component in components:
                try:
                    qs_val = getattr(qs_val, component)
                except AttributeError:
                    # Not a model attribute: fall back to the extra_data
                    # mapping (yields None when the key is absent).
                    qs_val = qs_val.extra_data.get(component)
                # PEP 8: compare against None with `is`, never `==`
                # (== can be overridden, e.g. by ORM expression objects).
                if qs_val is None:
                    break
            if isinstance(qs_val, Manager) or qs_val is None:
                qs_val = u''
            else:
                qs_val = unicode(qs_val)
            csv_val = row[j]
            self.assertEqual(qs_val, csv_val)
    export_file.close()
    os.remove(export_filename)
def test_csv_export_extra_data(self):
    """Ensures exported CSV data matches source data"""
    snapshot_pks = [snap.pk for snap in self.snapshots]
    qs = BuildingSnapshot.objects.filter(pk__in=snapshot_pks)

    exporter = Exporter(str(uuid.uuid4()), 'test_export', 'csv')
    fields = list(Exporter.fields_from_queryset(qs))
    fields.append("canonical_building__id")
    fields.append('my new field')

    export_filename = exporter.export_csv(qs, fields)
    export_file = open(export_filename)
    reader = csv.reader(export_file)

    # The extra-data column keeps its raw name in the header.
    header = reader.next()
    self.assertEqual(header[len(fields) - 1], 'my new field')

    for row_idx in range(len(self.snapshots)):
        row = reader.next()
        for col_idx, field in enumerate(fields):
            # Resolve double-underscore paths attribute by attribute;
            # unknown names resolve through the extra_data mapping.
            expected = qs[row_idx]
            for part in field.split("__"):
                try:
                    expected = getattr(expected, part)
                except AttributeError:
                    expected = expected.extra_data.get(part)
                if expected is None:
                    break
            if isinstance(expected, Manager) or expected is None:
                expected = u''
            else:
                expected = unicode(expected)
            self.assertEqual(expected, row[col_idx])

    export_file.close()
    os.remove(export_filename)
def export_buildings(export_id, export_name, export_type, building_ids,
                     export_model='seed.BuildingSnapshot',
                     selected_fields=None):
    """Export the given buildings to a file, reporting progress via cache.

    :param export_id: unique id used to key the progress-cache entry.
    :param export_name: name for the generated export file.
    :param export_type: file format ('csv', 'xls', ...); validated below.
    :param building_ids: primary keys of the buildings to export.
    :param export_model: dotted ``app.Model`` path of the model to query.
    :param selected_fields: optional list of field names to include.
    """
    model = get_model(*export_model.split("."))
    selected_buildings = model.objects.filter(pk__in=building_ids)

    def _row_cb(i):
        # Cache the raw row count keyed by export id; -1 is the error
        # sentinel.
        cache.set("export_buildings__%s" % export_id, i)

    exporter = Exporter(export_id, export_name, export_type)
    if not exporter.valid_export_type():
        _row_cb(-1)  # this means there was an error
        return
    # Discard the returned file path: binding it to `file` shadowed the
    # Python 2 builtin and the variable was never read.
    exporter.export(selected_buildings, selected_fields, _row_cb)
    _row_cb(selected_buildings.count())  # means we're done!