def upload_storage_locations_spreadsheet(request):
    """Bulk-update MuseumObject storage locations from an uploaded spreadsheet.

    GET renders an empty :class:`BulkImportForm`; POST validates the form,
    feeds the uploaded file through :class:`BulkDataImportHandler`, and for
    each row updates the storage row/bay/shelf fields of the MuseumObject
    matching the row's registration number.  Progress and per-row failures
    are reported via the Django messages framework.

    :param request: the current ``HttpRequest``
    :returns: rendered ``storage_bulkupdate.html`` response
    """
    if request.method == 'POST':
        form = BulkImportForm(request.POST, request.FILES)
        if form.is_valid():
            spreadsheet = form.files['spreadsheet']
            # Mutable cell so the nested closure can increment the counter
            # (pre-Python-3 style; ``nonlocal`` is unavailable on an int).
            updated_records = {'value': 0}

            def update_storage_locations(headers, values):
                # Row handler invoked by BulkDataImportHandler for each
                # spreadsheet row: (headers, values) are parallel lists.
                #
                # Pre-bind reg_num so the except clause below cannot hit a
                # NameError when headers.index() itself raises ValueError
                # (i.e. the 'registration number' column is missing).
                reg_num = None
                try:
                    reg_num = values[headers.index('registration number')]
                    reg_num = int(reg_num)
                except ValueError:
                    # Covers both a missing header (index()) and a
                    # non-numeric cell (int()).
                    messages.add_message(
                        request, messages.ERROR,
                        "Registration number not a number: '%s'" % reg_num)
                    return
                try:
                    mo = MuseumObject.objects.get(registration_number=reg_num)
                    mo.storage_row = str(values[headers.index('row')])
                    mo.storage_bay = str(values[headers.index('bay')])
                    mo.storage_shelf_drawer = str(
                        values[headers.index('shelf/drawer')])
                    mo.save()
                    updated_records['value'] += 1
                except MuseumObject.DoesNotExist:
                    messages.add_message(
                        request, messages.ERROR,
                        "Unable to find registration number: %s" % reg_num)
                # Keep Django's query log from growing unboundedly during
                # a long import (relevant with DEBUG=True).
                db.reset_queries()
                # Log + commit every 100 successful updates.  The non-zero
                # guard stops this firing on every row while the counter is
                # still 0 (e.g. a run of unmatched registration numbers).
                if updated_records['value'] and \
                        (updated_records['value'] % 100) == 0:
                    logger.info(
                        "Updated %s records, last update was: %s"
                        % (updated_records['value'], reg_num))
                    db.transaction.commit()

            bi = BulkDataImportHandler()
            bi.add_function_mapping(update_storage_locations)
            bi.process_spreadsheet(spreadsheet)
            # Final commit for the tail of rows since the last batch.
            db.transaction.commit()
            messages.add_message(
                request, messages.SUCCESS,
                "Updated %s records" % updated_records['value'])
    else:
        form = BulkImportForm()
    return render(request, 'storage_bulkupdate.html', {
        'form': form,
        'title': 'Bulk update storage locations'
    })
def test_missing_unique_field(self):
    """A unique-header name absent from the spreadsheet must raise."""
    # names.xlsx columns: 'First Name', 'Last Name', 'Age', 'ID' —
    # 'PersonID' is deliberately not among them.
    spreadsheet = 'bulkimport/testdata/names.xlsx'
    handler = BulkDataImportHandler()
    field_map = {
        'First Name': 'first_name',
        'Last Name': 'last_name',
        'Age': 'age',
    }
    handler.add_mapping(Person, field_map, 'PersonID', 'id')
    with self.assertRaises(MissingUniqueHeaderException):
        affected_records, stats = handler.process_spreadsheet(spreadsheet)
def test_unique_field(self):
    """Importing with a valid unique column updates/creates three people."""
    # NOTE(review): a method with this exact name appears twice in SOURCE;
    # if both live in the same TestCase, the later definition shadows this
    # one and it never runs — confirm and deduplicate.
    # names.xlsx columns: 'First Name', 'Last Name', 'Age', 'ID'.
    spreadsheet = 'bulkimport/testdata/names.xlsx'
    handler = BulkDataImportHandler()
    handler.add_mapping(
        Person,
        {
            'First Name': 'first_name',
            'Last Name': 'last_name',
            'Age': 'age',
            'ID': 'id',
        },
        'ID',
        'id',
    )
    affected_records, stats = handler.process_spreadsheet(spreadsheet)
    self.assertEqual(3, len(affected_records))
    self.assertEqual('Bob', affected_records[0][0].first_name)
    self.assertEqual(50, affected_records[2][0].age)
def test_read_simple_spreadsheet(self):
    """A plain header→field mapping imports every row of names.xlsx."""
    spreadsheet = 'bulkimport/testdata/names.xlsx'
    column_to_field = {
        'First Name': 'first_name',
        'Last Name': 'last_name',
        'Age': 'age',
    }
    handler = BulkDataImportHandler()
    handler.add_mapping(Person, column_to_field)
    affected_records, stats = handler.process_spreadsheet(spreadsheet)
    # Three data rows, spot-check first and last records.
    self.assertEqual(3, len(affected_records))
    self.assertEqual('Bob', affected_records[0][0].first_name)
    self.assertEqual(50, affected_records[2][0].age)
def test_read_spreadsheet_case_insensitive(self):
    """Header matching ignores case: mixed-case keys still map correctly."""
    spreadsheet = 'bulkimport/testdata/names.xlsx'
    handler = BulkDataImportHandler()
    # Deliberately odd casing — must still match 'First Name'/'Last Name'.
    handler.add_mapping(Person, {
        'First name': 'first_name',
        'Last NaMe': 'last_name',
        'Age': 'age',
    })
    affected_records, stats = handler.process_spreadsheet(spreadsheet)
    self.assertEqual(3, len(affected_records))
    self.assertEqual('Bob', affected_records[0][0].first_name)
    self.assertEqual(50, affected_records[2][0].age)
def test_unique_field(self):
    """Importing with a valid unique column ('ID') affects three records."""
    # NOTE(review): duplicate of an earlier test_unique_field in SOURCE;
    # if both are in one TestCase, only one of them actually runs —
    # confirm and remove the redundant copy.
    # names.xlsx columns: 'First Name', 'Last Name', 'Age', 'ID'.
    spreadsheet = 'bulkimport/testdata/names.xlsx'
    mapping = {
        'First Name': 'first_name',
        'Last Name': 'last_name',
        'Age': 'age',
        'ID': 'id',
    }
    handler = BulkDataImportHandler()
    handler.add_mapping(Person, mapping, 'ID', 'id')
    affected_records, stats = handler.process_spreadsheet(spreadsheet)
    self.assertEqual(3, len(affected_records))
    self.assertEqual('Bob', affected_records[0][0].first_name)
    self.assertEqual(50, affected_records[2][0].age)