def generic_import_csv(self, request, import_info):
    log_object_change(
        request.user.id,
        f"Import CSV: '{import_info.form_title}'",
    )
    header_list = import_info.header_list
    form_title = import_info.form_title
    # Because the files are small, upload them to memory
    # instead of using S3.
    request.upload_handlers.insert(0, TemporaryFileUploadHandler(request))
    request.upload_handlers.insert(0, MemoryFileUploadHandler(request))
    if request.method == "POST":
        form = CsvImportForm(
            header_list,
            form_title,
            request.POST,
            request.FILES,
        )
        if form.is_valid():
            success, message = self.process_csv(request, import_info)
            if success:
                return redirect("..")
    else:
        form = CsvImportForm(header_list, form_title)
    payload = {"form": form}
    return render(request, "admin/csv_form.html", payload)

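# CsvImportForm itself is not shown above. A minimal sketch of what it might
# look like, assuming it only needs a single upload field named "csv_file"
# (matching request.FILES["csv_file"] in process_csv below) plus the header
# list and title for display in the template; the field and attribute names
# here are assumptions, not the real implementation.
from django import forms


class CsvImportForm(forms.Form):
    csv_file = forms.FileField()

    def __init__(self, header_list, form_title, *args, **kwargs):
        # Keep the expected headers and the title so the template can tell
        # the user what format the uploaded CSV must follow.
        self.header_list = header_list
        self.form_title = form_title
        super().__init__(*args, **kwargs)
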
def export_csv(self, request):
    log_object_change(
        request.user.id,
        "Export CSV",
    )
    e = export_csv_from_import(self.import_info.key)
    return export_to_csv(e.queryset, e.yield_data)

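# export_csv_from_import and export_to_csv are defined elsewhere. A minimal
# sketch of what export_to_csv could look like, assuming yield_data(queryset)
# is a generator that yields one CSV row (including a header row) at a time;
# that signature is inferred from the call site above, not taken from the
# source.
import csv

from django.http import HttpResponse


def export_to_csv(queryset, yield_data):
    response = HttpResponse(content_type="text/csv")
    response["Content-Disposition"] = 'attachment; filename="export.csv"'
    writer = csv.writer(response)
    for row in yield_data(queryset):
        writer.writerow(row)
    return response
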
def test_log_object_change_no_object(self):
    log_object_change(
        self.test_user.id,
        "test",
    )
    log_entry = LogEntry.objects.last()
    assert log_entry.change_message == "test"

def export_all_xls(self, request):
    log_object_change(
        request.user.id,
        f"Export all as .xlsx from {self.__class__.__name__}",
    )
    # Use the admin class's own queryset if it defines one,
    # otherwise export every row of the model.
    try:
        queryset = self.queryset_all
    except AttributeError:
        queryset = self.model.objects.all()
    return export_to_excel(queryset, self.export_func)

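# export_to_excel is also defined elsewhere. A rough sketch, assuming
# export_func(queryset) yields one spreadsheet row (a list of cell values)
# at a time, mirroring the yield_data sketch above; the signature and the
# use of openpyxl are assumptions.
import io

from django.http import HttpResponse
from openpyxl import Workbook

XLSX_CONTENT_TYPE = (
    "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
)


def export_to_excel(queryset, export_func):
    workbook = Workbook()
    worksheet = workbook.active
    for row in export_func(queryset):
        worksheet.append(row)
    buffer = io.BytesIO()
    workbook.save(buffer)
    response = HttpResponse(buffer.getvalue(), content_type=XLSX_CONTENT_TYPE)
    response["Content-Disposition"] = 'attachment; filename="export.xlsx"'
    return response
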
def export_selection_xlsx(self, _, request, queryset):
    log_object_change(
        request.user.id,
        "Export selection as .xlsx",
    )
    # _ is required because the function gets called with self passed in
    # twice. Something to do with adding the action in 'get_actions'.
    return export_to_excel(queryset, self.export_func)

def test_log_object_change_with_object(self):
    financial_year = FinancialYear.objects.last()
    log_object_change(
        self.test_user.id,
        "test",
        obj=financial_year,
    )
    log_entry = LogEntry.objects.last()
    assert log_entry.change_message == f"{str(financial_year)} test"
    assert log_entry.object_id == str(financial_year.pk)

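# log_object_change itself is not shown in this section. A minimal sketch
# that is consistent with the two tests above, writing directly to Django's
# admin LogEntry via LogEntry.objects.log_action; the real helper may well
# differ, so treat this as an assumption based on the assertions.
from django.contrib.admin.models import CHANGE, LogEntry
from django.contrib.contenttypes.models import ContentType


def log_object_change(user_id, message, obj=None):
    content_type_id = None
    object_id = None
    object_repr = ""
    if obj is not None:
        content_type_id = ContentType.objects.get_for_model(obj).pk
        object_id = obj.pk
        object_repr = str(obj)[:200]
        # The tests expect the object to prefix the change message.
        message = f"{obj} {message}"
    LogEntry.objects.log_action(
        user_id=user_id,
        content_type_id=content_type_id,
        object_id=object_id,
        object_repr=object_repr,
        action_flag=CHANGE,
        change_message=message,
    )
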
def save_model(self, request, obj, form, change):
    for group in form.cleaned_data["groups"]:
        if group.name in [
            "Finance Business Partner/BSCE",
            "Finance Administrator",
            "Gift and Hospitality Admin",
        ]:
            obj.is_staff = True
            break
    else:
        # No admin-type group was found (the loop finished without
        # breaking), so withdraw staff access unless the user is a
        # superuser.
        if not obj.is_superuser:
            obj.is_staff = False
    if len(form.cleaned_data["groups"]) > 0:
        log_object_change(
            request.user.id,
            f'user added to "{form.cleaned_data["groups"]}"',
            obj=obj,
        )
    super().save_model(request, obj, form, change)

def change_active_flag(self, request, queryset, new_active_value):
    if new_active_value is True:
        msg = "activated"
    else:
        msg = "deactivated"
    # Only touch (and log) the rows whose flag actually changes.
    q = queryset.filter(active=not new_active_value)
    for obj in q:
        log_object_change(
            request.user.id,
            msg,
            obj=obj,
        )
    rows_updated = q.update(active=new_active_value)
    if rows_updated == 1:
        message_bit = "1 {} was".format(queryset.model._meta.verbose_name)
    else:
        message_bit = "{} {} were".format(
            rows_updated, queryset.model._meta.verbose_name_plural
        )
    self.message_user(request, "{} successfully {}.".format(message_bit, msg))

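# change_active_flag is usually reached through a pair of admin actions. A
# possible wiring, following the same pattern as export_selection_xlsx above
# (bound methods registered in get_actions receive the model admin a second
# time, hence the throwaway argument); the action names are assumptions.
def activate_selection(self, _, request, queryset):
    return self.change_active_flag(request, queryset, True)


def deactivate_selection(self, _, request, queryset):
    return self.change_active_flag(request, queryset, False)


def get_actions(self, request):
    actions = super().get_actions(request)
    actions["activate_selection"] = (
        self.activate_selection,
        "activate_selection",
        "Activate the selected rows",
    )
    actions["deactivate_selection"] = (
        self.deactivate_selection,
        "deactivate_selection",
        "Deactivate the selected rows",
    )
    return actions
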
def process_csv(self, request, import_info):
    log_object_change(
        request.user.id,
        "Processing CSV",
    )
    import_file = request.FILES["csv_file"]
    # read() gives you the file contents as a bytes object, on which you
    # can call decode(). decode('cp1252') turns the bytes into a string
    # with a known encoding; cp1252 is used to handle single quotes in
    # the strings.
    t = io.StringIO(import_file.read().decode("cp1252"))
    success, message = import_info.my_check_headers(t)
    if success:
        t.seek(0)
        success, message = import_info.import_func(t)
    if not success:
        messages.error(request, "Error: " + message)
    return success, message

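# my_check_headers lives on import_info and is not shown here. A minimal
# sketch, assuming it simply compares the first row of the uploaded file
# against header_list and returns a (success, message) pair; the exact
# behaviour (case folding, whitespace trimming) is an assumption.
import csv


def my_check_headers(self, csv_file):
    reader = csv.reader(csv_file)
    first_row = next(reader, [])
    expected = [h.strip().lower() for h in self.header_list]
    found = [h.strip().lower() for h in first_row]
    if found != expected:
        return False, f"Expected headers {expected}, got {found}"
    return True, ""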