def _do_import_entries_v2(job: Job):
    """Import entries described in ``job.params`` into the Entity the job targets.

    For each imported record, an existing active Entry with the same name is
    updated, otherwise a new Entry is created. Names of records that fail
    validation are collected and reported in the final job status.

    Args:
        job: Job whose ``params`` holds the JSON import payload and whose
            ``target`` identifies the destination Entity.

    Side effects:
        Updates ``job.text``/``job.status`` for progress, cancellation and the
        final result; creates/updates Entry rows via the DRF serializers.
    """
    user: User = job.user
    entity: Entity = Entity.objects.get(id=job.target.id)

    import_serializer = EntryImportEntitySerializer(data=json.loads(job.params))
    # Fail fast on malformed job parameters. The previous code ignored the
    # return value of is_valid(), so invalid input surfaced later as an opaque
    # error when validated_data was accessed instead of a clear ValidationError.
    import_serializer.is_valid(raise_exception=True)

    context = {"request": DRFRequest(user)}
    total_count = len(import_serializer.validated_data["entries"])
    err_msg = []
    for index, entry_data in enumerate(import_serializer.validated_data["entries"]):
        job.text = "Now importing... (progress: [%5d/%5d])" % (index + 1, total_count)
        job.save(update_fields=["text"])

        # abort processing when job is canceled
        if job.is_canceled():
            job.status = Job.STATUS["CANCELED"]
            job.save(update_fields=["status"])
            return

        entry_data["schema"] = entity
        entry = Entry.objects.filter(
            name=entry_data["name"], schema=entity, is_active=True
        ).first()
        if entry:
            serializer = EntryUpdateSerializer(instance=entry, data=entry_data, context=context)
        else:
            serializer = EntryCreateSerializer(data=entry_data, context=context)

        try:
            serializer.is_valid(raise_exception=True)
            serializer.save()
        except ValidationError:
            # Record the failed entry name and keep importing the remaining ones.
            err_msg.append(entry_data["name"])

    if err_msg:
        text = "Imported Entry count: %d, Failed import Entry: %s" % (total_count, err_msg)
        job.update(status=Job.STATUS["WARNING"], text=text)
    else:
        text = "Imported Entry count: %d" % total_count
        job.update(status=Job.STATUS["DONE"], text=text)
def _do_import_entries(job: Job):
    """Import entries (legacy payload format) from ``job.params`` into the target Entity.

    For each record this creates the Entry when missing (or skips existing
    entries the user cannot write), sets each writable attribute value, calls
    the optional ``after_import_entry`` custom-view hook, indexes the entry in
    Elasticsearch and runs a WebHook notification job.

    Args:
        job: Job whose ``params`` is a JSON list of records with ``name`` and
            ``attrs`` keys and whose ``target`` identifies the Entity.
    """
    user: User = job.user
    entity: Entity = Entity.objects.get(id=job.target.id)
    import_data = json.loads(job.params)

    # get custom_view method to prevent executing check method in every loop processing
    custom_view_handler = None
    if custom_view.is_custom("after_import_entry", entity.name):
        custom_view_handler = "after_import_entry"

    total_count = len(import_data)

    # create or update entry
    for index, entry_data in enumerate(import_data):
        job.text = "Now importing... (progress: [%5d/%5d] for %s)" % (
            index + 1,
            total_count,
            entity.name,
        )
        job.save(update_fields=["text"])

        # abort processing when job is canceled
        if job.is_canceled():
            return

        entry: Entry = Entry.objects.filter(name=entry_data["name"], schema=entity).first()
        if not entry:
            entry = Entry.objects.create(name=entry_data["name"], schema=entity, created_user=user)

            # create job to notify create event to the WebHook URL
            job_notify: Job = Job.new_notify_create_entry(user, entry)

        elif not user.has_permission(entry, ACLType.Writable):
            continue

        else:
            # create job to notify edit event to the WebHook URL
            job_notify = Job.new_notify_update_entry(user, entry)

        entry.complement_attrs(user)
        for attr_name, value in entry_data["attrs"].items():
            # There should be only one active EntityAttr that is specified by
            # name and Entity. Once there are multiple EntityAttrs, it must be
            # an abnormal situation. In that case, this aborts import processing
            # for this entry and reports it as an error.
            attr_query = entry.attrs.filter(
                schema__name=attr_name,
                is_active=True,
                schema__parent_entity=entry.schema,
            )
            matched = attr_query.count()
            if matched > 1:
                Logger.error(
                    "[task.import_entry] Abnormal entry was detected(%s:%d)"
                    % (entry.name, entry.id)
                )
                break

            # If no matching Attribute exists for this entry (e.g. the user has
            # no readable permission for the target Attribute, or only inactive
            # ones remain), it won't be set. The previous code pre-checked with
            # a looser filter and could then call .last() on an empty queryset,
            # crashing on attr.schema with AttributeError.
            if matched == 0:
                continue

            attr: Attribute = attr_query.last()
            if not user.has_permission(attr.schema, ACLType.Writable) or not user.has_permission(
                attr, ACLType.Writable
            ):
                continue

            input_value = attr.convert_value_to_register(value)
            # Schema-level write permission was already verified by the guard
            # above, so only the "value actually changed" check remains here.
            if attr.is_updated(input_value):
                attr.add_value(user, input_value)

                # call custom-view processing corresponding to import entry
                if custom_view_handler:
                    custom_view.call_custom(
                        custom_view_handler, entity.name, user, entry, attr, value
                    )

        # register entry to the Elasticsearch
        entry.register_es()

        # run notification job
        job_notify.run()

    job.update(status=Job.STATUS["DONE"], text="")