def restore_entry(self, job_id):
    """Job task: restore the soft-deleted Entry referenced by the Job's target.

    Runs only when the Job is ready to proceed; marks the Job PROCESSING,
    restores the Entry, fires the create-notification webhook Job, invokes the
    after_restore_entry custom view when one exists, then marks the Job DONE.
    """
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Claim the job before doing any work.
        job.update(Job.STATUS["PROCESSING"])

        target_entry = Entry.objects.get(id=job.target.id)
        target_entry.restore()

        # Clear the CREATING flag that the caller set before scheduling this job.
        target_entry.del_status(Entry.STATUS_CREATING)

        # Send notification to the registered webhook URL.
        job_notify = Job.new_notify_create_entry(job.user, target_entry)
        job_notify.run()

        # Run the custom-view hook when one is registered for this schema.
        if custom_view.is_custom("after_restore_entry", target_entry.schema.name):
            custom_view.call_custom(
                "after_restore_entry", target_entry.schema.name, job.user, target_entry
            )

        # NOTE(review): unlike the sibling restore_entry variant in this file,
        # this version does not call entry.register_es() — confirm intended.
        job.update(Job.STATUS["DONE"])
def restore_entry(self, job_id):
    """Job task: restore the soft-deleted Entry referenced by the Job's target.

    Marks the Job PROCESSING, restores the Entry, clears its CREATING flag,
    runs the after_restore_entry custom view if registered, re-indexes the
    Entry in Elasticsearch, and finally marks the Job DONE.
    """
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Claim the job before doing any work.
        job.update(Job.STATUS['PROCESSING'])

        target_entry = Entry.objects.get(id=job.target.id)
        target_entry.restore()

        # Clear the CREATING flag that the caller set before scheduling this job.
        target_entry.del_status(Entry.STATUS_CREATING)

        # Run the custom-view hook when one is registered for this schema.
        if custom_view.is_custom("after_restore_entry", target_entry.schema.name):
            custom_view.call_custom(
                "after_restore_entry", target_entry.schema.name, job.user, target_entry
            )

        # Push the restored entry's data to Elasticsearch.
        target_entry.register_es()

        job.update(Job.STATUS['DONE'])
def copy_entry(self, job_id):
    """Job task: copy the Job's target Entry to a new Entry named params['new_name'].

    If an Entry with the destination name already exists for the same schema it
    is reused as-is; otherwise the source Entry is cloned and indexed in
    Elasticsearch. The after_copy_entry custom view runs when registered.
    """
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Claim the job before doing any work.
        job.update(Job.STATUS['PROCESSING'])

        operator = User.objects.get(id=job.user.id)
        src_entry = Entry.objects.get(id=job.target.id)
        params = json.loads(job.params)

        dest_entry = Entry.objects.filter(
            schema=src_entry.schema, name=params['new_name']
        ).first()
        if not dest_entry:
            # No collision: clone the source entry and index the copy.
            dest_entry = src_entry.clone(operator, name=params['new_name'])
            dest_entry.register_es()

        if custom_view.is_custom("after_copy_entry", src_entry.schema.name):
            custom_view.call_custom(
                "after_copy_entry", src_entry.schema.name, operator,
                src_entry, dest_entry, params['post_data']
            )

        # Record the source entry's name and the created entry on the job.
        job.update(Job.STATUS['DONE'], 'original entry: %s' % src_entry.name, dest_entry)
def edit_entry_attrs(self, job_id):
    """Job task: apply the attribute edits stored in the Job's params to its target Entry.

    Each attribute value is converted, skipped when conversion fails or the
    value is unchanged, and otherwise appended as a new AttributeValue. The
    Entry is then re-indexed and its EDITING flag cleared.
    """
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Claim the job first so the same job is never executed twice.
        job.update(Job.STATUS['PROCESSING'])

        operator = User.objects.get(id=job.user.id)
        target_entry = Entry.objects.get(id=job.target.id)
        recv_data = json.loads(job.params)

        for info in recv_data['attrs']:
            attr = Attribute.objects.get(id=info['id'])

            try:
                converted_value = _convert_data_value(attr, info)
            except ValueError as e:
                # Skip malformed values but keep a trace for debugging.
                Logger.warning('(%s) attr_data: %s' % (e, str(info)))
                continue

            # Skip attributes whose value did not actually change.
            if not attr.is_updated(converted_value):
                continue

            # Append the new AttributeValue to this Attribute instance.
            attr.add_value(operator, converted_value)

        if custom_view.is_custom("after_edit_entry", target_entry.schema.name):
            custom_view.call_custom(
                "after_edit_entry", target_entry.schema.name, recv_data, operator, target_entry
            )

        # Push the edited entry's data to Elasticsearch.
        target_entry.register_es()

        # Editing finished: drop the EDITING flag.
        target_entry.del_status(Entry.STATUS_EDITING)

        job.update(Job.STATUS['DONE'])
def do_edit(request, entry_id, recv_data):
    """View: validate an entry edit request and schedule the edit as Jobs.

    Rejects duplicate names, invalid attribute payloads, and entries still
    being created. When the entry is renamed, an extra REGISTERED_REFERRALS
    job is chained after the edit job so referring entries get re-indexed.
    """
    user = User.objects.get(id=request.user.id)
    entry = Entry.objects.get(id=entry_id)

    # Reject when another entry of the same schema already has the new name.
    name_collision = Q(schema=entry.schema, name=recv_data['entry_name']) & ~Q(id=entry.id)
    if Entry.objects.filter(name_collision).exists():
        return HttpResponse('Duplicate name entry is existed', status=400)

    # Validate the contents of every posted attribute.
    err = _validate_input(recv_data, entry)
    if err:
        return err

    if entry.get_status(Entry.STATUS_CREATING):
        return HttpResponse('Target entry is now under processing', status=400)

    if custom_view.is_custom("do_edit_entry", entry.schema.name):
        # resp is an HttpResponse instance or subclass (e.g. JsonResponse).
        resp = custom_view.call_custom(
            "do_edit_entry", entry.schema.name, request, recv_data, user, entry
        )
        if resp:
            return resp

    # When the name changes, entries referring to this one must be re-indexed
    # in Elasticsearch, which is done by a REGISTERED_REFERRALS job.
    job_register_referrals = None
    if entry.name != recv_data['entry_name']:
        job_register_referrals = Job.new_register_referrals(user, entry)

    entry.name = recv_data['entry_name']

    # Flag the entry as under processing while the edit job runs.
    entry.set_status(Entry.STATUS_EDITING)
    entry.save()

    # Schedule the edit job (which also notifies any registered webhook).
    job_edit_entry = Job.new_edit(user, entry, params=recv_data)
    job_edit_entry.run()

    # Run the referral re-registration after the edit job because of the rename.
    if job_register_referrals:
        job_register_referrals.dependent_job = job_edit_entry
        job_register_referrals.run()

    return JsonResponse({
        'entry_id': entry.id,
        'entry_name': entry.name,
    })
def do_delete(request, entry_id, recv_data):
    """View: soft-delete an Entry and schedule deletion/notification Jobs.

    The entry is flagged inactive immediately (deletion is slow), a delete
    history record is written, and a notify job is chained to run before the
    actual delete job.
    """
    user = User.objects.get(id=request.user.id)
    ret = {}

    if not Entry.objects.filter(id=entry_id).exists():
        return HttpResponse('Failed to get an Entry object of specified id', status=400)

    entry = Entry.objects.filter(id=entry_id).get()

    if custom_view.is_custom("do_delete_entry", entry.schema.name):
        # When the custom view returns a usable response, hand it back to the
        # user; otherwise fall through to the default processing.
        resp = custom_view.call_custom("do_delete_entry", entry.schema.name, request, user, entry)
        if resp:
            return resp

    # Mark deleted up-front because the deleting job takes a long time.
    entry.is_active = False
    entry.save(update_fields=['is_active'])

    # Keep the entry name before it is deleted.
    ret['name'] = entry.name

    # Record an operation-history entry for this deletion.
    user.seth_entry_del(entry)

    # Build both jobs, then wire the notify job to run before the delete job.
    job_delete_entry = Job.new_delete(user, entry)
    job_notify_event = Job.new_notify_delete_entry(user, entry)

    # The notification is prioritized over the actual deletion.
    if job_delete_entry.dependent_job:
        job_notify_event.dependent_job = job_delete_entry.dependent_job
        job_notify_event.save(update_fields=['dependent_job'])
    job_notify_event.run()

    # Make the delete job depend on the notification job.
    job_delete_entry.dependent_job = job_notify_event
    job_delete_entry.save(update_fields=['dependent_job'])
    job_delete_entry.run()

    return JsonResponse(ret)
def create(request, entity_id):
    """View: render the entry-creation page for an Entity.

    Custom views may take over rendering either before the context is built
    (create_entry_without_context) or after (create_entry); otherwise the
    default create_entry.html template is used.
    """
    user = User.objects.get(id=request.user.id)

    if not Entity.objects.filter(id=entity_id).exists():
        return HttpResponse('Failed to get entity of specified id', status=400)

    entity = Entity.objects.get(id=entity_id)

    if custom_view.is_custom("create_entry_without_context", entity.name):
        # A custom view renders the page without the default context.
        return custom_view.call_custom(
            "create_entry_without_context", entity.name, request, user, entity
        )

    # Only attributes the user may write to are offered on the form.
    writable_attrs = [
        {
            'id': x.id,
            'type': x.type,
            'name': x.name,
            'is_mandatory': x.is_mandatory,
        }
        for x in entity.attrs.filter(is_active=True).order_by('index')
        if user.has_permission(x, ACLType.Writable)
    ]
    context = {
        'entity': entity,
        'form_url': '/entry/do_create/%s/' % entity.id,
        'redirect_url': '/entry/%s' % entity.id,
        'groups': Group.objects.filter(is_active=True),
        'attributes': writable_attrs,
    }

    if custom_view.is_custom("create_entry", entity.name):
        # A custom view renders the page with the default context.
        return custom_view.call_custom("create_entry", entity.name, request, user, entity, context)
    else:
        return render(request, 'create_entry.html', context)
def restore(self, request, pk):
    """API v2 action: restore a soft-deleted Entry synchronously.

    Refuses active entries and name collisions (the deleted-name suffix is
    stripped for the check). Runs the before/after restore custom views around
    the restore itself and finally fires the create-notification webhook Job.
    """
    entry: Entry = self.get_object()

    if entry.is_active:
        raise ValidationError("specified entry has not deleted")

    # Reject when an active entry would collide with the restored name
    # (the "_deleted_<timestamp>" suffix is removed for the comparison).
    if Entry.objects.filter(
        schema=entry.schema,
        name=re.sub(r"_deleted_[0-9_]*$", "", entry.name),
        is_active=True,
    ).exists():
        raise ValidationError("specified entry has already exist other")

    user: User = request.user

    if custom_view.is_custom("before_restore_entry_v2", entry.schema.name):
        custom_view.call_custom("before_restore_entry_v2", entry.schema.name, user, entry)

    entry.set_status(Entry.STATUS_CREATING)

    # Restore the entry itself.
    entry.restore()

    if custom_view.is_custom("after_restore_entry_v2", entry.schema.name):
        custom_view.call_custom("after_restore_entry_v2", entry.schema.name, user, entry)

    # Clear the CREATING flag that was set above.
    entry.del_status(Entry.STATUS_CREATING)

    # Send notification to the registered webhook URL.
    job_notify_event = Job.new_notify_create_entry(user, entry)
    job_notify_event.run()

    return Response(status=status.HTTP_201_CREATED)
def do_edit(request, entry_id, recv_data):
    """View: validate an entry edit request and schedule the edit as Jobs.

    Rejects duplicate names, invalid attribute payloads, and entries still
    being created. When the entry is renamed, a REGISTERED_REFERRALS job is
    also queued so that entries referring to this one get re-indexed.
    """
    user = User.objects.get(id=request.user.id)
    entry = Entry.objects.get(id=entry_id)
    tasks = []

    # Reject when another entry of the same schema already has the new name.
    query = Q(schema=entry.schema, name=recv_data['entry_name']) & ~Q(id=entry.id)
    if Entry.objects.filter(query).exists():
        return HttpResponse('Duplicate name entry is existed', status=400)

    # Validate the contents of every posted attribute.
    err = _validate_input(recv_data, entry)
    if err:
        return err

    if entry.get_status(Entry.STATUS_CREATING):
        return HttpResponse('Target entry is now under processing', status=400)

    if custom_view.is_custom("do_edit_entry", entry.schema.name):
        # resp is an HttpResponse instance or subclass (e.g. JsonResponse).
        resp = custom_view.call_custom(
            "do_edit_entry", entry.schema.name, request, recv_data, user, entry
        )
        if resp:
            return resp

    # When the name changes, entries referring to this one must be re-indexed
    # in Elasticsearch, which is done by a REGISTERED_REFERRALS job.
    if entry.name != recv_data['entry_name']:
        tasks.append(Job.new_register_referrals(user, entry))

    entry.name = recv_data['entry_name']

    # Flag the entry as under processing while the edit job runs.
    entry.set_status(Entry.STATUS_EDITING)
    entry.save()

    # Create a new job to edit the entry.
    tasks.append(Job.new_edit(user, entry, params=recv_data))

    # Run all jobs created in this request.
    # (was a side-effect list comprehension, which built a throwaway list)
    for task in tasks:
        task.run()

    return JsonResponse({
        'entry_id': entry.id,
        'entry_name': entry.name,
    })
def import_obj(self, instance, data, dry_run):
    """Import hook: refuse duplicate Entity names, run the import_entity
    custom view, then delegate to the parent importer.

    Raises:
        RuntimeError: when an Entity with the same name but a different id
            already exists, or when the custom view reports an error.
    """
    # Will not import a duplicate entity.
    # (was two separate queries — .exists() then .get(); one .first() suffices)
    entity = Entity.objects.filter(name=data["name"]).first()
    if entity is not None:
        if "id" not in data or not data["id"] or entity.id != data["id"]:
            raise RuntimeError("There is a duplicate entity object (%s)" % data["name"])

    # Custom-view hook: a non-None return value aborts the import.
    if custom_view.is_custom("import_entity"):
        error = custom_view.call_custom("import_entity", None, instance, data)
        if error:
            raise RuntimeError(error)

    super(EntityResource, self).import_obj(instance, data, dry_run)
def do_create(request, recv_data):
    """View: validate an entity creation request and schedule it as a Job.

    Each posted attribute of object type must carry at least one valid
    referral Entity id. The Entity row is created immediately with the
    CREATING flag; the attribute creation runs in a CREATE_ENTITY job.
    """
    # Validation checks.
    for attr in recv_data['attrs']:
        # Normalize the payload: make sure ref_ids is always present.
        if 'ref_ids' not in attr:
            attr['ref_ids'] = []

        if int(attr['type']) & AttrTypeValue['object'] and not attr['ref_ids']:
            return HttpResponse('Need to specify enabled referral ids', status=400)

        if any([not Entity.objects.filter(id=x).exists() for x in attr['ref_ids']]):
            return HttpResponse('Specified referral is invalid', status=400)

    # The user who is making this request.
    user = User.objects.get(id=request.user.id)

    if custom_view.is_custom('create_entity'):
        resp = custom_view.call_custom('create_entity', None, recv_data['name'], recv_data['attrs'])
        if resp:
            return resp

    # Create the Entity row with the CREATING flag set.
    entity = Entity(
        name=recv_data['name'],
        note=recv_data['note'],
        created_user=user,
        status=Entity.STATUS_CREATING,
    )

    # NOTE(review): this assignment replaces the whole status value (including
    # STATUS_CREATING set above) rather than OR-ing the flag — confirm intended.
    if recv_data['is_toplevel']:
        entity.status = Entity.STATUS_TOP_LEVEL

    entity.save()

    # Schedule the job that builds the entity's attributes.
    job = Job.new_create_entity(user, entity, params=recv_data)
    job.run()

    return JsonResponse({
        'entity_id': entity.id,
        'entity_name': entity.name,
        'msg': 'Success to create Entity "%s"' % entity.name,
    })
def do_delete(request, entry_id, recv_data):
    """View: soft-delete an Entry (permission-checked) and schedule Jobs.

    Requires Full permission on the entry. The entry is flagged inactive
    immediately (deletion is slow), a delete history record is written, and a
    notify job is chained to run before the actual delete job.
    """
    entry, error = get_obj_with_check_perm(request.user, Entry, entry_id, ACLType.Full)
    if error:
        return error

    if custom_view.is_custom("do_delete_entry", entry.schema.name):
        # When the custom view returns a usable response, hand it back to the
        # user; otherwise fall through to the default processing.
        resp = custom_view.call_custom(
            "do_delete_entry", entry.schema.name, request, request.user, entry
        )
        if resp:
            return resp

    # Mark deleted up-front because the deleting job takes a long time.
    entry.is_active = False
    entry.save(update_fields=["is_active"])

    # Keep the entry name before it is deleted.
    ret = {}
    ret["name"] = entry.name

    # Record an operation-history entry for this deletion.
    request.user.seth_entry_del(entry)

    # Build both jobs, then wire the notify job to run before the delete job.
    job_delete_entry = Job.new_delete(request.user, entry)
    job_notify_event = Job.new_notify_delete_entry(request.user, entry)

    # The notification is prioritized over the actual deletion.
    if job_delete_entry.dependent_job:
        job_notify_event.dependent_job = job_delete_entry.dependent_job
        job_notify_event.save(update_fields=["dependent_job"])
    job_notify_event.run()

    # Make the delete job depend on the notification job.
    job_delete_entry.dependent_job = job_notify_event
    job_delete_entry.save(update_fields=["dependent_job"])
    job_delete_entry.run()

    return JsonResponse(ret)
def do_edit(request, entry_id, recv_data):
    """View: validate an entry edit request and delegate the attribute edits
    to the edit_entry_attrs celery task.

    The custom view here uses the (is_continue, code, msg) tuple protocol:
    when is_continue is falsy the request is aborted with the given status.
    """
    user = User.objects.get(id=request.user.id)
    entry = Entry.objects.get(id=entry_id)

    # Reject when another entry of the same schema already has the new name.
    query = Q(schema=entry.schema, name=recv_data['entry_name']) & ~Q(id=entry.id)
    if Entry.objects.filter(query).exists():
        return HttpResponse('Duplicate name entry is existed', status=400)

    # Validate the contents of every posted attribute.
    err = _validate_input(recv_data, entry)
    if err:
        return err

    if entry.get_status(Entry.STATUS_CREATING):
        return HttpResponse('Target entry is now under processing', status=400)

    if custom_view.is_custom("do_edit_entry", entry.schema.name):
        # (was call_custom(*[...]) — splatting a literal list adds nothing)
        (is_continue, code, msg) = custom_view.call_custom(
            "do_edit_entry", entry.schema.name, request, recv_data, user, entry
        )
        if not is_continue:
            return HttpResponse(msg, status=code)

    # Update the name of the Entry object.
    entry.name = recv_data['entry_name']

    # Flag the entry as under processing while the edit task runs.
    entry.set_status(Entry.STATUS_EDITING)
    entry.save()

    # Create a new job and hand the attribute edits to the celery task.
    job = Job.new_edit(user, entry, params=recv_data)
    edit_entry_attrs.delay(user.id, entry.id, job.id)

    return JsonResponse({
        'entry_id': entry.id,
        'entry_name': entry.name,
    })
def do_create(request, entity_id, recv_data):
    """View: validate an entry creation request and delegate attribute
    creation to the create_entry_attrs celery task.

    The custom view here uses the (is_continue, resp, msg) tuple protocol:
    resp may be an int status code or a ready JsonResponse.
    """
    # Objects referenced by the processing below.
    user = User.objects.get(id=request.user.id)
    entity = Entity.objects.get(id=entity_id)

    # Reject when an entry of the same name already exists for this entity.
    if Entry.objects.filter(schema=entity_id, name=recv_data['entry_name']).exists():
        return HttpResponse('Duplicate name entry is existed', status=400)

    # Validate the contents of every posted attribute.
    err = _validate_input(recv_data, entity)
    if err:
        return err

    if custom_view.is_custom("do_create_entry", entity.name):
        (is_continue, resp, msg) = custom_view.call_custom(
            "do_create_entry", entity.name, request, recv_data, user, entity
        )
        if not is_continue:
            if isinstance(resp, int):
                return HttpResponse(msg, status=resp)
            elif isinstance(resp, JsonResponse):
                return resp

    # Create the Entry row with the CREATING flag set.
    entry = Entry.objects.create(
        name=recv_data['entry_name'],
        created_user=user,
        schema=entity,
        status=Entry.STATUS_CREATING,
    )

    # Create a job and hand the attribute creation to the celery task.
    job = Job.new_create(user, entry, params=recv_data)
    create_entry_attrs.delay(user.id, entry.id, job.id)

    return JsonResponse({
        'entry_id': entry.id,
        'entry_name': entry.name,
    })
def do_create(request, entity_id, recv_data):
    """View: validate an entry creation request (permission-checked) and
    schedule the creation as a Job.

    Requires Writable permission on the Entity. The Entry row is created
    immediately with the CREATING flag; attribute creation runs in the job.
    """
    # Objects referenced by the processing below.
    entity, error = get_obj_with_check_perm(request.user, Entity, entity_id, ACLType.Writable)
    if error:
        return error

    # Reject when an entry of the same name already exists for this entity.
    if Entry.objects.filter(schema=entity_id, name=recv_data["entry_name"]).exists():
        return HttpResponse("Duplicate name entry is existed", status=400)

    # Validate the contents of every posted attribute.
    err = _validate_input(recv_data, entity)
    if err:
        return err

    if custom_view.is_custom("do_create_entry", entity.name):
        # resp is an HttpResponse instance or subclass (e.g. JsonResponse).
        resp = custom_view.call_custom(
            "do_create_entry", entity.name, request, recv_data, request.user, entity
        )
        if resp:
            return resp

    # Create the Entry row with the CREATING flag set.
    entry = Entry.objects.create(
        name=recv_data["entry_name"],
        created_user=request.user,
        schema=entity,
        status=Entry.STATUS_CREATING,
    )

    # Schedule the job that builds the entry's attributes.
    job_create_entry = Job.new_create(request.user, entry, params=recv_data)
    job_create_entry.run()

    return JsonResponse({
        "entry_id": entry.id,
        "entry_name": entry.name,
    })
def edit(request, entry_id):
    """View: render the entry edit page.

    Refuses entries that are still being created or have been deleted. A
    registered edit_entry custom view may take over rendering.
    """
    user = User.objects.get(id=request.user.id)

    if not Entry.objects.filter(id=entry_id).exists():
        return HttpResponse('Failed to get an Entry object of specified id', status=400)

    entry = Entry.objects.get(id=entry_id)

    # Do not show the edit page while the entry is still being created.
    if entry.get_status(Entry.STATUS_CREATING):
        return HttpResponse('Target entry is now under processing', status=400)

    if not entry.is_active:
        return HttpResponse('Target entry has been deleted', status=400)

    # Make sure the entry has Attribute rows for all of its schema's attrs.
    entry.complement_attrs(user)

    context = {
        'entry': entry,
        'groups': Group.objects.filter(is_active=True),
        'attributes': entry.get_available_attrs(user, ACLType.Writable, get_referral_entries=True),
        'form_url': '/entry/do_edit/%s' % entry.id,
        'redirect_url': '/entry/show/%s' % entry.id,
    }

    if custom_view.is_custom("edit_entry", entry.schema.name):
        # A custom view renders the page.
        return custom_view.call_custom("edit_entry", entry.schema.name, request, user, entry, context)
    else:
        return render(request, 'edit_entry.html', context)
def do_import_data(request, entity_id, context):
    """View: parse an uploaded YAML payload and schedule an import Job.

    Parsing errors map to 400 responses; any other exception maps to 500.
    The import itself (creating/updating entries) runs in a Job.
    """
    entity = Entity.objects.filter(id=entity_id, is_active=True).first()
    if not entity:
        return HttpResponse("Couldn't parse uploaded file", status=400)

    try:
        data = yaml.load(context, Loader=yaml.FullLoader)
    except yaml.parser.ParserError:
        return HttpResponse("Couldn't parse uploaded file", status=400)
    except ValueError as e:
        return HttpResponse("Invalid value is found: %s" % e, status=400)
    except yaml.scanner.ScannerError:
        return HttpResponse("Couldn't scan uploaded file", status=400)
    except Exception as e:
        return HttpResponse("Unknown exception: %s" % e, status=500)

    if not Entry.is_importable_data(data):
        return HttpResponse("Uploaded file has invalid data structure to import", status=400)

    if custom_view.is_custom("import_entry", entity.name):
        # When the custom view returns a usable response, hand it back to the
        # user; otherwise fall through to the default processing.
        resp = custom_view.call_custom("import_entry", entity.name, request.user, entity, data)
        if resp:
            return resp

    # Schedule the job that creates or updates entries from the parsed data.
    job = Job.new_import(request.user, entity, text="Preparing to import data", params=data)
    job.run()

    return HttpResponseSeeOther("/entry/%s/" % entity_id)
def show(request, entry_id):
    """View: render an entry's detail page (or a custom view for its schema).

    Entries still being created get a 400; deleted entries redirect to the
    restore page.
    """
    user = User.objects.get(id=request.user.id)

    entry, error = get_object_with_check_permission(user, Entry, entry_id, ACLType.Readable)
    if error:
        return error

    if entry.get_status(Entry.STATUS_CREATING):
        return HttpResponse('Target entry is now under processing', status=400)

    if not entry.is_active:
        return _redirect_restore_entry(entry)

    context = {
        'entry': entry,
        'attributes': entry.get_available_attrs(user),
    }

    if custom_view.is_custom("show_entry", entry.schema.name):
        # A custom view renders the page.
        return custom_view.call_custom("show_entry", entry.schema.name, request, user, entry, context)
    else:
        # Default rendering.
        return render(request, 'show_entry.html', context)
def do_create(request, entity_id, recv_data):
    """View: validate an entry creation request and schedule it as a Job.

    The Entry row is created immediately with the CREATING flag; attribute
    creation runs in the CREATE job.
    """
    # Objects referenced by the processing below.
    user = User.objects.get(id=request.user.id)
    entity = Entity.objects.get(id=entity_id)

    # Reject when an entry of the same name already exists for this entity.
    if Entry.objects.filter(schema=entity_id, name=recv_data['entry_name']).exists():
        return HttpResponse('Duplicate name entry is existed', status=400)

    # Validate the contents of every posted attribute.
    err = _validate_input(recv_data, entity)
    if err:
        return err

    if custom_view.is_custom("do_create_entry", entity.name):
        # resp is an HttpResponse instance or subclass (e.g. JsonResponse).
        resp = custom_view.call_custom(
            "do_create_entry", entity.name, request, recv_data, user, entity
        )
        if resp:
            return resp

    # Create the Entry row with the CREATING flag set.
    entry = Entry.objects.create(
        name=recv_data['entry_name'],
        created_user=user,
        schema=entity,
        status=Entry.STATUS_CREATING,
    )

    # Schedule the job that builds the entry's attributes.
    job_create_entry = Job.new_create(user, entry, params=recv_data)
    job_create_entry.run()

    return JsonResponse({
        'entry_id': entry.id,
        'entry_name': entry.name,
    })
def show(request, entry_id):
    """View: render an entry's detail page (permission-checked).

    Requires Readable permission. Entries still being created get a 400;
    deleted entries redirect to the restore page.
    """
    entry, error = get_obj_with_check_perm(request.user, Entry, entry_id, ACLType.Readable)
    if error:
        return error

    if entry.get_status(Entry.STATUS_CREATING):
        return HttpResponse("Target entry is now under processing", status=400)

    if not entry.is_active:
        return _redirect_restore_entry(entry)

    context = {
        "entry": entry,
        "attributes": entry.get_available_attrs(request.user),
    }

    if custom_view.is_custom("show_entry", entry.schema.name):
        # A custom view renders the page.
        return custom_view.call_custom(
            "show_entry", entry.schema.name, request, request.user, entry, context
        )
    else:
        # Default rendering.
        return render(request, "show_entry.html", context)
def copy(request, entry_id):
    """View: render the entry copy page (permission-checked).

    Requires Writable permission. Entries under creation or editing get a
    400; deleted entries redirect to the restore page.
    """
    user = User.objects.get(id=request.user.id)

    entry, error = get_object_with_check_permission(user, Entry, entry_id, ACLType.Writable)
    if error:
        return error

    # Do not show the copy page while the entry is being created or edited.
    if entry.get_status(Entry.STATUS_CREATING) or entry.get_status(Entry.STATUS_EDITING):
        return HttpResponse('Target entry is now under processing', status=400)

    if not entry.is_active:
        return _redirect_restore_entry(entry)

    context = {
        'form_url': '/entry/do_copy/%s' % entry.id,
        'redirect_url': '/entry/%s' % entry.schema.id,
        'entry': entry,
    }

    if custom_view.is_custom("copy_entry", entry.schema.name):
        # A custom view renders the page.
        return custom_view.call_custom("copy_entry", entry.schema.name, request, user, entry, context)

    return render(request, 'copy_entry.html', context)
def revert_attrv(request, recv_data):
    """View: revert an Attribute to the value of a past AttributeValue.

    Validates the attribute/value pair and the user's Writable permission,
    then — only when the chosen value differs from the latest one — clones the
    old AttributeValue (including its data_array children) as the new latest
    value and re-indexes the parent entry in Elasticsearch.
    """
    user = User.objects.get(id=request.user.id)

    attr = Attribute.objects.filter(id=recv_data['attr_id']).first()
    if not attr:
        return HttpResponse('Specified Attribute-id is invalid', status=400)

    if not user.has_permission(attr, ACLType.Writable):
        return HttpResponse("You don't have permission to update this Attribute", status=400)

    attrv = AttributeValue.objects.filter(id=recv_data['attrv_id']).first()
    if not attrv or attrv.parent_attr.id != attr.id:
        return HttpResponse('Specified AttributeValue-id is invalid', status=400)

    # Abort when the attribute's type was changed after this value was set.
    if attrv.data_type != attr.schema.type:
        return HttpResponse('Attribute-type was changed after this value was registered.',
                            status=400)

    latest_value = attr.get_latest_value()
    if latest_value.get_value() != attrv.get_value():
        # Clear every is_latest flag before promoting the clone.
        attr.unset_latest_flag()

        # Clone the chosen AttributeValue as the new latest value.
        new_attrv = AttributeValue.objects.create(**{
            'value': attrv.value,
            'referral': attrv.referral,
            'status': attrv.status,
            'boolean': attrv.boolean,
            'date': attrv.date,
            'data_type': attrv.data_type,
            'created_user': user,
            'parent_attr': attr,
            'is_latest': True,
        })

        # Clone the child values (for array types) and attach them to the clone.
        new_attrv.data_array.add(*[AttributeValue.objects.create(**{
            'value': v.value,
            'referral': v.referral,
            'created_user': user,
            'parent_attr': attr,
            'status': v.status,
            'boolean': v.boolean,
            'date': v.date,
            'data_type': v.data_type,
            'is_latest': False,
            'parent_attrv': new_attrv,
        }) for v in attrv.data_array.all()])

        # Attach the cloned value to the Attribute.
        attr.values.add(new_attrv)

        # Push the updated entry to Elasticsearch.
        attr.parent_entry.register_es()

        # Hand control to the custom view when one is registered.
        if custom_view.is_custom("revert_attrv", attr.parent_entry.schema.name):
            return custom_view.call_custom(
                "revert_attrv", attr.parent_entry.schema.name,
                request, user, attr, latest_value, new_attrv
            )

    return HttpResponse('Succeed in updating Attribute "%s"' % attr.schema.name)
def do_edit(request, entity_id, recv_data):
    """View: validate an entity edit request and schedule it as a Job.

    Requires Writable permission. Validates referral ids for object-type
    attributes and rejects duplicated (non-deleted) attribute names, then
    updates the top-level/EDITING status flags and queues an EDIT_ENTITY job.
    """
    entity, error = get_obj_with_check_perm(request.user, Entity, entity_id, ACLType.Writable)
    if error:
        return error

    # Validation checks.
    for attr in recv_data["attrs"]:
        # Normalize the payload: make sure ref_ids is always present.
        if "ref_ids" not in attr:
            attr["ref_ids"] = []

        if int(attr["type"]) & AttrTypeValue["object"] and not attr["ref_ids"]:
            return HttpResponse("Need to specify enabled referral ids", status=400)

        if any([not Entity.objects.filter(id=x).exists() for x in attr["ref_ids"]]):
            return HttpResponse("Specified referral is invalid", status=400)

    # Reject duplicated attribute names among the non-deleted attributes.
    counter = collections.Counter([
        attr["name"] for attr in recv_data["attrs"]
        if "deleted" not in attr or not attr["deleted"]
    ])
    if len([v for v, count in counter.items() if count > 1]):
        return HttpResponse("Duplicated attribute names are not allowed", status=400)

    # Refuse edits while another edit is still in flight.
    if entity.get_status(Entity.STATUS_EDITING):
        return HttpResponse("Target entity is now under processing", status=400)

    if custom_view.is_custom("edit_entity"):
        resp = custom_view.call_custom("edit_entity", None, entity,
                                       recv_data["name"], recv_data["attrs"])
        if resp:
            return resp

    # Update the top-level status flag.
    if recv_data["is_toplevel"]:
        entity.set_status(Entity.STATUS_TOP_LEVEL)
    else:
        entity.del_status(Entity.STATUS_TOP_LEVEL)

    # Flag the entity as under processing while the edit job runs.
    entity.set_status(Entity.STATUS_EDITING)

    # Schedule the job that applies the metadata changes.
    job = Job.new_edit_entity(request.user, entity, params=recv_data)
    job.run()

    new_name = recv_data["name"]
    return JsonResponse({
        "entity_id": entity.id,
        "entity_name": new_name,
        "msg": 'Success to schedule to update Entity "%s"' % new_name,
    })
def get_attrs(self, obj: Entry) -> List[EntryAttributeType]:
    """Serializer helper: build the attribute list for an Entry.

    For each active EntityAttr of the entry's schema, the latest value of the
    matching Attribute is rendered into a type-dependent plain structure; an
    AttrDefaultValue placeholder is used when no Attribute row exists. A
    registered get_entry_attr custom view may post-process the result.
    """

    def get_attr_value(attr: Attribute):
        # Render the latest value according to the attribute's type bits.
        attrv = attr.get_latest_value(is_readonly=True)

        if not attrv:
            return ''

        if attr.schema.type & AttrTypeValue['array']:
            if attr.schema.type & AttrTypeValue['string']:
                return [x.value for x in attrv.data_array.all()]

            elif attr.schema.type & AttrTypeValue['named']:
                return [{
                    x.value: {
                        'id': x.referral.id if x.referral else None,
                        'name': x.referral.name if x.referral else '',
                        'schema': {
                            'id': x.referral.entry.schema.id,
                            'name': x.referral.entry.schema.name,
                        } if x.referral else {}
                    },
                } for x in attrv.data_array.all()]

            elif attr.schema.type & AttrTypeValue['object']:
                return [{
                    'id': x.referral.id if x.referral else None,
                    'name': x.referral.name if x.referral else '',
                    'schema': {
                        'id': x.referral.entry.schema.id,
                        'name': x.referral.entry.schema.name,
                    } if x.referral else {}
                } for x in attrv.data_array.all()]

            elif attr.schema.type & AttrTypeValue['group']:
                groups = [Group.objects.get(id=x.value) for x in attrv.data_array.all()]
                return [{
                    'id': group.id,
                    'name': group.name,
                } for group in groups]

        elif (attr.schema.type & AttrTypeValue['string']
                or attr.schema.type & AttrTypeValue['text']):
            return attrv.value

        elif attr.schema.type & AttrTypeValue['named']:
            return {
                attrv.value: {
                    'id': attrv.referral.id if attrv.referral else None,
                    'name': attrv.referral.name if attrv.referral else '',
                    'schema': {
                        'id': attrv.referral.entry.schema.id,
                        'name': attrv.referral.entry.schema.name,
                    } if attrv.referral else {}
                }
            }

        elif attr.schema.type & AttrTypeValue['object']:
            return {
                'id': attrv.referral.id if attrv.referral else None,
                'name': attrv.referral.name if attrv.referral else '',
                'schema': {
                    'id': attrv.referral.entry.schema.id,
                    'name': attrv.referral.entry.schema.name,
                } if attrv.referral else {}
            }

        elif attr.schema.type & AttrTypeValue['boolean']:
            return attrv.boolean

        elif attr.schema.type & AttrTypeValue['date']:
            return attrv.date

        elif attr.schema.type & AttrTypeValue['group'] and attrv.value:
            group = Group.objects.get(id=attrv.value)
            return {
                'id': group.id,
                'name': group.name,
            }

        else:
            return ''

    # Prefetch the entry's active Attribute rows alongside the schema attrs
    # to avoid one query per attribute.
    attr_prefetch = Prefetch(
        'attribute_set',
        queryset=Attribute.objects.filter(parent_entry=obj, is_active=True),
        to_attr="attr_list",
    )
    entity_attrs = obj.schema.attrs.filter(
        is_active=True).prefetch_related(attr_prefetch).order_by('index')

    attrinfo: List[EntryAttributeType] = []
    for entity_attr in entity_attrs:
        attr = entity_attr.attr_list[0] if entity_attr.attr_list else None
        # Fall back to the type's default value when no Attribute row exists.
        value = get_attr_value(attr) if attr else AttrDefaultValue[entity_attr.type]
        attrinfo.append({
            'id': attr.id if attr else None,
            'type': entity_attr.type,
            'value': value,
            'schema_id': entity_attr.id,
            'schema_name': entity_attr.name,
        })

    # Let the custom view add or remove attributes depending on the entity.
    if custom_view.is_custom("get_entry_attr", obj.schema.name):
        attrinfo = custom_view.call_custom("get_entry_attr", obj.schema.name, obj, attrinfo)

    return attrinfo
def create_entry_attrs(self, job_id):
    """Job task: create the Attribute rows (and initial values) for a new Entry.

    Cancels cleanly: when the job is canceled before it starts, the created
    Entry is deleted; when canceled mid-loop, the Entry is deleted and the
    task returns immediately. Duplicate Attribute rows that can appear under
    concurrent execution are pruned before the entry is indexed.
    """
    job = Job.objects.get(id=job_id)

    if job.proceed_if_ready():
        # Claim the job first so the same job is never executed twice.
        job.update(Job.STATUS['PROCESSING'])

        user = User.objects.filter(id=job.user.id).first()
        entry = Entry.objects.filter(id=job.target.id, is_active=True).first()
        if not entry or not user:
            # Abort when the specified entry (or user) no longer exists.
            job.update(Job.STATUS['CANCELED'])
            return

        recv_data = json.loads(job.params)

        # Create Attribute objects based on the specified values.
        for entity_attr in entry.schema.attrs.filter(is_active=True):
            # Skip attributes the user is not permitted to read.
            if not entity_attr.is_active or not user.has_permission(
                    entity_attr, ACLType.Readable):
                continue

            # add_attribute_from_base creates the Attribute that will hold the
            # AttributeValues, but may return None when the Attribute has
            # already been created (or is being created) by another process;
            # in that case this job leaves that Attribute alone.
            attr = entry.add_attribute_from_base(entity_attr, user)
            if not attr or not any(
                    [int(x['id']) == attr.schema.id for x in recv_data['attrs']]):
                continue

            # When the job is canceled mid-processing, drop the created entry
            # and abort.
            if job.is_canceled():
                entry.delete()
                return

            # Pick the posted data for this attribute to build its initial value.
            attr_data = [
                x for x in recv_data['attrs'] if int(x['id']) == attr.schema.id
            ][0]

            # Register the initial AttributeValue on the Attribute.
            try:
                attr.add_value(user, _convert_data_value(attr, attr_data))
            except ValueError as e:
                Logger.warning('(%s) attr_data: %s' % (e, str(attr_data)))

        # Prune duplicate Attribute rows — this processing may run concurrently.
        for entity_attr in entry.schema.attrs.filter(is_active=True):
            if entry.attrs.filter(schema=entity_attr, is_active=True).count() > 1:
                query = entry.attrs.filter(schema=entity_attr, is_active=True)
                query.exclude(id=query.first().id).delete()

        if custom_view.is_custom("after_create_entry", entry.schema.name):
            custom_view.call_custom("after_create_entry", entry.schema.name,
                                    recv_data, user, entry)

        # Push the entry's data to Elasticsearch.
        entry.register_es()

        # Creation finished: drop the CREATING flag.
        entry.del_status(Entry.STATUS_CREATING)

        # Mark the job DONE unless it was canceled meanwhile.
        if not job.is_canceled():
            job.update(Job.STATUS['DONE'])

    elif job.is_canceled():
        # The job was canceled before starting: the created entry must be deleted.
        entry = Entry.objects.filter(id=job.target.id, is_active=True).first()
        if entry:
            entry.delete()
def import_entries(self, job_id):
    """Import (create or update) multiple Entries for an Entity from job params.

    Background-job handler. ``job.params`` is a JSON document keyed by the
    Entity name; each item provides ``name`` and an ``attrs`` mapping of
    attribute-name to value. Progress text is written back to the Job while
    importing, and the loop aborts silently when the job gets canceled.
    """
    job = Job.objects.get(id=job_id)
    if job.proceed_if_ready():
        user = job.user
        entity = Entity.objects.get(id=job.target.id)

        if not user.has_permission(entity, ACLType.Writable):
            job.update(
                **{
                    'status': Job.STATUS['ERROR'],
                    'text': 'Permission denied to import. '
                            'You need Writable permission for "%s"' % entity.name
                })
            return

        whole_data = json.loads(job.params).get(entity.name)
        if not whole_data:
            job.update(
                **{
                    'status': Job.STATUS['ERROR'],
                    'text': 'Uploaded file has no entry data of %s' % entity.name
                })
            return

        # get custom_view method to prevent executing check method in every loop processing
        custom_view_handler = None
        if custom_view.is_custom("after_import_entry", entity.name):
            custom_view_handler = 'after_import_entry'

        job.update(Job.STATUS['PROCESSING'])

        total_count = len(whole_data)

        # create or update entry
        for (index, entry_data) in enumerate(whole_data):
            job.text = 'Now importing... (progress: [%5d/%5d])' % (index + 1,
                                                                   total_count)
            job.save(update_fields=['text'])

            # abort processing when job is canceled
            if job.is_canceled():
                return

            entry = Entry.objects.filter(name=entry_data['name'], schema=entity).first()
            if not entry:
                entry = Entry.objects.create(name=entry_data['name'],
                                             schema=entity,
                                             created_user=user)
            else:
                # existing entries are skipped when the user has no write permission
                if not user.has_permission(entry, ACLType.Writable):
                    continue

            entry.complement_attrs(user)
            for attr_name, value in entry_data['attrs'].items():
                # If user doesn't have readable permission for target Attribute,
                # it won't be created.
                if not entry.attrs.filter(schema__name=attr_name).exists():
                    continue

                # NOTE(review): unguarded .get() — raises DoesNotExist /
                # MultipleObjectsReturned on abnormal data; the sibling
                # _do_import_entries guards the multiple-match case. Confirm intended.
                entity_attr = EntityAttr.objects.get(
                    name=attr_name, parent_entity=entry.schema)
                attr = entry.attrs.get(schema=entity_attr, is_active=True)
                if (not user.has_permission(entity_attr, ACLType.Writable)
                        or not user.has_permission(attr, ACLType.Writable)):
                    continue

                input_value = attr.convert_value_to_register(value)
                if user.has_permission(
                        attr.schema, ACLType.Writable) and attr.is_updated(input_value):
                    attr.add_value(user, input_value)

                # call custom-view processing corresponding to import entry
                if custom_view_handler:
                    custom_view.call_custom(custom_view_handler, entity.name, user,
                                            entry, attr, value)

            # register entry to the Elasticsearch
            entry.register_es()

        # update job status and save it except for the case that target job is canceled.
        if not job.is_canceled():
            job.update(status=Job.STATUS['DONE'], text='')
def revert_attrv(request, recv_data):
    """Revert an Attribute to a previously registered AttributeValue.

    Expects ``recv_data`` with ``attr_id`` and ``attrv_id``. Validates
    permission and that the target value still matches the Attribute's current
    type, then clones the old AttributeValue (including its ``data_array``
    children) as the new latest value, re-registers the parent Entry to
    Elasticsearch, and fires an update-notification Job.

    Returns an HttpResponse; 400 on any validation failure.
    """
    attr = Attribute.objects.filter(id=recv_data["attr_id"]).first()
    if not attr:
        return HttpResponse("Specified Attribute-id is invalid", status=400)

    if not request.user.has_permission(attr, ACLType.Writable):
        return HttpResponse(
            "You don't have permission to update this Attribute", status=400)

    attrv = AttributeValue.objects.filter(id=recv_data["attrv_id"]).first()
    if not attrv or attrv.parent_attr.id != attr.id:
        return HttpResponse("Specified AttributeValue-id is invalid", status=400)

    # When the AttributeType was changed after setting value, this operation is aborted
    if attrv.data_type != attr.schema.type:
        return HttpResponse(
            "Attribute-type was changed after this value was registered.",
            status=400)

    # NOTE(review): assumes get_latest_value() never returns None here — confirm.
    latest_value = attr.get_latest_value()
    if latest_value.get_value() != attrv.get_value():
        # copy specified AttributeValue
        new_attrv = AttributeValue.objects.create(
            **{
                "value": attrv.value,
                "referral": attrv.referral,
                "status": attrv.status,
                "boolean": attrv.boolean,
                "date": attrv.date,
                "data_type": attrv.data_type,
                "created_user": request.user,
                "parent_attr": attr,
                "is_latest": True,
            })

        # This also copies child attribute values and append new one
        new_attrv.data_array.add(*[
            AttributeValue.objects.create(
                **{
                    "value": v.value,
                    "referral": v.referral,
                    "created_user": request.user,
                    "parent_attr": attr,
                    "status": v.status,
                    "boolean": v.boolean,
                    "date": v.date,
                    "data_type": v.data_type,
                    "is_latest": False,
                    "parent_attrv": new_attrv,
                }) for v in attrv.data_array.all()
        ])

        # append cloned value to Attribute
        attr.values.add(new_attrv)

        # clear all exists latest flag
        attr.unset_latest_flag(exclude_id=new_attrv.id)

        # register update to the Elasticsearch
        attr.parent_entry.register_es()

        # Send notification to the webhook URL
        job_notify = Job.new_notify_update_entry(request.user, attr.parent_entry)
        job_notify.run()

        # call custom-view if it exists; when present, its return value
        # replaces the default success response
        if custom_view.is_custom("revert_attrv", attr.parent_entry.schema.name):
            return custom_view.call_custom(*[
                "revert_attrv",
                attr.parent_entry.schema.name,
                request,
                request.user,
                attr,
                latest_value,
                new_attrv,
            ])

    return HttpResponse('Succeed in updating Attribute "%s"' % attr.schema.name)
def _do_import_entries(job):
    """Import (create or update) Entries for *job*'s target Entity.

    Like ``import_entries`` but additionally enqueues a create/update
    notification Job per Entry and guards against duplicate Attributes:
    when multiple active Attributes share the same schema name, import of
    that entry is aborted and the anomaly is logged.
    """
    user = job.user
    entity = Entity.objects.get(id=job.target.id)

    if not user.has_permission(entity, ACLType.Writable):
        job.update(
            **{
                'status': Job.STATUS['ERROR'],
                'text': 'Permission denied to import. '
                        'You need Writable permission for "%s"' % entity.name
            })
        return

    whole_data = json.loads(job.params).get(entity.name)
    if not whole_data:
        job.update(
            **{
                'status': Job.STATUS['ERROR'],
                'text': 'Uploaded file has no entry data of %s' % entity.name
            })
        return

    # get custom_view method to prevent executing check method in every loop processing
    custom_view_handler = None
    if custom_view.is_custom("after_import_entry", entity.name):
        custom_view_handler = 'after_import_entry'

    job.update(Job.STATUS['PROCESSING'])

    total_count = len(whole_data)

    # create or update entry
    for (index, entry_data) in enumerate(whole_data):
        job.text = 'Now importing... (progress: [%5d/%5d])' % (index + 1, total_count)
        job.save(update_fields=['text'])

        # abort processing when job is canceled
        if job.is_canceled():
            return

        entry = Entry.objects.filter(name=entry_data['name'], schema=entity).first()
        if not entry:
            entry = Entry.objects.create(name=entry_data['name'],
                                         schema=entity,
                                         created_user=user)

            # create job to notify create event to the WebHook URL
            job_notify = Job.new_notify_create_entry(user, entry)

        elif not user.has_permission(entry, ACLType.Writable):
            continue

        else:
            # create job to notify edit event to the WebHook URL
            job_notify = Job.new_notify_update_entry(user, entry)

        entry.complement_attrs(user)
        for attr_name, value in entry_data['attrs'].items():
            # If user doesn't have readable permission for target Attribute,
            # it won't be created.
            if not entry.attrs.filter(schema__name=attr_name).exists():
                continue

            # There should be only one EntityAttr that is specified by name and Entity.
            # Once there are multiple EntityAttrs, it must be an abnormal situation.
            # In that case, this aborts import processing for this entry and reports it
            # as an error.
            attr_query = entry.attrs.filter(schema__name=attr_name,
                                            is_active=True,
                                            schema__parent_entity=entry.schema)
            if attr_query.count() > 1:
                Logger.error(
                    '[task.import_entry] Abnormal entry was detected(%s:%d)' %
                    (entry.name, entry.id))
                break

            attr = attr_query.last()
            if (not user.has_permission(attr.schema, ACLType.Writable)
                    or not user.has_permission(attr, ACLType.Writable)):
                continue

            input_value = attr.convert_value_to_register(value)
            if user.has_permission(
                    attr.schema, ACLType.Writable) and attr.is_updated(input_value):
                attr.add_value(user, input_value)

            # call custom-view processing corresponding to import entry
            if custom_view_handler:
                custom_view.call_custom(custom_view_handler, entity.name, user,
                                        entry, attr, value)

        # register entry to the Elasticsearch
        entry.register_es()

        # run notification job
        job_notify.run()

    if not job.is_canceled():
        job.update(status=Job.STATUS['DONE'], text='')
def get_attrs(self, obj: Entry) -> List[EntryAttributeType]:
    """Serialize all attributes of *obj* into a list of EntryAttributeType dicts.

    For each active EntityAttr of the Entry's schema, the latest value of the
    corresponding Attribute is rendered under a type-specific key
    (``as_string``, ``as_object``, ``as_array_named_object``, ...). When no
    Attribute instance exists yet, the type's default value is used instead.
    A ``get_entry_attr`` custom view, when registered, may rewrite the result.
    """

    def get_attr_value(attr: Attribute) -> EntryAttributeValue:
        # Render the latest AttributeValue of *attr*, dispatching on the
        # bitmask in attr.schema.type; returns {} when there is no value
        # or the type matches no branch.
        attrv = attr.get_latest_value(is_readonly=True)

        if not attrv:
            return {}

        if attr.schema.type & AttrTypeValue["array"]:
            if attr.schema.type & AttrTypeValue["string"]:
                return {
                    "as_array_string": [x.value for x in attrv.data_array.all()],
                }

            elif attr.schema.type & AttrTypeValue["named"]:
                # each element maps the stored name (x.value) to its referral
                # object, or to None when the referral is missing
                array_named_object: List[Dict[
                    str, Optional[EntryAttributeValueObject]]] = [{
                        x.value: {
                            "id": x.referral.id if x.referral else None,
                            "name": x.referral.name if x.referral else "",
                            "schema": {
                                "id": x.referral.entry.schema.id,
                                "name": x.referral.entry.schema.name,
                            },
                        } if x.referral else None,
                    } for x in attrv.data_array.all()]
                return {"as_array_named_object": array_named_object}

            elif attr.schema.type & AttrTypeValue["object"]:
                return {
                    "as_array_object": [{
                        "id": x.referral.id if x.referral else None,
                        "name": x.referral.name if x.referral else "",
                        "schema": {
                            "id": x.referral.entry.schema.id,
                            "name": x.referral.entry.schema.name,
                        },
                    } if x.referral else None for x in attrv.data_array.all()]
                }

            elif attr.schema.type & AttrTypeValue["group"]:
                # group elements store the Group id in x.value
                groups = [
                    Group.objects.get(id=x.value) for x in attrv.data_array.all()
                ]
                return {
                    "as_array_group": [{
                        "id": group.id,
                        "name": group.name,
                    } for group in groups]
                }

        elif (attr.schema.type & AttrTypeValue["string"]
                or attr.schema.type & AttrTypeValue["text"]):
            return {"as_string": attrv.value}

        elif attr.schema.type & AttrTypeValue["named"]:
            named: Dict[str, Optional[EntryAttributeValueObject]] = {
                attrv.value: {
                    "id": attrv.referral.id if attrv.referral else None,
                    "name": attrv.referral.name if attrv.referral else "",
                    "schema": {
                        "id": attrv.referral.entry.schema.id,
                        "name": attrv.referral.entry.schema.name,
                    },
                } if attrv.referral else None
            }
            return {"as_named_object": named}

        elif attr.schema.type & AttrTypeValue["object"]:
            return {
                "as_object": {
                    "id": attrv.referral.id if attrv.referral else None,
                    "name": attrv.referral.name if attrv.referral else "",
                    "schema": {
                        "id": attrv.referral.entry.schema.id,
                        "name": attrv.referral.entry.schema.name,
                    },
                } if attrv.referral else None
            }

        elif attr.schema.type & AttrTypeValue["boolean"]:
            return {"as_boolean": attrv.boolean}

        elif attr.schema.type & AttrTypeValue["date"]:
            # NOTE(review): returns a date object (not str) under "as_string" —
            # presumably serialized downstream; confirm.
            return {"as_string": attrv.date if attrv.date else ""}

        elif attr.schema.type & AttrTypeValue["group"] and attrv.value:
            group = Group.objects.get(id=attrv.value)
            return {
                "as_group": {
                    "id": group.id,
                    "name": group.name,
                }
            }

        return {}

    def get_default_attr_value(type: int) -> EntryAttributeValue:
        # Produce the default rendering for an attribute type that has no
        # Attribute instance yet; raises ValidationError on an unknown type.
        if type & AttrTypeValue["array"]:
            if type & AttrTypeValue["string"]:
                return {
                    "as_array_string": AttrDefaultValue[type],
                }

            elif type & AttrTypeValue["named"]:
                return {"as_array_named_object": AttrDefaultValue[type]}

            elif type & AttrTypeValue["object"]:
                return {"as_array_object": AttrDefaultValue[type]}

            elif type & AttrTypeValue["group"]:
                return {"as_array_group": AttrDefaultValue[type]}

        elif type & AttrTypeValue["string"] or type & AttrTypeValue["text"]:
            return {"as_string": AttrDefaultValue[type]}

        elif type & AttrTypeValue["named"]:
            return {"as_named_object": AttrDefaultValue[type]}

        elif type & AttrTypeValue["object"]:
            return {"as_object": AttrDefaultValue[type]}

        elif type & AttrTypeValue["boolean"]:
            return {"as_boolean": AttrDefaultValue[type]}

        elif type & AttrTypeValue["date"]:
            return {"as_string": AttrDefaultValue[type]}

        elif type & AttrTypeValue["group"]:
            return {"as_group": AttrDefaultValue[type]}

        raise ValidationError(f"unexpected type: {type}")

    # prefetch Attribute rows per EntityAttr to avoid N+1 queries
    attr_prefetch = Prefetch(
        "attribute_set",
        queryset=Attribute.objects.filter(parent_entry=obj, is_active=True),
        to_attr="attr_list",
    )
    entity_attrs = (obj.schema.attrs.filter(
        is_active=True).prefetch_related(attr_prefetch).order_by("index"))

    attrinfo: List[EntryAttributeType] = []
    for entity_attr in entity_attrs:
        attr = entity_attr.attr_list[0] if entity_attr.attr_list else None
        value = get_attr_value(attr) if attr else get_default_attr_value(
            entity_attr.type)
        attrinfo.append({
            "id": attr.id if attr else None,
            "type": entity_attr.type,
            "is_mandatory": entity_attr.is_mandatory,
            "value": value,
            "schema": {
                "id": entity_attr.id,
                "name": entity_attr.name,
            },
        })

    # add and remove attributes depending on entity
    if custom_view.is_custom("get_entry_attr", obj.schema.name):
        attrinfo = custom_view.call_custom("get_entry_attr", obj.schema.name,
                                           obj, attrinfo)

    return attrinfo
def update(self, entry: Entry, validated_data):
    """Apply the validated payload to *entry* and return it.

    Marks the entry as editing, runs the before/after custom views, renames
    the entry when a new name is given (scheduling a referral re-registration
    job in that case), writes each changed attribute value the user may edit,
    re-registers the entry to Elasticsearch, and finally fires an
    update-notification job.
    """
    entry.set_status(Entry.STATUS_EDITING)
    user: User = self.context["request"].user

    entity_name = entry.schema.name
    if custom_view.is_custom("before_update_entry", entity_name):
        custom_view.call_custom("before_update_entry", entity_name, user,
                                validated_data, entry)

    attrs_data = validated_data.pop("attrs", [])

    # A rename also invalidates the Elasticsearch documents of referring
    # entries, so a REGISTERED_REFERRALS job is prepared and run at the end.
    job_register_referrals: Optional[Job] = None
    if "name" in validated_data and entry.name != validated_data["name"]:
        entry.name = validated_data["name"]
        entry.save(update_fields=["name"])
        job_register_referrals = Job.new_register_referrals(user, entry)

    for schema_attr in entry.schema.attrs.filter(is_active=True):
        # reuse the existing Attribute, or materialize it from the schema
        attr: Attribute = (
            entry.attrs.filter(schema=schema_attr, is_active=True).first()
            or entry.add_attribute_from_base(schema_attr, user))

        # skip attributes the user is not allowed to write
        if not user.has_permission(attr, ACLType.Writable):
            continue

        # pick the payload item addressed to this schema attribute, if any
        payload = next(
            (x for x in attrs_data if int(x["id"]) == schema_attr.id), None)
        if payload is None:
            continue

        # only persist when the submitted value actually differs
        if attr.is_updated(payload["value"]):
            attr.add_value(user, payload["value"])

    if custom_view.is_custom("after_update_entry", entity_name):
        custom_view.call_custom("after_update_entry", entity_name, user,
                                attrs_data, entry)

    # update entry information to Elasticsearch
    entry.register_es()

    # clear the flag set at the start of this edit
    entry.del_status(Entry.STATUS_EDITING)

    # run the re-registration job caused by renaming, if any
    if job_register_referrals:
        job_register_referrals.run()

    # notify the entry-update event
    job_notify_event: Job = Job.new_notify_update_entry(user, entry)
    job_notify_event.run()

    return entry