def copy_entry(self, job_id):
    """Task handler that copies the Entry targeted by the given Job.

    Clones the Job's target Entry under the new name carried in the Job's
    params, runs the per-Entity "after_copy_entry" custom hook when one is
    registered, then kicks a follow-up Job to notify registered webhooks.

    :param job_id: primary key of the Job describing this copy operation
    """
    job = Job.objects.get(id=job_id)
    if job.proceed_if_ready():
        # update job status
        job.update(Job.STATUS['PROCESSING'])

        user = User.objects.get(id=job.user.id)
        src_entry = Entry.objects.get(id=job.target.id)

        params = json.loads(job.params)

        # Reuse an Entry that already holds the destination name (e.g. when
        # this job is retried after a partial run); otherwise clone the
        # source Entry under the new name and index the clone.
        dest_entry = Entry.objects.filter(schema=src_entry.schema, name=params['new_name']).first()
        if not dest_entry:
            dest_entry = src_entry.clone(user, name=params['new_name'])
            dest_entry.register_es()

        # calling custom view processing if necessary
        if custom_view.is_custom("after_copy_entry", src_entry.schema.name):
            custom_view.call_custom("after_copy_entry", src_entry.schema.name,
                                    user, src_entry, dest_entry, params['post_data'])

        # update job status and save it
        job.update(Job.STATUS['DONE'], 'original entry: %s' % src_entry.name, dest_entry)

        # create and run event notification job
        job_notify_event = Job.new_notify_create_entry(user, dest_entry)
        job_notify_event.run()
def create(self, validated_data: EntryCreateData):
    """Create a new Entry together with its Attributes from validated data.

    Runs the before/after custom-view hooks for the target Entity, fills in
    any supplied initial attribute values the user may write, indexes the
    Entry in Elasticsearch and fires the create-notification webhook Job.
    """
    user: User = self.context["request"].user
    entity_name = validated_data["schema"].name

    # Give the custom-view hook a chance to rewrite the incoming data.
    if custom_view.is_custom("before_create_entry_v2", entity_name):
        validated_data = custom_view.call_custom(
            "before_create_entry_v2", entity_name, user, validated_data)

    initial_attrs = validated_data.pop("attrs", [])

    # Keep the Entry flagged as "creating" until every Attribute is in place.
    entry: Entry = Entry.objects.create(**validated_data, status=Entry.STATUS_CREATING)

    for entity_attr in entry.schema.attrs.filter(is_active=True):
        attr: Attribute = entry.add_attribute_from_base(entity_attr, user)

        # Unpermitted attributes are created but never receive an initial value.
        if user.has_permission(attr, ACLType.Writable):
            # set the initial AttributeValue when the request supplied one
            # for this schema attribute
            supplied = [item for item in initial_attrs if int(item["id"]) == entity_attr.id]
            if supplied:
                attr.add_value(user, supplied[0]["value"])

    if custom_view.is_custom("after_create_entry_v2", entity_name):
        custom_view.call_custom("after_create_entry_v2", entity_name, user, entry)

    # make the finished Entry searchable
    entry.register_es()

    # creation is complete; drop the in-progress flag
    entry.del_status(Entry.STATUS_CREATING)

    # notify registered webhooks about the new Entry
    job_notify_event: Job = Job.new_notify_create_entry(user, entry)
    job_notify_event.run()

    return entry
def restore_entry(self, job_id):
    """Task handler that restores the soft-deleted Entry targeted by a Job.

    :param job_id: primary key of the Job describing this restore operation
    """
    job = Job.objects.get(id=job_id)
    if not job.proceed_if_ready():
        return

    job.update(Job.STATUS["PROCESSING"])

    target = Entry.objects.get(id=job.target.id)
    target.restore()

    # remove status flag which is set before calling this
    target.del_status(Entry.STATUS_CREATING)

    # Send notification to the webhook URL
    Job.new_notify_create_entry(job.user, target).run()

    # calling custom view processing if necessary
    if custom_view.is_custom("after_restore_entry", target.schema.name):
        custom_view.call_custom("after_restore_entry", target.schema.name, job.user, target)

    # update job status and save it
    job.update(Job.STATUS["DONE"])
def restore(self, request, pk):
    """Restore a soft-deleted Entry and return HTTP 201 on success.

    Raises ValidationError when the Entry is still active, or when an
    active Entry already occupies the name the restore would reinstate.
    """
    entry: Entry = self.get_object()

    # only a deleted Entry can be restored
    if entry.is_active:
        raise ValidationError("specified entry has not deleted")

    # Restoring strips the "_deleted_<timestamp>" suffix from the name;
    # refuse when an active Entry of the same Entity already holds it.
    original_name = re.sub(r"_deleted_[0-9_]*$", "", entry.name)
    name_taken = Entry.objects.filter(
        schema=entry.schema, name=original_name, is_active=True).exists()
    if name_taken:
        raise ValidationError("specified entry has already exist other")

    user: User = request.user

    if custom_view.is_custom("before_restore_entry_v2", entry.schema.name):
        custom_view.call_custom("before_restore_entry_v2", entry.schema.name, user, entry)

    # guard the Entry while restoration is in progress
    entry.set_status(Entry.STATUS_CREATING)

    # restore entry
    entry.restore()

    if custom_view.is_custom("after_restore_entry_v2", entry.schema.name):
        custom_view.call_custom("after_restore_entry_v2", entry.schema.name, user, entry)

    # restoration finished; drop the in-progress flag
    entry.del_status(Entry.STATUS_CREATING)

    # Send notification to the webhook URL
    Job.new_notify_create_entry(user, entry).run()

    return Response(status=status.HTTP_201_CREATED)
def post(self, request, format=None):
    """Create or update an Entry from the posted data (API v1).

    Branches on the request payload:
      * 'id' present  -> rename/update that specific Entry,
      * an active Entry with the same (entity, name) exists -> update it,
      * otherwise -> create a new Entry.
    In every case attribute values are applied, the Entry is re-indexed in
    Elasticsearch and a webhook-notification Job is run.

    Returns 400 on validation/permission/duplicate-name errors, otherwise a
    response containing the Entry id, which attributes changed, and whether
    the Entry was newly created.
    """
    user = User.objects.get(id=request.user.id)
    sel = PostEntrySerializer(data=request.data)

    # This is necessary because request.data might be changed by the processing of serializer
    raw_request_data = deepcopy(request.data)

    if not sel.is_valid():
        ret = {
            'result': 'Validation Error',
            'details': ['(%s) %s' % (k, ','.join(e)) for k, e in sel._errors.items()],
        }
        return Response(ret, status=status.HTTP_400_BAD_REQUEST)

    # checking that target user has permission to create an entry
    if not user.has_permission(sel.validated_data['entity'], ACLType.Writable):
        return Response(
            {'result': 'Permission denied to create(or update) entry'},
            status=status.HTTP_400_BAD_REQUEST)

    # set target entry information to response data
    resp_data = {
        'updated_attrs': {},  # This describes updated attribute values
        'is_created': False,  # This sets true when target entry will be created in this
                              # processing
    }

    entry_condition = {
        'schema': sel.validated_data['entity'],
        'name': sel.validated_data['name'],
        'is_active': True,
    }
    if 'id' in sel.validated_data:
        # prevent to register duplicate entry-name with other entry
        if Entry.objects.filter(
                Q(**entry_condition) & ~Q(id=sel.validated_data['id'])).exists():
            return Response(
                {
                    'result': '"%s" is duplicate name with other Entry' %
                    entry_condition['name']
                },
                status=status.HTTP_400_BAD_REQUEST)

        entry = Entry.objects.get(id=sel.validated_data['id'])
        # NOTE(review): the name is saved unconditionally here, even when it is
        # unchanged — the v2 handler only saves on an actual change; confirm
        # whether that difference is intended.
        entry.name = sel.validated_data['name']
        entry.save(update_fields=['name'])
        entry.set_status(Entry.STATUS_EDITING)

        # create job to notify entry event to the registered WebHook
        job_notify = Job.new_notify_update_entry(user, entry)

    elif Entry.objects.filter(**entry_condition).exists():
        entry = Entry.objects.get(**entry_condition)
        entry.set_status(Entry.STATUS_EDITING)

        # create job to notify entry event to the registered WebHook
        job_notify = Job.new_notify_update_entry(user, entry)

    else:
        entry = Entry.objects.create(created_user=user,
                                     status=Entry.STATUS_CREATING,
                                     **entry_condition)
        resp_data['is_created'] = True

        # create job to notify entry event to the registered WebHook
        job_notify = Job.new_notify_create_entry(user, entry)

    entry.complement_attrs(user)
    for name, value in sel.validated_data['attrs'].items():
        # If user doesn't have readable permission for target Attribute, it won't be created.
        # NOTE(review): the existence check filters on Attribute.name while the
        # lookup below uses schema__name — presumably these always agree, but if
        # they can diverge the get() may raise DoesNotExist; verify.
        if not entry.attrs.filter(name=name).exists():
            continue

        attr = entry.attrs.get(schema__name=name, is_active=True)
        # only write a value when permitted AND the value actually differs
        if user.has_permission(
                attr.schema, ACLType.Writable) and attr.is_updated(value):
            attr.add_value(user, value)

            # This enables to let user know what attributes are changed in this request
            resp_data['updated_attrs'][name] = raw_request_data['attrs'][name]

    # register target Entry to the Elasticsearch
    entry.register_es()

    # run notification job
    job_notify.run()

    entry.del_status(Entry.STATUS_CREATING | Entry.STATUS_EDITING)

    return Response(dict({'result': entry.id}, **resp_data))
def _do_import_entries(job):
    """Import (create or update) Entries of one Entity from a Job's payload.

    The Job's params carry the uploaded data keyed by the Entity name. Each
    entry is created or updated, its attribute values applied, indexed in
    Elasticsearch, and a per-entry webhook-notification Job is run. The Job
    is marked ERROR on permission/payload problems and aborts early when
    canceled.

    :param job: the import Job whose target is the Entity to import into
    """
    user = job.user
    entity = Entity.objects.get(id=job.target.id)

    if not user.has_permission(entity, ACLType.Writable):
        job.update(
            **{
                'status': Job.STATUS['ERROR'],
                'text': 'Permission denied to import. '
                'You need Writable permission for "%s"' % entity.name
            })
        return

    whole_data = json.loads(job.params).get(entity.name)
    if not whole_data:
        job.update(
            **{
                'status': Job.STATUS['ERROR'],
                'text': 'Uploaded file has no entry data of %s' % entity.name
            })
        return

    # get custom_view method to prevent executing check method in every loop processing
    custom_view_handler = None
    if custom_view.is_custom("after_import_entry", entity.name):
        custom_view_handler = 'after_import_entry'

    job.update(Job.STATUS['PROCESSING'])

    total_count = len(whole_data)

    # create or update entry
    for (index, entry_data) in enumerate(whole_data):
        # surface import progress through the Job's text field
        job.text = 'Now importing... (progress: [%5d/%5d])' % (index + 1, total_count)
        job.save(update_fields=['text'])

        # abort processing when job is canceled
        if job.is_canceled():
            return

        entry = Entry.objects.filter(name=entry_data['name'], schema=entity).first()
        if not entry:
            entry = Entry.objects.create(name=entry_data['name'], schema=entity,
                                         created_user=user)

            # create job to notify create event to the WebHook URL
            job_notify = Job.new_notify_create_entry(user, entry)

        elif not user.has_permission(entry, ACLType.Writable):
            # skip entries the user may not modify
            continue

        else:
            # create job to notify edit event to the WebHook URL
            job_notify = Job.new_notify_update_entry(user, entry)

        entry.complement_attrs(user)
        for attr_name, value in entry_data['attrs'].items():
            # If user doesn't have readable permission for target Attribute,
            # it won't be created.
            if not entry.attrs.filter(schema__name=attr_name).exists():
                continue

            # There should be only one EntityAttr that is specified by name and Entity.
            # Once there are multiple EntityAttrs, it must be an abnormal situation.
            # In that case, this aborts import processing for this entry and reports it
            # as an error.
            attr_query = entry.attrs.filter(schema__name=attr_name,
                                            is_active=True,
                                            schema__parent_entity=entry.schema)
            if attr_query.count() > 1:
                Logger.error(
                    '[task.import_entry] Abnormal entry was detected(%s:%d)' %
                    (entry.name, entry.id))
                break

            attr = attr_query.last()
            if (not user.has_permission(attr.schema, ACLType.Writable)
                    or not user.has_permission(attr, ACLType.Writable)):
                continue

            input_value = attr.convert_value_to_register(value)
            # only write a value when permitted AND the value actually differs
            if user.has_permission(
                    attr.schema, ACLType.Writable) and attr.is_updated(input_value):
                attr.add_value(user, input_value)

            # call custom-view processing corresponding to import entry
            if custom_view_handler:
                custom_view.call_custom(custom_view_handler, entity.name, user, entry,
                                        attr, value)

        # register entry to the Elasticsearch
        entry.register_es()

        # run notification job
        job_notify.run()

    if not job.is_canceled():
        job.update(status=Job.STATUS['DONE'], text='')
def create_entry_attrs(self, job_id):
    """Task handler that creates the Attributes of a freshly-made Entry.

    Builds an Attribute (with its initial AttributeValue, when supplied in
    the Job's params) for every active schema attribute, de-duplicates
    Attributes that concurrent runs may have produced, indexes the Entry in
    Elasticsearch and fires the create-notification webhook Job. Honors
    cancellation both before and during processing (a mid-run cancel deletes
    the partially-built Entry).

    :param job_id: primary key of the Job describing this operation
    """
    job = Job.objects.get(id=job_id)
    if job.proceed_if_ready():
        # At the first time, update job status to prevent executing this job duplicately
        job.update(Job.STATUS['PROCESSING'])

        user = User.objects.filter(id=job.user.id).first()
        entry = Entry.objects.filter(id=job.target.id, is_active=True).first()
        if not entry or not user:
            # Abort when specified entry doesn't exist
            job.update(Job.STATUS['CANCELED'])
            return

        recv_data = json.loads(job.params)

        # Create new Attributes objects based on the specified value
        for entity_attr in entry.schema.attrs.filter(is_active=True):
            # skip for unpermitted attributes
            if not entity_attr.is_active or not user.has_permission(
                    entity_attr, ACLType.Readable):
                continue

            # This creates Attribute object that contains AttributeValues.
            # But the add_attribute_from_base may return None when target Attribute instance
            # has already been created or is creating by other process. In that case, this job
            # do nothing about that Attribute instance.
            attr = entry.add_attribute_from_base(entity_attr, user)
            if not attr or not any(
                    [int(x['id']) == attr.schema.id for x in recv_data['attrs']]):
                continue

            # When job is canceled during this processing, abort it after deleting the created entry
            if job.is_canceled():
                entry.delete()
                return

            # make an initial AttributeValue object if the initial value is specified
            attr_data = [
                x for x in recv_data['attrs'] if int(x['id']) == attr.schema.id
            ][0]

            # register new AttributeValue to the "attr"
            try:
                attr.add_value(user, _convert_data_value(attr, attr_data))
            except ValueError as e:
                # a bad initial value only skips this attribute, it doesn't fail the job
                Logger.warning('(%s) attr_data: %s' % (e, str(attr_data)))

        # Delete duplicate attrs because this processing may execute concurrently
        for entity_attr in entry.schema.attrs.filter(is_active=True):
            if entry.attrs.filter(schema=entity_attr, is_active=True).count() > 1:
                query = entry.attrs.filter(schema=entity_attr, is_active=True)
                query.exclude(id=query.first().id).delete()

        if custom_view.is_custom("after_create_entry", entry.schema.name):
            custom_view.call_custom("after_create_entry", entry.schema.name,
                                    recv_data, user, entry)

        # register entry information to Elasticsearch
        entry.register_es()

        # clear flag to specify this entry has been completed to create
        entry.del_status(Entry.STATUS_CREATING)

        # update job status and save it except for the case that target job is canceled.
        if not job.is_canceled():
            job.update(Job.STATUS['DONE'])

            # Send notification to the webhook URL
            job_notify_event = Job.new_notify_create_entry(user, entry)
            job_notify_event.run()

    elif job.is_canceled():
        # When job is canceled before starting, created entry should be deleted.
        entry = Entry.objects.filter(id=job.target.id, is_active=True).first()
        if entry:
            entry.delete()
def post(self, request, format=None):
    """Create or update an Entry from the posted data (API v2 variant).

    Same branching as the v1 handler ('id' present / same-name active Entry
    exists / new Entry), but notification Jobs are created lazily: a
    NOTIFY_UPDATE_ENTRY Job is only fired when the name or some attribute
    value actually changed.

    Returns 400 on validation/permission/duplicate-name errors, otherwise a
    response containing the Entry id, which attributes changed, and whether
    the Entry was newly created.
    """
    sel = PostEntrySerializer(data=request.data)

    # This is necessary because request.data might be changed by the processing of serializer
    raw_request_data = deepcopy(request.data)

    if not sel.is_valid():
        ret = {
            "result": "Validation Error",
            "details": ["(%s) %s" % (k, ",".join(e)) for k, e in sel._errors.items()],
        }
        return Response(ret, status=status.HTTP_400_BAD_REQUEST)

    # checking that target user has permission to create an entry
    if not request.user.has_permission(sel.validated_data["entity"], ACLType.Writable):
        return Response(
            {"result": "Permission denied to create(or update) entry"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    # set target entry information to response data
    resp_data = {
        "updated_attrs": {},  # This describes updated attribute values
        "is_created": False,  # This sets true when target entry will be created in this
                              # processing
    }

    # This variable indicates whether NOTIFY UPDATE ENTRY Job will be created.
    # This is necessary to create minimum necessary NOTIFY_UPDATE_ENTRY Job.
    will_notify_update_entry = False

    # Common processing to update Entry's name and set will_notify_update_entry variable
    def _update_entry_name(entry):
        # Set Entry status that indicates target Entry is under editing processing
        # to prevent to updating this entry from others.
        entry.set_status(Entry.STATUS_EDITING)

        # Set will_notify_update_entry when name parameter is different with target Entry's name
        _will_notify_update_entry = False
        if entry.name != sel.validated_data["name"]:
            entry.name = sel.validated_data["name"]
            entry.save(update_fields=["name"])
            _will_notify_update_entry = True

        return _will_notify_update_entry

    entry_condition = {
        "schema": sel.validated_data["entity"],
        "name": sel.validated_data["name"],
        "is_active": True,
    }
    if "id" in sel.validated_data:
        # prevent to register duplicate entry-name with other entry
        if Entry.objects.filter(
                Q(**entry_condition) & ~Q(id=sel.validated_data["id"])).exists():
            return Response(
                {
                    "result": '"%s" is duplicate name with other Entry' %
                    entry_condition["name"]
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        entry = Entry.objects.get(id=sel.validated_data["id"])
        will_notify_update_entry = _update_entry_name(entry)

    elif Entry.objects.filter(**entry_condition).exists():
        entry = Entry.objects.get(**entry_condition)
        will_notify_update_entry = _update_entry_name(entry)

    else:
        entry = Entry.objects.create(created_user=request.user,
                                     status=Entry.STATUS_CREATING,
                                     **entry_condition)
        resp_data["is_created"] = True

        # create job to notify entry event to the registered WebHook
        Job.new_notify_create_entry(request.user, entry).run()

    entry.complement_attrs(request.user)
    for name, value in sel.validated_data["attrs"].items():
        # If user doesn't have readable permission for target Attribute, it won't be created.
        # NOTE(review): the existence check filters on Attribute.name while the
        # lookup below uses schema__name — presumably these always agree, but if
        # they can diverge the get() may raise DoesNotExist; verify.
        if not entry.attrs.filter(name=name).exists():
            continue

        attr = entry.attrs.get(schema__name=name, is_active=True)
        # only write a value when permitted AND the value actually differs
        if request.user.has_permission(
                attr.schema, ACLType.Writable) and attr.is_updated(value):
            attr.add_value(request.user, value)
            will_notify_update_entry = True

            # This enables to let user know what attributes are changed in this request
            resp_data["updated_attrs"][name] = raw_request_data["attrs"][name]

    if will_notify_update_entry:
        # Create job to notify event, which indicates target entry is updated,
        # to the registered WebHook.
        Job.new_notify_update_entry(request.user, entry).run()

    # register target Entry to the Elasticsearch
    entry.register_es()

    entry.del_status(Entry.STATUS_CREATING | Entry.STATUS_EDITING)

    return Response(dict({"result": entry.id}, **resp_data))