def celery_task_failure_email(**kwargs):
    """Report a celery task failure to the site admins by email.

    Intended as a handler for celery's ``task_failure`` signal; ``kwargs``
    is the signal payload (``sender``, ``task_id``, ``args``, ``kwargs``,
    ``exception``, ``einfo``).
    """
    subject = "ERROR Celery Task {sender.name}".format(**kwargs)
    message = """
Task Name: {sender.name}
Task ID: {task_id}
Task args: {args}
Task kwargs: {kwargs}
raised exception: {exception!r}
full traceback: {einfo}
""".format(**kwargs)

    # Logger for DEBUG because email is not sent in dev environment
    Logger.error(message)

    # Logger for Alert because long texts usually cannot be parsed by log server
    Logger.error("An exception error has occurred")

    # Send an email so that admins can receive errors
    mail_admins(subject, message)
def is_authenticated(kls, username, password):
    """Check the given credentials against the configured LDAP server.

    Performs a simple bind as the user derived from ``CONF_LDAP['USER_FILTER']``.

    Returns:
        bool: True when the bind succeeds, False when it fails or any
        LDAP error occurs (errors are logged).
    """
    try:
        conn = ldap3.Connection(
            CONF_LDAP['SERVER_ADDRESS'],
            user=CONF_LDAP['USER_FILTER'].format(username=username),
            password=password)
        is_bound = conn.bind()
        # Fix: release the server connection instead of leaking it; the
        # python-ldap implementation of this method unbinds as well.
        conn.unbind()
        return is_bound
    except LDAPException as e:
        Logger.error(str(e))
        return False
def run(self, will_delay=True):
    """Dispatch this job to the handler registered for its operation.

    When ``will_delay`` is True the handler is queued as an async celery
    task via ``.delay``; otherwise it is invoked synchronously. A job with
    an operation missing from the method table is logged and ignored.
    """
    handlers = self.method_table()
    if self.operation not in handlers:
        Logger.error('Job %s has invalid operation type' % self.id)
        return

    # initiate job processing, either queued on celery or run inline
    handler = handlers[self.operation]
    return handler.delay(self.id) if will_delay else handler(self.id)
def is_authenticated(kls, username, password):
    """Verify a username/password pair via a simple LDAP bind.

    Returns:
        bool: True on a successful bind; False when the credentials are
        invalid or any other LDAP error occurs (logged).
    """
    try:
        conn = ldap.initialize(CONF_LDAP["SERVER_ADDRESS"])
        conn.protocol_version = ldap.VERSION3
        conn.simple_bind_s(
            who=CONF_LDAP["USER_FILTER"].format(username=username),
            cred=password)
        conn.unbind_s()
    except ldap.INVALID_CREDENTIALS:
        # Wrong username/password is an expected outcome, not an error.
        return False
    except ldap.LDAPError as e:
        Logger.error(str(e))
        return False
    # Reached only when bind and unbind completed without raising.
    return True
def celery_task_failure_email(**kwargs):
    """Report a celery task failure to the site admins by email.

    Intended as a handler for celery's ``task_failure`` signal; ``kwargs``
    is the signal payload (``sender``, ``task_id``, ``args``, ``kwargs``,
    ``exception``, ``einfo``).
    """
    subject = "ERROR Celery Task {sender.name}".format(**kwargs)
    message = """
Task Name: {sender.name}
Task ID: {task_id}
Task args: {args}
Task kwargs: {kwargs}
raised exception: {exception!r}
full traceback: {einfo}
""".format(**kwargs)

    # Logger for DEBUG because email is not sent in dev environment
    Logger.error(message)

    # Send an email so that admins can receive errors
    mail_admins(subject, message)
def _do_import_entries(job):
    """Create or update Entry objects for the job's target Entity from the
    JSON payload stored in ``job.params``.

    Progress is written back to ``job.text`` on every iteration, and the
    loop aborts early when the job is canceled. Per-entry and per-attribute
    Writable-permission checks decide what gets written; each imported
    entry is re-indexed in Elasticsearch and a notification job is run.
    """
    user = job.user
    entity = Entity.objects.get(id=job.target.id)

    # The whole import requires Writable permission on the target entity.
    if not user.has_permission(entity, ACLType.Writable):
        job.update(**{
            'status': Job.STATUS['ERROR'],
            'text': 'Permission denied to import. '
                    'You need Writable permission for "%s"' % entity.name
        })
        return

    # job.params is a JSON document keyed by entity name; reject uploads
    # that carry no data for this entity.
    whole_data = json.loads(job.params).get(entity.name)
    if not whole_data:
        job.update(**{
            'status': Job.STATUS['ERROR'],
            'text': 'Uploaded file has no entry data of %s' % entity.name
        })
        return

    # get custom_view method to prevent executing check method in every loop processing
    custom_view_handler = None
    if custom_view.is_custom("after_import_entry", entity.name):
        custom_view_handler = 'after_import_entry'

    job.update(Job.STATUS['PROCESSING'])

    total_count = len(whole_data)

    # create or update entry
    for (index, entry_data) in enumerate(whole_data):
        job.text = 'Now importing... (progress: [%5d/%5d])' % (index + 1, total_count)
        job.save(update_fields=['text'])

        # abort processing when job is canceled
        if job.is_canceled():
            return

        entry = Entry.objects.filter(name=entry_data['name'], schema=entity).first()
        if not entry:
            entry = Entry.objects.create(name=entry_data['name'], schema=entity,
                                         created_user=user)

            # create job to notify create event to the WebHook URL
            job_notify = Job.new_notify_create_entry(user, entry)

        elif not user.has_permission(entry, ACLType.Writable):
            # Existing entries the user cannot modify are skipped entirely.
            continue

        else:
            # create job to notify edit event to the WebHook URL
            job_notify = Job.new_notify_update_entry(user, entry)

        entry.complement_attrs(user)
        for attr_name, value in entry_data['attrs'].items():
            # If user doesn't have readable permission for target Attribute,
            # it won't be created.
            if not entry.attrs.filter(schema__name=attr_name).exists():
                continue

            # There should be only one EntityAttr that is specified by name and Entity.
            # Once there are multiple EntityAttrs, it must be an abnormal situation.
            # In that case, this aborts import processing for this entry and reports it
            # as an error.
            attr_query = entry.attrs.filter(schema__name=attr_name,
                                            is_active=True,
                                            schema__parent_entity=entry.schema)
            if attr_query.count() > 1:
                Logger.error(
                    '[task.import_entry] Abnormal entry was detected(%s:%d)'
                    % (entry.name, entry.id))
                break

            attr = attr_query.last()
            if (not user.has_permission(attr.schema, ACLType.Writable)
                    or not user.has_permission(attr, ACLType.Writable)):
                continue

            input_value = attr.convert_value_to_register(value)
            # Only write a new AttributeValue when the incoming value actually
            # differs from the current one.
            if user.has_permission(attr.schema,
                                   ACLType.Writable) and attr.is_updated(input_value):
                attr.add_value(user, input_value)

            # call custom-view processing corresponding to import entry
            if custom_view_handler:
                custom_view.call_custom(custom_view_handler, entity.name, user, entry,
                                        attr, value)

        # register entry to the Elasticsearch
        entry.register_es()

        # run notification job
        job_notify.run()

    if not job.is_canceled():
        job.update(status=Job.STATUS['DONE'], text='')