Example #1
    def _add_event_listener(self, listener):
        
        '''
        add the event listener to the listener list

        '''
        try:
            mutex = self.event_listener_list_mutex
            def list_access(): 
                if mutex:
                    if mutex.acquire():
                        return True
                else:
                    return True
            def access_finish():
                if mutex:
                    mutex.release()
            if list_access():   
                try:
                    self.event_listener_list.append(listener)
                except Exception, e:
                    print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
                    access_finish()
                else:
                    access_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
Example #2
    def _remove_event_listener(self, listener):
        
        '''
        remove the event listener from the listener list

        '''
        try:
            mutex = self.event_listener_list_mutex
            def list_access(): 
                if mutex:
                    if mutex.acquire():
                        return True
                else:
                    return True
            def access_finish():
                if mutex:
                    mutex.release()
            if list_access():   
                try:
                    i = self.event_listener_list.index(listener)
                    self.event_listener_list.pop(i)
                except Exception, e:
                    print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
                    access_finish()
                else:
                    access_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
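
Both helpers above guard the shared listener list with an optional mutex: list_access()/access_finish() fall back to plain list operations when event_listener_list_mutex is None. A minimal, self-contained sketch of that pattern (the ListenerList class and its add() method are hypothetical stand-ins; only event_listener_list and event_listener_list_mutex mirror the code above), written so it runs on both Python 2 and 3:

    import threading

    class ListenerList(object):
        # hypothetical stand-in for the class owning the two helpers above;
        # only the attributes those helpers rely on are reproduced here
        def __init__(self, use_lock=True):
            self.event_listener_list = []
            self.event_listener_list_mutex = threading.Lock() if use_lock else None

        def add(self, listener):
            mutex = self.event_listener_list_mutex
            if mutex is None or mutex.acquire():
                try:
                    self.event_listener_list.append(listener)
                finally:
                    # release only when a mutex is actually in use
                    if mutex is not None:
                        mutex.release()

    owner = ListenerList()
    owner.add('listener-1')
    print(owner.event_listener_list)

Using try/finally here is a design alternative to the duplicated access_finish() calls in the except and else branches above; it guarantees the release even if append itself raises.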
Example #3
 def on_job_finishing(self, job_unit):
     '''
     called by the job unit when the job unit finishes itself
     params:
         job_unit: the job unit instance
     '''
     try:
         mutex = self.current_job_unit_list_mutex
         if mutex.acquire():
             try:
                 if job_unit.get_job_status() == STATUS_FAIL:
                     raise Exception('job: %s fail due to %s, job sequence processing abort' % \
                                     (job_unit.job_name, job_unit.get_job_finish_info()))
                 elif job_unit.get_job_status() == STATUS_ABORT:
                     raise Exception('job sequence abort by job: %s  due to %s' % \
                                     (job_unit.job_name, job_unit.get_job_finish_info()))
             except Exception, e:
                 print Exception, ':', e, \
                                 ' in %s:%s' % get_class_func_name(self)
                 self.remain_job_unit_name_sequence = []
                 self.remain_params_mapping_sequence = []
                 self.current_job_unit_list = []
                 mutex.release()
                 return
             else:
                 i = self.current_job_unit_list.index(job_unit)
                 self.current_job_unit_list.pop(i)
                 mutex.release()
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
         mutex.release()
Example #4
 def add_context_item(self,
                      ctx,
                      ctx_ns,
                      item_id,
                      item_id_n='context_item_id',
                      is_lock=True):
     '''
     add a context item to the store; the namespace of the added item
     will be created if it doesn't exist
     params:
         ctx: the data of the context item added
         ctx_ns: the namespace of the added context item
         item_id: the id of the context item, should be unique in the namespace
         item_id_n: the name for context item id
         is_lock: indicates whether to add multi-threading protection for the added context item
     '''
     try:
         ctx_ns_id, item_gid = self._get_store_id(ctx_ns, item_id)
         if self.root_mutex.acquire():
             try:
                 ext_ctx = {item_id_n: str(item_id)}
                 ext_ctx.update(ctx)
                 ns_locators = self.root_context_locator.setdefault(
                     ctx_ns_id, [])
                 if item_gid in ns_locators:
                     i = ns_locators.index(item_gid)
                     self.root_context[ctx_ns_id].pop(i)
                     self.root_context_locator[ctx_ns_id].pop(i)
                     self.context_mutexs[ctx_ns_id].pop(i)
                     self.context_item_ids[ctx_ns].pop(i)
                 self.context_item_ids.setdefault(ctx_ns,
                                                  []).append(item_id)
                 self.root_context.setdefault(ctx_ns_id, []).append(ext_ctx)
                 self.root_context_locator.setdefault(ctx_ns_id,
                                                      []).append(item_gid)
                 if is_lock:
                     self.context_mutexs.setdefault(ctx_ns_id,
                                                    []).append(lock())
                 else:
                     self.context_mutexs.setdefault(ctx_ns_id,
                                                    []).append(None)
             except Exception, e:
                 print Exception, ':', e, \
                             ' in %s:%s' % get_class_func_name(self)
                 self.root_mutex.release()
             else:
                 self.root_mutex.release()
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
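
add_context_item keeps four parallel structures in step, three keyed by the namespace id and one by the raw namespace, and the same list index addresses one item in all of them. A minimal sketch of that layout with hypothetical ids (the real ctx_ns_id/item_gid come from _get_store_id()):

    import threading

    # hypothetical ids standing in for the values _get_store_id() would return
    ctx_ns_id, item_gid = 'ns-001', 'gid-abc'

    root_context         = {ctx_ns_id: [{'context_item_id': '42', 'name': 'demo'}]}
    root_context_locator = {ctx_ns_id: [item_gid]}
    context_mutexs       = {ctx_ns_id: [threading.Lock()]}   # or None when is_lock=False
    context_item_ids     = {'slice': ['42']}                 # keyed by the raw namespace

    # one index i selects the same item across all four structures, which is
    # why add/delete always pop the same position from each list
    i = root_context_locator[ctx_ns_id].index(item_gid)
    print(root_context[ctx_ns_id][i])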
Example #5
    def execute_job(self, **params):
        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''
        try:
            self.event = self.task_unit.get_triggered_event()
            self._alloc_csar_id()
            self._alloc_csar_upload_dir()
            r_data = {
                'status': 'request accepted',
                'csarUploadDir': self.csar_upload_dir,
                'allocCsarId': self.csar_id
            }
            print r_data
            out_para = {'csar_upload_dir': self.csar_upload_dir}
            out_para.update({'csar_id': self.csar_id})
            self.event.set_return_data_syn(r_data)
            self._output_params(**out_para)
            self._execution_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            self.event.set_return_data_asyn({'status': 'request failed'})
            self._execution_exception(Exception, e)
Example #6
    def execute_job(self, **params):
        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''
        try:
            self.event = self.task_unit.get_triggered_event()
            self.slice_id = self.event.get_event_data().get('slice_id', None)
            self._update_slice_instantiating()
            r_data = {'status': 'request accepted'}
            self.event.set_return_data_syn(r_data)

            sch = self.task_unit.task_scheduler
            self.cata_ip = sch.get_catalogue_serv_ip()
            self.cata_port = sch.get_catalogue_serv_port()
            self.cata_proto = sch.get_catalogue_serv_proto()
            self.cata_usr = sch.get_catalogue_serv_username()
            self.cata_passwd = sch.get_catalogue_serv_password()

            self._gen_local_download_tmp_dir()
            self._gen_res_ctx()
            self._output_params(**{'slice_id': self.slice_id})
            self._execution_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            self.event.set_return_data_asyn({'status': 'request failed'})
            self._execution_exception(Exception, e)
Example #7
    def get_context_item_process_handler(self, ctx_ns, item_id):
        '''
        get the context process handler handling the context item, 
        located by the context namespace and the context item id
        params:
            ctx_ns: context namespace
            item_id: context item id
        '''
        try:
            if self.root_mutex.acquire():
                try:
                    if item_id not in self.context_item_ids.get(ctx_ns, []):
                        self.root_mutex.release()
                        return None
                    ctx_ns_id, item_gid = self._get_store_id(ctx_ns, item_id)
                    ctx_item = self._get_context_item(ctx_ns_id, item_gid)
                    mutex = self._get_context_item_mutex(ctx_ns_id, item_gid)
                except Exception, e:
                    print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
                    self.root_mutex.release()
                    return None
                else:
                    self.root_mutex.release()

            def store_if():
                self._pers_store_context_item(ctx_ns_id, item_gid)

            return ContextProcessHandler(ctx_item, mutex, store_if)
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            return None
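
The returned ContextProcessHandler bundles the context item, its per-item mutex and a persistence callback. A hedged usage fragment mirroring how Example #12 below consumes it (store, CONTEXT_NAMESPACE_SLICE and slice_id are placeholders taken from that example, not definitions made here):

    # placeholder objects; see Example #12 for the real call site
    ctx_h = store.get_context_item_process_handler(CONTEXT_NAMESPACE_SLICE, slice_id)
    if ctx_h is not None:                      # None means unknown namespace or item id
        slice_ctx = ctx_h.get_context_item()   # read/update the stored dict
        slice_ctx['status'] = 'instantiating'
        ctx_h.process_finish()                 # signal that processing is finished, as in Example #12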
Example #8
    def get_context_namespace_process_handler(self, ctx_ns):
        '''
        get the context process handlers handling the context items under the namespace
        params:
            ctx_ns: context namespace
        return:
            a list containing one handler for each item under the namespace
        '''
        p_handlers = []
        try:
            if self.root_mutex.acquire():
                try:
                    if ctx_ns not in self.context_item_ids.keys():
                        self.root_mutex.release()
                        return []
                    ctx_ns_id = self._get_namespace_id(ctx_ns)
                    for i in range(0, len(self.root_context[ctx_ns_id])):
                        ctx_item = self.root_context[ctx_ns_id][i]
                        item_gid = self.root_context_locator[ctx_ns_id][i]
                        mutex = self.context_mutexs[ctx_ns_id][i]

                        def store_if(ctx_ns_id=ctx_ns_id, item_gid=item_gid):
                            # default arguments bind the current loop values; a plain
                            # closure would capture only the last item_gid after the loop
                            self._pers_store_context_item(ctx_ns_id, item_gid)

                        p_handlers.append(
                            ContextProcessHandler(ctx_item, mutex, store_if))
                except Exception, e:
                    print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
                    self.root_mutex.release()
                    return None
                else:
                    self.root_mutex.release()
            return p_handlers
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            return p_handlers
Example #9
    def execute_job(self, **params):
        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''

        try:
            print "#######", params
            self.slice_id = params['slice_id']
            resp = self._send_res_deploy_req()
            if 'sliceResId' not in resp.keys():
                raise Exception(
                    '#### SliceResDeployJob: slice resource deployment request failed'
                )
            self.slice_res_id = resp['sliceResId']
            print '#### SliceResDeployJob: get a slice resource Id: ' + self.slice_res_id
            self._wait_slice_res_deploy()
            self._update_slice_ctx()
            self._deploy_monitor_options()
            self._deploy_alarms()
            self._execution_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            self._execution_exception(Exception, e)
Example #10
    def continue_process(self, event, listener):
        '''
        this method is called when the task is invoked by the task scheduler
        params:
            event: the event triggering the invoking of the task
            listener: the event listener matched by the event

        '''

        try:
            l_id = listener.get_listener_id()
            mutex = self.event_listener_info_mutex
            if mutex.acquire():
                l_info = self.event_listener_info[l_id]
                if l_info['expire'] == 1:
                    self.event_listener_info.pop(l_id)
                mutex.release()
            tp = self.thread_pool
            ext_params = {'event': event}
            ext_params.update(l_info['params'])
            mutex = self.thread_pool_mutex
            if mutex.acquire():
                req = threadpool.makeRequests(l_info['handler'],
                                              [(None, ext_params)])
                tp.putRequest(req[0])
                mutex.release()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            mutex.release()
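
continue_process dispatches the stored handler through the third-party threadpool package: makeRequests() turns (positional_args, keyword_args) tuples into work requests and putRequest() queues them on the pool. A minimal self-contained sketch of that dispatch pattern, assuming the same threadpool package is installed:

    import threadpool

    def handler(event=None, note=None):
        print('handled %s (%s)' % (event, note))

    pool = threadpool.ThreadPool(2)
    # each args_list entry is an (args, kwds) tuple, exactly as in
    # continue_process above, where the positional part is None
    reqs = threadpool.makeRequests(handler, [(None, {'event': 'e-1', 'note': 'demo'})])
    for req in reqs:
        pool.putRequest(req)
    pool.wait()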
Example #11
    def execute_job(self, **params):

        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''
        try:
            self.event = self.task_unit.get_triggered_event()
            self.csar_id = self.event.get_event_data().get('csar_id', None)
            self._update_csar_onboarding()
            self._alloc_slice_id()
            r_data = {'status': 'request accepted'}
            r_data.update({'allocSliceId': self.slice_id})
            self.event.set_return_data_syn(r_data)
	    print "csar execute"
            self._download_csar_meta()
            self._gen_slice_context()
            self._update_csar_onboarded()
            self._execution_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            self.event.set_return_data_asyn({'status': 'request failed'})
            self._execution_exception(Exception, e)
Example #12
    def execute_job(self, **params):
        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''

        try:
            slice_id = params['slice_id']

            ctx_store = self.task_unit.task_scheduler.context_store
            ctx_h = ctx_store.get_context_item_process_handler(
                CONTEXT_NAMESPACE_SLICE, slice_id)
            slice_ctx = ctx_h.get_context_item()
            ns_combo_id = slice_ctx['nsComboId']
            instan_plan_process_id = slice_ctx['planInfo']['instanPlan'][
                'rtProcessId']
            req_data = {}
            req_data['processDefinitionId'] = instan_plan_process_id
            req_data['task_id'] = ns_combo_id
            self.task_unit.task_scheduler.\
                         req_remote_serv(COMPONENT_NAME_PLAN_ENGINE, SERVICE_SLICE_INSTAN_PLAN_EXEC, req_data)
            ctx_h.process_finish()

            self._execution_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            self._execution_exception(Exception, e)
Example #13
 def _process_event_by_task(self, **params):
     
     '''
     process event by creating a new task or invoking an old task
     the params dict should at least contain:
         'event': the triggered event
         'listener': the matched event listener
     either of the following key/values should be contained:
         'job_unit_sequence': the job unit sequence, a new task will be created when this is contained
         'task_unit': the relevant task unit object, the object's 'continue_process' method will be called
     
     '''
     try:
         event = params['event']
         listener = params['listener']
         mutex = self.task_thread_pool_mutex
         if listener.expire == 1:
             self._remove_event_listener(listener)
         if 'job_unit_sequence' in params.keys():
             seq = params['job_unit_sequence']
             if mutex.acquire():
                 req = threadpool.makeRequests(self._start_new_task, [([seq, event], None)])
                 self.task_thread_pool.putRequest(req[0])
                 mutex.release()
         elif 'task_unit' in params.keys():
             if mutex.acquire():
                 req = threadpool.makeRequests(params['task_unit'].continue_process, [([event, listener], None)])
                 self.task_thread_pool.putRequest(req[0])
                 mutex.release()
         else:
             raise Exception('Either a job unit sequence for new task or a task object to continue is required')
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
Example #14
 def event_listener_register(self,
                             e_type,
                             t_e_data,
                             t_keys_list,
                             t_e_meta,
                             t_e_meta_keys,
                             p_handler=None,
                             p_params={},
                             job_unit_seq=None,
                             expi=EXPIRE_ONCE,
                             timeout_h=None,
                             timeout=None):
     '''
     the method used to register an event listener,
     called by job unit.
     params:
         e_type: event type of the registered listener
         t_e_data: target event data used to construct event matcher
         t_keys_list: target keys list to construct event matcher
         t_e_meta: target event meta used to construct event matcher
         t_e_meta_keys: target meta keys list to construct event matcher
         p_handler: the handler used to process the event when this listener is matched
         p_params: a dict containing the params the 'p_handler' method uses
         job_unit_seq: the job unit sequence used when the listened event triggers a task creation
         expi: the expire value for the listener
         timeout_h: the callback method called when the event listener timeout
         timeout: the timeout value for the listener
     '''
     try:
         if not job_unit_seq:
             if not p_handler:
                 raise Exception(
                     'either a job unit sequence or a processing handler is required'
                 )
             listener_id = self.task_scheduler.task_event_listener_register(
                 e_type, t_e_data, t_keys_list, t_e_meta, t_e_meta_keys,
                 p_params, None, self, expi, timeout_h, timeout)
         else:
             listener_id = self.task_scheduler.task_event_listener_register(
                 e_type, t_e_data, t_keys_list, t_e_meta, t_e_meta_keys,
                 p_params, job_unit_seq, None, expi, timeout_h, timeout)
         if p_handler:
             mutex = self.event_listener_info_mutex
             if mutex.acquire():
                 new_info = {
                     listener_id: {
                         'handler': p_handler,
                         'expire': expi,
                         'params': p_params
                     }
                 }
                 self.event_listener_info.update(new_info)
                 mutex.release()
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
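
A hedged usage sketch of the call a job unit might issue against the signature above; the event type, target data and handler are hypothetical, and EXPIRE_ONCE is the module constant already referenced in the default arguments:

    # hypothetical handler and target data; only the parameter names and
    # order follow event_listener_register() above
    def on_csar_uploaded(event=None, listener=None, **kwargs):
        print('csar upload event received')

    task_unit.event_listener_register(
        'EVENT_CSAR_UPLOADED',      # e_type (hypothetical event type)
        {'csar_id': '42'},          # t_e_data
        ['csar_id'],                # t_keys_list
        {},                         # t_e_meta
        [],                         # t_e_meta_keys
        p_handler=on_csar_uploaded,
        p_params={},
        expi=EXPIRE_ONCE,
        timeout_h=None,
        timeout=None)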
Example #15
    def del_context_item(self, ctx_ns, item_id):
        '''
        delete a context item from the store

        '''
        try:
            ctx_ns_id, item_gid = self._get_store_id(ctx_ns, item_id)
            if self.root_mutex.acquire():
                try:
                    if ctx_ns_id not in self.root_context_locator.keys():
                        self.root_mutex.release()
                        print 'context store: no namespace: ', ctx_ns
                        return
                    if item_gid not in self.root_context_locator[ctx_ns_id]:
                        self.root_mutex.release()
                        print 'context store: no context item id: ', item_id
                        return
                    i = self.root_context_locator[ctx_ns_id].index(item_gid)
                    mutex = self.context_mutexs[ctx_ns_id].pop(i)
                    if mutex:
                        if mutex.acquire():
                            self.root_context[ctx_ns_id].pop(i)
                            self.root_context_locator[ctx_ns_id].pop(i)
                            self.context_item_ids[ctx_ns].pop(i)
                            mutex.release()
                    else:
                        self.root_context[ctx_ns_id].pop(i)
                        self.root_context_locator[ctx_ns_id].pop(i)
                        self.context_item_ids[ctx_ns].pop(i)
                except Exception, e:
                    print Exception, ':', e, \
                            ' in %s:%s' % get_class_func_name(self)
                    self.root_mutex.release()
                else:
                    if len(self.context_mutexs[ctx_ns_id]) == 0:
                        self.root_context.pop(ctx_ns_id)
                        self.root_context_locator.pop(ctx_ns_id)
                        self.context_mutexs.pop(ctx_ns_id)
                        self.context_item_ids.pop(ctx_ns)
                    self.root_mutex.release()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
Example #16
    def trigger(self):
        '''
        trigger the current event via the registered event listeners
        
        '''
        try:
            reqs = []
            mutex = self.event_listener_list_mutex
            tp = self.thread_pool

            def list_access():
                if mutex:
                    if mutex.acquire():
                        return True
                else:
                    return True

            def access_finish():
                if mutex:
                    mutex.release()

            match_list = []
            if list_access():
                try:
                    for l in self.event_listener_list:
                        if l.match(self.event):
                            match_list.append(l)
                except Exception, e:
                    print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
                    access_finish()
                else:
                    access_finish()
                    for l in match_list:
                        reqs = reqs + threadpool.makeRequests(
                            l.process_event, [(None, None)])
                    map(tp.putRequest, reqs)
                    #tp.wait()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
Example #17
    def _on_timeout(self):
        '''
        handler to handle the listener timeout

        '''
        try:
            if self.timeout_handler[0]:
                self.timeout_handler[0](listener=self,
                                        **self.timeout_handler[1])
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
Example #18
 def _get_id_without_env(self, ns, *params):
     
     '''
     generate a guid without the environment parameters
     params:
         ns: a namespace indicating the id type
         params: input parameters to generate id 
     '''
     try:
         return str(uuid3(NS, '_'.join([ns] + map(str, params)).encode(CODE))).encode(CODE)
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
         return
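
The id is a uuid3 of the namespace tag joined with the stringified parameters, so the same inputs always yield the same id. A minimal runnable sketch, with NS replaced by uuid.NAMESPACE_DNS and the .encode(CODE) calls dropped so it runs on Python 2 and 3 alike:

    from uuid import uuid3, NAMESPACE_DNS

    def get_id_without_env(ns, *params):
        # NAMESPACE_DNS is an assumed stand-in for the module-level NS constant
        return str(uuid3(NAMESPACE_DNS, '_'.join([ns] + [str(p) for p in params])))

    # deterministic: identical namespace and parameters give an identical id
    print(get_id_without_env('slice', 'tenant-1', 7))
    print(get_id_without_env('slice', 'tenant-1', 7))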
Example #19
 def get_job_unit_class(self, job_name):
     '''
     get the class of the job_unit by job_name;
     the needed module will be imported dynamically
     
     '''
     try:
         m = ipl.import_module(
             self.job_unit_class_imports[job_name]['module'])
         return getattr(m, self.job_unit_class_imports[job_name]['class'])
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
         return None
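
get_job_unit_class resolves a job name to a class through a module/class mapping and importlib (imported as ipl above). A minimal sketch of that mapping and lookup; the entry points at the standard library so the sketch runs as-is, whereas the real entries would name job unit modules:

    import importlib as ipl

    # assumed shape of job_unit_class_imports; real entries reference job unit modules
    job_unit_class_imports = {
        'ordered_dict_job': {'module': 'collections', 'class': 'OrderedDict'},
    }

    def get_job_unit_class(job_name):
        m = ipl.import_module(job_unit_class_imports[job_name]['module'])
        return getattr(m, job_unit_class_imports[job_name]['class'])

    print(get_job_unit_class('ordered_dict_job'))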
Example #20
    def _on_match(self, event):
        '''
        called when the event is matched;
        the callback function in the match handler is called
        with the event, this listener and the stored handler params

        '''
        try:
            self.match_handler[0](event=event,
                                  listener=self,
                                  **self.match_handler[1])
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
Example #21
    def execute_job(self, **params):
        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''

        try:
            self.slice_id = params['slice_id']
            self._gen_conf_ctx()
            self._execution_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            self._execution_exception(Exception, e)
Example #22
 def _get_id(self, ns, *params):
     
     '''
     generate a guid
     params:
         ns: a namespace indicating the id type
         params: input parameters to generate id 
     '''
     try:
         env = (str(get_thread().ident) + '-' + str(uuid1())).encode(CODE)
         return str(uuid3(NS, '_'.join([ns, env] + map(str, params)).encode(CODE))).encode(CODE)
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
         return
Example #23
    def execute_job(self, **params):
        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''
        try:
            print 'add test data job'
            print 'get input %s: %s' % (params.keys()[0],
                                        params[params.keys()[0]])
            print 'get input %s: %s' % (params.keys()[1],
                                        params[params.keys()[1]])
            self._register_event_listener()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
Example #24
 def match(self, event_d, event_meta):
     '''
     check whether the given event data and meta match this matcher
     params:
         event_d: the data of the event to check
         event_meta: the meta of the event to check
     
     '''
     try:
         t_data = [event_d.get(k, '') for k in self.target_keys]
         meta = [event_meta.get(k, '') for k in self.target_meta_keys]
         return self.result == cal(NS, str(t_data + meta).encode(CODE))
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
         return False
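
match() recomputes a digest over only the selected data and meta keys and compares it with the one stored at construction time (see Example #28 below), so two events match when they agree on exactly the target keys and nothing else. A minimal sketch of that idea, with cal/NS/CODE replaced by a plain uuid3 digest so it runs on its own:

    from uuid import uuid3, NAMESPACE_DNS

    def digest(keys, data, meta_keys, meta):
        # stand-in for cal(NS, ...): hash only the selected fields
        picked = [data.get(k, '') for k in keys] + [meta.get(k, '') for k in meta_keys]
        return uuid3(NAMESPACE_DNS, str(picked))

    target = digest(['csar_id'], {'csar_id': '42', 'noise': 'x'}, [], {})
    event  = digest(['csar_id'], {'csar_id': '42', 'noise': 'y'}, [], {})
    print(target == event)    # True: only the listed keys are compared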
Example #25
    def execute_job(self, **params):
        '''
        called by the task unit to start the execution of this job.
        a derived job unit class should override this method to implement its own processing logic

        '''
        try:
            self.event = self.task_unit.get_triggered_event()
            self.ns_combo_id = self.event.get_event_data().get(
                'ns_combo_id', None)
            r_data = {'status': 'request accepted'}
            self.event.set_return_data_syn(r_data)
            self._onboarding_csars()
            self._execution_finish()
        except Exception, e:
            print Exception, ':', e, \
                      ' in %s:%s' % get_class_func_name(self)
            self._execution_exception(Exception, e)
Example #26
 def _event_listener_register(self, event_type, 
                                    target_e_data, 
                                    target_keys_list,
                                    target_e_meta, 
                                    target_e_meta_keys,
                                    process_handler=None, 
                                    process_params={}, 
                                    job_unit_sequence=None, 
                                    expire=EXPIRE_ONCE, 
                                    timeout_handler=None, 
                                    timeout=None):
     '''
     the method used to register an event listener,
     called by job unit.
     params:
         event_type: event type of the registered listener
         target_e_data: target event data used to construct event matcher
         target_keys_list: target keys list to construct event matcher
         target_e_meta: target event meta used to construct event matcher
         target_e_meta_keys: target meta keys list to construct event matcher
         process_handler: the handler used to process the event when this listener is matched
         process_params: a dict containing the params the 'process_handler' method uses
         job_unit_sequence: the job unit sequence used when the listened event triggers a task creation
         expire: the expire value for the listener
         timeout_handler: the callback method called when the event listener timeout
         timeout: the timeout value for the listener
     '''
     try:
         if not process_handler and not job_unit_sequence:
             raise Exception('either a job unit sequence or a processing handler is required')
         self.task_unit.event_listener_register(event_type, 
                                                target_e_data, 
                                                target_keys_list,
                                                target_e_meta, 
                                                target_e_meta_keys,
                                                process_handler, 
                                                process_params, 
                                                job_unit_sequence, 
                                                expire, 
                                                timeout_handler, 
                                                timeout)
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
Example #27
 def start_process(self):
     '''
     start to process the job unit sequence
     this method is always called by the 'process_event' method in task scheduler
     when an event comes
     
     '''
     try:
         mutex = self.thread_pool_mutex
         tp = self.thread_pool
         while len(self.remain_job_unit_name_sequence) > 0:
             if mutex.acquire():
                 tp.wait()
                 if len(self.current_job_unit_list) == 0:
                     self._phase_shift()
                 mutex.release()
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
Example #28
 def __init__(self, target_e_data, target_keys_list, target_meta,
              target_meta_keys):
     '''
     Matcher class used to match an event uniquely
     params:
         target_e_data: the data of the target event wanted
         target_keys_list: a list of key names indicating which values are concerned
         target_meta: the meta data of the target event wanted
         target_meta_keys: a list of key names indicating which meta are concerned
     '''
     self.target_keys = target_keys_list
     self.target_meta_keys = target_meta_keys
     try:
         t_data = [target_e_data[k] for k in target_keys_list]
         meta = [target_meta[k] for k in target_meta_keys]
         self.result = cal(NS, str(t_data + meta).encode(CODE))
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
Example #29
 def task_event_listener_register(self,
                                  event_type,
                                  target_e_data,
                                  target_keys_list,
                                  target_e_meta,
                                  target_e_meta_keys,
                                  process_params={},
                                  job_unit_sequence=None,
                                  task_unit=None,
                                  expire=EXPIRE_NEVER,
                                  timeout_handler=None,
                                  timeout=None):
     '''
     the method used to register an event listener by a task unit.
     params:
         event_type: event type of the registered listener
         target_e_data: target event data used to construct event matcher
         target_keys_list: target keys list to construct event matcher
         target_e_meta: target event meta used to construct event matcher
         target_e_meta_keys: target meta keys list to construct event matcher
         process_params: a dict containing the params the 'process_event' method uses
         job_unit_sequence: the job unit sequence used when the listened event triggers a task creation
         task_unit: the task unit to be woken up when the listened event triggers the continuation of a task
         expire: the expire value for the listener
         timeout_handler: the callback method called when the event listener timeout
         timeout: the timeout value for the listener
     return:
         global event listener id
     '''
     ext_params = {}
     try:
         if job_unit_sequence:
             ext_params['job_unit_sequence'] = job_unit_sequence
         elif task_unit:
             ext_params['task_unit'] = task_unit
         else:
             raise Exception(
                 'either a job unit sequence for new task or a task object to continue is required'
             )
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
Example #30
 def get_event(self, event_type, event_producer, event_data={}):
     
     '''
     create and return an event object for the given type, producer and data
     params:
         event_type: event type
         event_producer: event producer
         event_data: event data      
     '''
     try:
         event_id = self.id_generator.get_event_id(event_type, event_producer, event_data)
         return EventClass(event_id, event_type, event_producer, 
                           self.event_listener_list, 
                           self.event_listener_list_mutex,
                           self.thread_pool, 
                           event_data)
     except Exception, e:
         print Exception, ':', e, \
                   ' in %s:%s' % get_class_func_name(self)
         return None