def _wrapper(*args, **kwargs):
    # Health-check the shared session before delegating to the wrapped
    # function: on the server role, verify the session can still count a
    # well-known detect table, rebuilding the session on failure
    # (up to 3 attempts).
    if RuntimeConfig.PROCESS_ROLE in [ProcessRole.SERVER]:
        for i in range(3):
            try:
                stat_logger.info("detect session {} by table {} {}".format(
                    session.get_session_id(), DETECT_TABLE[0], DETECT_TABLE[1]))
                stat_logger.info("start count table {} {}".format(DETECT_TABLE[0], DETECT_TABLE[1]))
                count = session.table(namespace=DETECT_TABLE[0], name=DETECT_TABLE[1]).count()
                stat_logger.info("table {} {} count is {}".format(DETECT_TABLE[0], DETECT_TABLE[1], count))
                if count != DETECT_TABLE[2]:
                    # DETECT_TABLE[2] is presumably the expected row count of the
                    # detect table -- TODO confirm against where DETECT_TABLE is defined.
                    raise Exception("session {} count error".format(session.get_session_id()))
                stat_logger.info("session {} is ok".format(session.get_session_id()))
                break  # session verified; stop retrying
            except Exception as e:
                stat_logger.exception(e)
                stat_logger.info("start init new session")
                try:
                    # Tear down the broken session and build a fresh one,
                    # then loop around to re-detect it.
                    clean_server_used_session()
                    init_session_for_flow_server()
                except Exception as e:
                    stat_logger.exception(e)
                    stat_logger.info("init new session failed.")
        else:
            # for-else: all 3 attempts completed without a successful break.
            # NOTE(review): message says "init new session failed" but this
            # branch means detection never succeeded -- confirm wording intent.
            stat_logger.error("init new session failed.")
    else:
        # If in executor pass. TODO: detect and restore the session in executor
        pass
    # Always invoke the wrapped callable, even if detection/repair failed.
    return func(*args, **kwargs)
def put_event(self, event):
    """Put *event* on the in-process queue; failures are logged, not raised."""
    try:
        self.queue.put(event)
        stat_logger.info('put event into in-process queue successfully: {}'.format(event))
    except Exception as err:
        # Best-effort enqueue: record the failure and carry on.
        stat_logger.exception(err)
        stat_logger.error('put event into in-process queue failed')
def put_event(self, event):
    """Serialize *event* as JSON and LPUSH it onto the Redis queue.

    Failures are logged and swallowed; nothing is raised to the caller.
    """
    try:
        connection = self.get_conn()
        pushed = connection.lpush(self.queue_name, json.dumps(event))
        outcome = 'successfully' if pushed else 'failed'
        stat_logger.info('put event into redis queue {}: {}'.format(outcome, event))
    except Exception as err:
        stat_logger.exception(err)
        stat_logger.error('put event into redis queue failed')
def del_event(self, event):
    """Delete *event* from the queue; raise a wrapped error on failure."""
    try:
        # NOTE(review): 'dell' looks like a project-defined helper on this
        # class -- confirm the spelling is intentional.
        removed = self.dell(event)
        outcome = 'successfully' if removed else 'failed'
        stat_logger.info('delete event from redis queue {}: {}'.format(outcome, event))
    except Exception as err:
        stat_logger.error('delete event from queue failed')
        stat_logger.exception(err)
        raise Exception('{} not in ListQueue'.format(event))
def get_event(self):
    """Block until an event is available on the in-process queue.

    Returns the event, or None if the get fails for any reason.
    """
    try:
        item = self.queue.get(block=True)
        stat_logger.info('get event from in-process queue successfully: {}'.format(item))
        return item
    except Exception as err:
        stat_logger.exception(err)
        stat_logger.error('get event from in-process queue failed')
        return None
def __init__(self, queue_name, host, port, password, max_connections):
    """Build the Redis connection pool and verify the queue is reachable.

    Raises:
        Exception: when the readiness probe fails.
    """
    super(RedisQueue, self).__init__()
    self.queue_name = queue_name
    self.pool = redis.ConnectionPool(host=host,
                                     port=port,
                                     password=password,
                                     max_connections=max_connections,
                                     db=REDIS_QUEUE_DB_INDEX)
    # Guard clause: fail fast if Redis cannot be reached.
    if not self.is_ready():
        stat_logger.error('init redis queue error!')
        raise Exception('init redis queue error!')
    stat_logger.info('init redis queue')
def put_event(self, event, status=None, job_id=''):
    """Put *event* with optional status/job id; return the underlying put() result.

    Re-raises any failure after logging it.
    """
    try:
        result = self.put(item=event, status=status, job_id=job_id)
        stat_logger.info('put event into queue successfully: {}'.format(event))
        return result
    except Exception as err:
        stat_logger.error('put event into queue failed')
        stat_logger.exception(err)
        raise err
def put_event(self, event, status=None, job_id=None):
    """Put *event* on the in-process queue; re-raise on failure after logging.

    *status* and *job_id* are accepted for interface parity but unused here.
    """
    try:
        self.put(event)
        stat_logger.info('put event into in-process queue successfully: {}'.format(event))
    except Exception as err:
        stat_logger.error('put event into in-process queue failed')
        stat_logger.exception(err)
        raise err
def get_event(self):
    """Blocking-pop the next event from the Redis queue.

    Returns the parsed event, or None if the pop or parse fails.
    """
    try:
        connection = self.get_conn()
        # brpop blocks and returns a (key, value) pair; the payload is index 1.
        raw = connection.brpop([self.queue_name])
        event = self.parse_event(raw[1])
        stat_logger.info('get event from redis queue: {}'.format(event))
        return event
    except Exception as err:
        stat_logger.exception(err)
        stat_logger.error('get event from redis queue failed')
        return None
def del_event(self, event):
    """LREM one matching serialized event from the Redis queue.

    Raises:
        Exception: when the event is absent or the Redis call fails.
    """
    try:
        connection = self.get_conn()
        removed = connection.lrem(self.queue_name, 1, json.dumps(event))
        outcome = 'successfully' if removed else 'failed'
        stat_logger.info('delete event from redis queue {}: {}'.format(outcome, event))
        if not removed:
            # Nothing matched: surface it as a failure via the handler below.
            raise Exception('job not in redis queue')
    except Exception as err:
        stat_logger.error('delete event from redis queue failed')
        stat_logger.exception(err)
        raise Exception('delete event from redis queue failed')
def get_event(self, status=None, end_status=None):
    """Blocking-get the next event matching *status*/*end_status*.

    Returns the event, or None if the get fails.
    """
    try:
        event = self.get(block=True, status=status, end_status=end_status)
        stat_logger.info('get event from queue successfully: {}, status {}'.format(event, status))
        return event
    except Exception as err:
        stat_logger.error('get event from queue failed')
        stat_logger.exception(err)
        return None
def operation_record(data: dict, oper_type, oper_status):
    """Best-effort audit record of a model operation; never raises.

    Picks the initiator / model fields out of *data* according to the
    operation type and persists one OperLog row.
    """
    try:
        if oper_type == 'migrate':
            initiator = data.get("migrate_initiator", {})
            OperLog.create(f_operation_type=oper_type,
                           f_operation_status=oper_status,
                           f_initiator_role=initiator.get("role"),
                           f_initiator_party_id=initiator.get("party_id"),
                           f_request_ip=request.remote_addr,
                           f_model_id=data.get("model_id"),
                           f_model_version=data.get("model_version"))
        elif oper_type == 'load':
            initiator = data.get("initiator")
            job_parameters = data.get('job_parameters')
            OperLog.create(f_operation_type=oper_type,
                           f_operation_status=oper_status,
                           f_initiator_role=initiator.get("role"),
                           f_initiator_party_id=initiator.get("party_id"),
                           f_request_ip=request.remote_addr,
                           f_model_id=job_parameters.get("model_id"),
                           f_model_version=job_parameters.get("model_version"))
        else:
            # 'bind' and every other type fall back from top-level fields to
            # the nested initiator / job_parameters fields when absent.
            if oper_type == 'bind':
                role = data.get("initiator").get("role")
            else:
                role = data.get("role") or data.get("initiator").get("role")
            party_id = data.get("party_id") or data.get("initiator").get("party_id")
            model_id = data.get("model_id") or data.get('job_parameters').get("model_id")
            model_version = data.get("model_version") or data.get('job_parameters').get("model_version")
            OperLog.create(f_operation_type=oper_type,
                           f_operation_status=oper_status,
                           f_initiator_role=role,
                           f_initiator_party_id=party_id,
                           f_request_ip=request.remote_addr,
                           f_model_id=model_id,
                           f_model_version=model_version)
    except Exception:
        # Audit logging must never break the request -- record and continue.
        stat_logger.error(traceback.format_exc())
def component_output_model():
    """Return the output model (Param message) and its meta for one job component.

    Reads job_id / role / party_id / component_name from the request body,
    resolves model_id and model_version from the job configuration (falling
    back to the persisted model configuration), then renders the component's
    protobuf output model to JSON.

    Returns:
        A JSON result with the Param message as ``data`` and the Meta message
        plus component define as ``meta``, or a "no data" result when the
        component produced no Param output.

    Raises:
        Exception: when no model info can be found for the given filters.
    """
    request_data = request.json
    check_request_parameters(request_data)
    job_dsl, job_runtime_conf, runtime_conf_on_party, train_runtime_conf = job_utils.get_job_configuration(
        job_id=request_data['job_id'],
        role=request_data['role'],
        party_id=request_data['party_id'])
    try:
        model_id = runtime_conf_on_party['job_parameters']['model_id']
        model_version = runtime_conf_on_party['job_parameters']['model_version']
    except Exception as e:
        # Fall back to the stored model configuration when the job runtime
        # conf on this party does not carry the model identifiers.
        job_dsl, job_runtime_conf, train_runtime_conf = job_utils.get_model_configuration(
            job_id=request_data['job_id'],
            role=request_data['role'],
            party_id=request_data['party_id'])
        if any([job_dsl, job_runtime_conf, train_runtime_conf]):
            adapter = JobRuntimeConfigAdapter(job_runtime_conf)
            common_parameters = adapter.get_common_parameters().to_dict()
            model_id = common_parameters.get('model_id')
            # BUGFIX: 'to_dict' was previously referenced without calling it
            # (missing parentheses), so this line raised AttributeError
            # instead of returning the model version.
            model_version = common_parameters.get('model_version')
        else:
            stat_logger.exception(e)
            stat_logger.error(f"Can not find model info by filters: job id: {request_data.get('job_id')}, "
                              f"role: {request_data.get('role')}, party id: {request_data.get('party_id')}")
            raise Exception(f"Can not find model info by filters: job id: {request_data.get('job_id')}, "
                            f"role: {request_data.get('role')}, party id: {request_data.get('party_id')}")
    tracker = Tracker(job_id=request_data['job_id'],
                      component_name=request_data['component_name'],
                      role=request_data['role'],
                      party_id=request_data['party_id'],
                      model_id=model_id,
                      model_version=model_version)
    dag = schedule_utils.get_job_dsl_parser(dsl=job_dsl,
                                            runtime_conf=job_runtime_conf,
                                            train_runtime_conf=train_runtime_conf)
    component = dag.get_component_info(request_data['component_name'])
    output_model_json = {}
    # There is only one model output at the current dsl version.
    output_model = tracker.get_output_model(
        component.get_output()['model'][0] if component.get_output().get('model') else 'default')
    for buffer_name, buffer_object in output_model.items():
        if buffer_name.endswith('Param'):
            output_model_json = json_format.MessageToDict(buffer_object, including_default_value_fields=True)
    if output_model_json:
        component_define = tracker.get_component_define()
        this_component_model_meta = {}
        for buffer_name, buffer_object in output_model.items():
            if buffer_name.endswith('Meta'):
                this_component_model_meta['meta_data'] = json_format.MessageToDict(
                    buffer_object, including_default_value_fields=True)
        this_component_model_meta.update(component_define)
        return get_json_result(retcode=0, retmsg='success', data=output_model_json, meta=this_component_model_meta)
    else:
        return get_json_result(retcode=0, retmsg='no data', data={})