def __init__(self, parent=None):
    """Initialize the priority queues and the thread pool used by this manager."""
    super().__init__(parent)
    self.pool = QThreadPool(self)
    self.pool.setMaxThreadCount(5000)
    self.queues = {'HQueue': PGQ('HQueue'),
                   'MQueue': PGQ('MQueue'),
                   'LQueue': PGQ('LQueue')}
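# --- illustrative sketch (not part of the original source) -----------------
# A minimal example of how a caller might pick one of the three priority
# queues created above. The priority keys and the `put()` method on PGQ are
# assumptions for illustration only; the real enqueue API is not shown in
# this excerpt.
def _example_dispatch(manager, request, priority='medium'):
    queue_key = {'high': 'HQueue', 'medium': 'MQueue', 'low': 'LQueue'}[priority]
    queue = manager.queues[queue_key]
    queue.put(request)  # hypothetical enqueue call, see note above
    return queue_key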
def pause(data):
    params = Command.__get_data(data)
    if params:
        if params.get('process_id'):
            tag = params.get('process_id')
            try:
                # check if this process_id belongs to a running task
                task_exist = TaskModel.get(TaskModel.process_id == tag)
            except Exception:
                task_exist = False
            queue_exist = PGQ.preview(tag)
            if queue_exist or task_exist:
                try:
                    # reuse an already submitted action for this process_id
                    action = ActionModel.get(ActionModel.process_id == tag)
                except Exception:
                    action = ActionModel()
                action.process_id = params.get('process_id')
                action.action = constants.STATUS_PAUSE
                if params.get('timeout'):
                    action.timeout = params.get('timeout')
                else:
                    return {
                        'data': 'timeout keyword not found',
                        'status': constants.STATUS_ERROR
                    }
                try:
                    action.save()
                except Exception:
                    raise DatabaseError('cannot access database')
                Command.send_signal(
                    ['process_h.py', 'process_m.py', 'process_l.py'])
                return {
                    'data': 'pause request received!',
                    'status': constants.STATUS_PENDING
                }
            else:
                return {
                    'data': 'process_id not found',
                    'status': constants.STATUS_ERROR
                }
        else:
            return {
                'data': 'process_id keyword not found',
                'status': constants.STATUS_ERROR
            }
    return {
        'data': 'format or process id is wrong',
        'status': constants.STATUS_ERROR
    }
def resume(data):
    params = Command.__get_data(data)
    if params:
        if params.get('process_id'):
            tag = params.get('process_id')
            try:
                task_exist = TaskModel.get(TaskModel.process_id == tag)
            except Exception:
                task_exist = False
            if PGQ.preview(tag) or task_exist:
                try:
                    action = ActionModel.get(ActionModel.process_id == tag)
                except Exception:
                    action = ActionModel()
                action.process_id = params.get('process_id')
                action.action = constants.STATUS_RESUME
                try:
                    action.save()
                except Exception:
                    raise DatabaseError('cannot access database')
                Command.send_signal(
                    ['process_h.py', 'process_m.py', 'process_l.py'])
                return {
                    'data': 'resume request applied!',
                    'status': constants.STATUS_SUCCESS
                }
            else:
                return {
                    'data': 'process_id not found',
                    'status': constants.STATUS_ERROR
                }
        else:
            return {
                'data': 'process_id keyword not found',
                'status': constants.STATUS_ERROR
            }
    return {
        'data': 'format or process id is wrong',
        'status': constants.STATUS_ERROR
    }
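# --- illustrative sketch (not part of the original source) -----------------
# A rough example of how the pause and resume commands above might be driven.
# It assumes they are exposed as static methods on the Command class (as the
# internal Command.__get_data / Command.send_signal calls suggest) and that
# `data` arrives as a JSON string carrying a `process_id` (plus a `timeout`
# for pause); the exact wire format is an assumption.
def _example_pause_then_resume(process_id):
    import json
    pause_reply = Command.pause(json.dumps({'process_id': process_id,
                                            'timeout': 30}))
    # pause only records an action; the worker applies it after the next
    # SIGUSR1, so a successful reply carries STATUS_PENDING.
    resume_reply = Command.resume(json.dumps({'process_id': process_id}))
    return pause_reply, resume_reply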
def progress(data):
    params = Command.__get_data(data)
    if params:
        try:
            # check if the process is in task_model
            result = TaskModel.get(
                TaskModel.process_id == params.get('process_id'))
            if result.status == constants.STATUS_RUNNING:
                return {
                    'data': result.progress,
                    'status': constants.STATUS_SUCCESS
                }
            else:
                res = result.response_data
                for k, r in res.items():
                    try:
                        res.update({k: json.loads(r)})
                    except Exception:
                        pass
                return {'data': res, 'status': constants.STATUS_SUCCESS}
        except Exception:
            # the process does not exist in task_model, so check the queue
            if PGQ.preview(params.get('process_id')):
                return {
                    'data': {
                        'status': constants.STATUS_QUEUE,
                        'process_id': params.get('process_id'),
                        'message': 'process is still in the queue'
                    },
                    'status': constants.STATUS_SUCCESS
                }
            else:
                # the task does not exist in the queue either
                return {
                    'data': 'process_id not found',
                    'status': constants.STATUS_ERROR
                }
    return {
        'data': 'format or process id is wrong',
        'status': constants.STATUS_ERROR
    }
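# --- illustrative sketch (not part of the original source) -----------------
# One way a client loop might poll `progress` until the task leaves the
# queue and finishes. It assumes the same JSON request format as the sketch
# above and relies only on the reply shapes produced by the function above:
# a plain progress value while running, a STATUS_QUEUE dict while queued,
# and the response_data dict once the task is done.
def _example_wait_for_result(process_id, poll_interval=1.0):
    import json
    import time
    while True:
        reply = Command.progress(json.dumps({'process_id': process_id}))
        data = reply.get('data')
        if reply.get('status') == constants.STATUS_ERROR:
            return reply  # unknown process_id or bad request
        if isinstance(data, dict) and data.get('status') == constants.STATUS_QUEUE:
            time.sleep(poll_interval)  # still waiting in the queue
            continue
        if isinstance(data, dict):
            return reply  # finished: data holds the response_data payload
        time.sleep(poll_interval)  # still running: data is a progress value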
class BaseProcess(QObject):

    def __init__(self, queue_name, default_limit):
        super().__init__()
        self.running_tasks = {}
        self.done_tasks = []
        self.default_limit = default_limit
        self.queue = PGQ(queue_name)
        self.queue_name = queue_name
        self.pool = QThreadPool(self)
        self.pool.setMaxThreadCount(5000)
        signal.signal(signal.SIGUSR1, self.__signal_handler)
        self.items = []
        try:
            # reload tasks that were still running when the process last stopped
            __items = TaskModel.select().where(
                (TaskModel.status == constants.STATUS_RUNNING) &
                (TaskModel.queue_name == self.queue_name))
            for __item in __items:
                self.items.append(__item)
        except OperationalError:
            raise NetworkError('Network is unreachable')

    def run(self):
        while 1:
            # sleep to decrease CPU usage
            sleep(0.01)
            QCoreApplication.processEvents()
            from_db = False
            if len(self.running_tasks) >= self.limit():
                continue
            try:
                if self.items:
                    from_db = True
                    item = self.items.pop()
                else:
                    item = self.queue.get()  # type: RequestObject
                if item:
                    if not from_db:
                        task_model = TaskModel()
                        task_model.route = item.route
                        task_model.process_id = item.process_id
                        task_model.status = constants.STATUS_RUNNING
                        task_model.data = item.data
                        task_model.call_back = item.call_back
                        task_model.token = item.token
                        task_model.module_version = item.module_version
                        task_model.queue_name = self.queue_name
                        task_model.save()
                    else:
                        task_model = item
                    task = Task(task_model)
                    if task.instance_module:
                        task.instance_module.task_finished.connect(self.__task_finished)
                        task.setAutoDelete(True)
                        self.running_tasks.update({item.process_id: task})
                        # check for cancel or pause requests before starting
                        self.apply_action_by_pid(task, item.process_id)
                        self.pool.start(task)
                    else:
                        # TODO: set error alarm
                        ApiLogging.error('problem running task')
            except Exception as e:
                ApiLogging.error('process exception ' + str(e))
                # TODO: set error alarm
                continue

    def limit(self):
        try:
            limit = Setting.get(self.queue_name)
            if limit:
                return int(limit)
        except Exception:
            # fall back to the default limit if the setting is missing or unreadable
            pass
        return self.default_limit

    @pyqtSlot(str, name='task_finished')
    def __task_finished(self, task_id):
        ApiLogging.info('task finished')
        del self.running_tasks[task_id]
        self.done_tasks.append(task_id)
        self.send_signal()

    @staticmethod
    def send_signal():
        pids = []
        for process_name in constants.APP_PROCESSES:
            if process_name.get('name') == 'process_sync.py':
                pids = find_pid(process_name.get('token'))
        if len(pids) > 1:
            ApiLogging.warning('Too many sync processes running')
        elif len(pids) == 1:
            p = psutil.Process(pids[0])
            p.send_signal(signal.SIGUSR1)

    def __signal_handler(self, signum, frame):
        ApiLogging.info('base process signal received')
        # TODO: update limit
        self.check_pending_actions()

    def apply_action_by_pid(self, task, pid):
        actions = ActionModel.select().where(ActionModel.process_id == pid)
        for action in actions:
            if action.action == constants.STATUS_PAUSE:
                task.instance_module.pause_request = action.timeout
            elif action.action == constants.STATUS_CANCEL:
                task.instance_module.cancel_request = True
            elif action.action == constants.STATUS_RESUME:
                task.instance_module.resume_request = True
            action.status = constants.STATUS_SUCCESS
            action.save()

    def check_pending_actions(self):
        actions = ActionModel.select().where(ActionModel.status == constants.STATUS_PENDING)
        for action in actions:
            if action.process_id in self.running_tasks:
                task = self.running_tasks.get(action.process_id)
                if task:
                    if action.action == constants.STATUS_PAUSE:
                        task.instance_module.pause_request = action.timeout
                    elif action.action == constants.STATUS_CANCEL:
                        task.instance_module.cancel_request = True
                    elif action.action == constants.STATUS_RESUME:
                        task.instance_module.resume_request = True
                    action.status = constants.STATUS_SUCCESS
                    action.save()
            elif action.process_id in self.done_tasks:
                action.status = constants.STATUS_SUCCESS
                action.save()
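# --- illustrative sketch (not part of the original source) -----------------
# A minimal example of how one of the worker scripts (for instance
# process_h.py) might wire BaseProcess to its queue. The queue name, the
# default limit and the use of QCoreApplication here are assumptions drawn
# from the names referenced above, not a copy of the real entry point.
def _example_high_priority_worker(argv):
    app = QCoreApplication(argv)   # Qt event loop object, needed for processEvents() and slots
    worker = BaseProcess('HQueue', default_limit=5)
    worker.run()                   # blocks: polls the queue and starts Task objects in the pool
    return app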
def cancel(data):
    params = Command.__get_data(data)
    if params:
        if params.get('process_id'):
            tag = params.get('process_id')
            try:
                # check if this process_id is running
                task_exist = TaskModel.get(TaskModel.process_id == tag)
            except Exception:
                task_exist = False
            queue_exist = PGQ.preview(tag)
            # check if this process_id is in the queue or running
            if queue_exist or task_exist:
                try:
                    # check if any other command was already submitted
                    action = ActionModel.get(ActionModel.process_id == tag)
                except Exception:
                    action = ActionModel()
                action.process_id = params.get('process_id')
                action.action = constants.STATUS_CANCEL
                if queue_exist:
                    action.status = constants.STATUS_SUCCESS
                action.save()
                if queue_exist:
                    # take the item out of the queue and record it in task_model
                    item = PGQ.get_by_tag(tag)
                    task_model = TaskModel()
                    # route, call_back, token, process_id, data, module_version
                    error = {
                        'code': CancelExecutionError.get_code(),
                        'message': 'task canceled by user!'
                    }
                    task_model.route = item.data.route
                    task_model.process_id = item.data.process_id
                    task_model.status = constants.STATUS_FAILED
                    task_model.token = item.data.token
                    task_model.module_version = item.data.module_version
                    task_model.data = item.data.data
                    task_model.call_back = item.data.call_back
                    task_model.delivery = constants.STATUS_NEW
                    task_model.queue_name = item.name
                    task_model.response_data = {
                        'data': to_json({}),
                        'error': to_json(error),
                        'status': constants.STATUS_ERROR,
                        'token': item.data.token,
                        'process_id': item.data.process_id
                    }
                    try:
                        task_model.save()
                    except Exception:
                        raise DatabaseError('cannot access database')
                Command.send_signal(
                    ['process_h.py', 'process_m.py', 'process_l.py'])
                return {
                    'data': 'cancel request received!',
                    'status': constants.STATUS_PENDING
                }
            else:
                return {
                    'data': 'process_id not found',
                    'status': constants.STATUS_ERROR
                }
        else:
            return {
                'data': 'process_id keyword not found',
                'status': constants.STATUS_ERROR
            }
    return {
        'data': 'format or process id is wrong',
        'status': constants.STATUS_ERROR
    }
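# --- illustrative sketch (not part of the original source) -----------------
# `cancel` behaves differently for queued and running work: a queued item is
# removed immediately and stored as a FAILED task, while a running task only
# gets a pending CANCEL action that the worker applies after SIGUSR1. The
# helper below, with its hypothetical name and JSON request format, just
# makes that distinction explicit for callers.
def _example_cancel(process_id):
    import json
    reply = Command.cancel(json.dumps({'process_id': process_id}))
    if reply.get('status') == constants.STATUS_PENDING:
        # request accepted; poll `progress` to observe the final FAILED state
        return True
    return False  # unknown process_id or malformed request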