def task_callback(self, ch, method, properties, body):
    """Handle a task message published on the RabbitMQ queue.

    Deserializes the message, builds the task URI, and registers the
    session id (ssid) for that URI.  If this is the first ssid queued
    for the URI (queue length == 1), the type-specific handler runs in
    a freshly spawned process; otherwise the ssid is merely appended
    and the already-running task will serve it.  The message is acked
    in both cases.

    `self.channel.basic_qos(prefetch_count=1)` (configured elsewhere)
    caps the number of messages processed concurrently, and `p.join()`
    below keeps this callback busy until the task process finishes.

    Args:
        ch: the channel the message arrived on (used for the ack).
        method: delivery metadata providing `delivery_tag`.
        properties: message properties (unused here).
        body: pickled payload with "meta" ({"type", "ssid"}) and "args".
    """
    # NOTE: lazy %-args are used for all logger calls so formatting is
    # skipped entirely when the level is disabled.
    self.logger.debug("Receive task from queue at %f", time.time())
    # SECURITY: pickle.loads on data received from the broker executes
    # arbitrary code if untrusted parties can publish to this queue —
    # review the queue's access control before trusting this.
    tbody = pickle.loads(body)
    ttype = tbody["meta"]["type"]
    ssid = tbody["meta"]["ssid"]
    self.logger.info("receive %s task : '%.200s'", ttype, tbody)
    task_uri = D._uri(ttype, **(tbody["args"]))
    self.logger.debug("check task at %f", time.time())
    qlen = add_to_task_l_and_check_qlen(task_uri, ssid)
    if qlen == 1:
        # First ssid for this task URI: no identical task is running,
        # so launch the handler.  A separate process is created so the
        # handler does not reuse the historical MemoryCache — the cache
        # only accelerates work *within* a single process.
        self.logger.debug("start processing data at %f", time.time())
        p = multiprocessing.Process(
            target=getattr(self, "%s_callback" % ttype),
            args=(tbody["args"], task_uri),
        )
        p.start()
        p.join()
    else:
        self.logger.debug(
            "There has already been the same task. Just append the ssid %s.",
            ssid,
        )
    ch.basic_ack(delivery_tag=method.delivery_tag)
def clear_task(body): """Callback function when initialize rabbitmq.""" tbody = pickle.loads(body) ttype = tbody["meta"]["type"] task_uri = D._uri(ttype, **(tbody["args"])) # delete task pop_ssids_from_redis(task_uri)