def multi_thred_block_merger(self, task_id, task_starttime, jobid):
    """Run one block-merger pass on a worker thread and publish the result.

    Builds the workflow response for (task_id, task_starttime); on success the
    response is pushed to the configured Kafka output topic, otherwise the
    failure is only logged (the error is assumed to have been routed to the
    error handler upstream — TODO confirm).

    :param task_id: identifier of the merger task being processed
    :param task_starttime: task start timestamp (stringified epoch)
    :param jobid: job identifier used when publishing to the output topic
    """
    thread = threading.current_thread().name
    log_info("multi_thred_block_merger" + str(thread) + " | block-merger process started ===>",
             app_context.application_context)
    file_value_response = self.workflow_response(task_id, task_starttime)
    # Membership test directly on the dict (no redundant .keys() call):
    # absence of "errorID" marks a successful response.
    if "errorID" not in file_value_response:
        producer = Producer()
        producer.push_data_to_queue(config.output_topic, file_value_response, jobid, task_id)
    else:
        log_info("process_merger_kf error send to error handler", app_context.application_context)
def word_detector_request_worker():
    """Long-running consumer thread for word-detector requests.

    Blocks on the module-level ``Queue``, runs the word-detector workflow for
    each message, and publishes either the successful response to the output
    topic or a 400 error object to the error topic. Never returns; exceptions
    from a single message are logged and the loop continues with the next one.
    """
    file_ops = FileOperation()
    DOWNLOAD_FOLDER = file_ops.create_file_download_dir(config.download_folder)
    producer_tok = Producer(config.bootstrap_server)
    log_info("word_detector_request_worker : starting thread ", LOG_WITHOUT_CONTEXT)
    while True:
        data = Queue.get(block=True)
        # Unique task id/start time derived from the current epoch time with
        # the decimal point stripped.
        task_id = str("word_detector" + str(time.time()).replace('.', ''))
        task_starttime = str(time.time()).replace('.', '')
        input_files, workflow_id, jobid, tool_name, step_order = file_ops.json_input_format(data)
        log_info("word_detector_request_worker processing -- received message " + str(jobid), data)
        try:
            response_gen = Response(data, DOWNLOAD_FOLDER)
            file_value_response = response_gen.workflow_response(task_id, task_starttime, False)
            # Identity comparison per PEP 8 (was "!= None").
            if file_value_response is not None:
                push_output(producer_tok, config.output_topic, file_value_response,
                            jobid, task_id, data)
                log_info("word_detector_request_worker : response send to topic %s"
                         % (config.output_topic), LOG_WITHOUT_CONTEXT)
            else:
                erro_obj = {
                    'code': 400,
                    'jobID': jobid,
                    'message': "Word detector failed"
                }
                producer_tok.push_data_to_queue(config.KAFKA_ANUVAAD_ETL_WF_ERROR_TOPIC, erro_obj)
                log_info("word_detector_request_worker : error send to error handler", data)
            log_info('word_detector_request_worker - request in internal queue {}'.format(
                Queue.qsize()), data)
        except Exception as e:
            log_exception("word_detector_request_worker ", LOG_WITHOUT_CONTEXT, e)
        finally:
            # Always balance the Queue.get() above; previously task_done() was
            # skipped on exceptions, leaving the queue's unfinished-task count
            # permanently off and hanging any Queue.join() caller.
            Queue.task_done()