예제 #1
0
def process_annotation_kf():
    """Kafka consumer loop for the NER workflow.

    Listens on ``config.ner_input_topic``, runs every message through
    ``CheckingResponse`` and pushes the resulting payload to
    ``config.ner_output_topic``. Consumer/producer failures are reported on
    the output topic or the file-ops error handler; the loop itself blocks
    forever on the consumer iterator.
    """
    file_ops = FileOperation()
    DOWNLOAD_FOLDER = file_ops.create_file_upload_dir(config.download_folder)
    # instantiation of consumer for respective topic
    try:
        consumer_class = Consumer(config.ner_input_topic,
                                  config.bootstrap_server)
        consumer = consumer_class.consumer_instantiate()
        log.info("--- consumer running -----")
    except Exception:
        # Consumer could not be created: report the failure on the output
        # topic and stop — without a consumer there is nothing to poll.
        response = Status.ERR_Consumer.value
        producer_html2json = Producer(config.bootstrap_server)
        producer = producer_html2json.producer_fn()
        producer.send(config.ner_output_topic, value=response)
        producer.flush()
        log.error(
            "error in kafka opertation while listening to consumer on topic %s"
            % (config.ner_input_topic))
        log.info("response send to topic %s" % (config.ner_output_topic))
        # BUG FIX: the original fell through and used the undefined
        # `consumer` below, raising an unrelated NameError.
        return
    try:
        log.info("trying to receive data from consumer")
        for msg in consumer:
            log.info("received data from consumer")
            data = msg.value
            # Unique task id / start time derived from the wall clock.
            task_id = str("NER-" + str(time.time()).replace('.', ''))
            task_starttime = str(time.time()).replace('.', '')
            checking_response = CheckingResponse(data, task_id, task_starttime,
                                                 DOWNLOAD_FOLDER)
            file_value_response = checking_response.main_response_wf()
            try:
                producer_ner = Producer(config.bootstrap_server)
                producer = producer_ner.producer_fn()
                producer.send(config.ner_output_topic,
                              value=file_value_response)
                producer.flush()
                log.info("producer flushed for topic %s" %
                         (config.ner_output_topic))
            except Exception:
                log.info(
                    "error occured in file operation of workflow and it is pushed to error queue"
                )
    except Exception as e:
        log.error(
            "error occured during consumer running or flushing data to another queue %s"
            % e)
        # Drain remaining messages, routing each to the error handler so the
        # upstream workflow is informed of the producer failure.
        for msg in consumer:
            log.info("value received from consumer")
            data = msg.value
            input_files, workflow_id, jobid, tool_name, step_order = file_ops.json_input_format(
                data)
            task_id = str("NER-" + str(time.time()).replace('.', ''))
            task_starttime = str(time.time()).replace('.', '')
            response = CustomResponse(Status.ERR_Producer.value, jobid,
                                      task_id)
            file_ops.error_handler(response, True)
            log.info(
                "error in kafka opertation producer flushed value on error topic"
            )
예제 #2
0
def process_block_segmenter_kf():
    """Kafka consumer loop for the document/block segmenter.

    Polls the input topic via ``consumer_validator()``, skips messages that
    fail JSON validation, and dumps each valid request into the module-level
    internal ``Queue`` for the worker thread to pick up. Kafka errors are
    routed to the file-ops error handler.
    """
    file_ops = FileOperation()
    DOWNLOAD_FOLDER = file_ops.create_file_download_dir(config.download_folder)
    producer_tok = Producer(config.bootstrap_server)

    # instantiation of consumer for respective topic
    try:
        consumer = consumer_validator()
        log_info(
            "process_document_segmenter_kf : trying to receive value from consumer ",
            LOG_WITHOUT_CONTEXT)

        for msg in consumer:
            # BUG FIX: parse the payload once instead of calling
            # Consumer.get_json_data on the same bytes twice.
            data = Consumer.get_json_data(msg.value)
            if data == None:
                log_info(
                    'process_document_segmenter_kf - received invalid data {}'.
                    format(msg.value), None)
                continue

            jobid = data['jobID']
            log_info(
                'process_document_segmenter_kf - received message from kafka, dumping into internal queue',
                data)
            input_files, workflow_id, jobid, tool_name, step_order = file_ops.json_input_format(
                data)

            # Hand the request to the in-process worker queue.
            Queue.put(data)
            log_info(
                'process_document_segmenter_kf - request in internal queue {}'.
                format(Queue.qsize()), data)
            # NOTE(review): the internal queue is unbounded here — consider
            # rejecting kafka requests once its size grows too large.

    except KafkaConsumerError as e:
        response_custom = {}
        response_custom['message'] = str(e)
        file_ops.error_handler(response_custom, "KAFKA_CONSUMER_ERROR", True)
        # BUG FIX: log message previously named the wrong function
        # ("process_layout_detector_kf" — copy/paste defect).
        log_exception(
            "process_document_segmenter_kf : Consumer didn't instantiate",
            None, e)
    except KafkaProducerError as e:
        response_custom = {}
        response_custom['message'] = e.message
        file_ops.error_handler(response_custom, "KAFKA_PRODUCER_ERROR", True)
        log_exception(
            "process_document_segmenter_kf : response send to topic %s" %
            (config.output_topic), None, e)
예제 #3
0
def process_vision_ocr_kf():
    """Kafka consumer loop for the vision-OCR workflow.

    Waits on ``controlQueue`` for a go-ahead token, then takes exactly one
    valid message from the consumer per token: the message is committed,
    validated, and pushed into ``processQueue`` for the worker. Kafka errors
    are routed to the file-ops error handler.
    """
    file_ops = FileOperation()
    DOWNLOAD_FOLDER = file_ops.create_file_download_dir(config.download_folder)
    producer_tok = Producer(config.bootstrap_server)

    # instantiation of consumer for respective topic
    try:
        consumer = consumer_validator()
        log_info("process_vision_ocr_kf : trying to receive value from consumer ", LOG_WITHOUT_CONTEXT)

        while True:
            # Block until the worker signals it is ready for the next job.
            wait_for_control = controlQueue.get(block=True)

            for msg in consumer:
                data = Consumer.get_json_data(msg.value)
                if data == None:
                    log_info('process_vision_ocr_kf - received invalid data {}'.format(msg.value), None)
                    continue

                # Commit immediately so a crash does not replay this message.
                consumer.commit()
                # Optionally, to check if everything went good
                print('New Kafka offset: %s' % consumer.committed(TopicPartition(config.input_topic, msg.partition)))

                jobid = data['jobID']
                log_info('process_vision_ocr_kf - received message from kafka, dumping into internal queue', data)
                input_files, workflow_id, jobid, tool_name, step_order = file_ops.json_input_format(data)

                processQueue.put(data)
                # BUG FIX: report the size of processQueue (where the request
                # was actually put), not the unrelated module-level Queue.
                log_info('process_vision_ocr_kf - request in internal queue {}'.format(processQueue.qsize()),
                         data)
                # One message per control token: go back to waiting.
                break

    except KafkaConsumerError as e:
        response_custom = {}
        response_custom['message'] = str(e)
        file_ops.error_handler(response_custom, "KAFKA_CONSUMER_ERROR", True)
        log_exception("process_vision_ocr_kf : Consumer didn't instantiate", None, e)
    except KafkaProducerError as e:
        response_custom = {}
        response_custom['message'] = e.message
        file_ops.error_handler(response_custom, "KAFKA_PRODUCER_ERROR", True)
        log_exception("process_vision_ocr_kf : response send to topic %s"%(config.output_topic), None, e)
예제 #4
0
def process_merger_kf():
    """Kafka consumer loop for the block-merger workflow.

    Consumes from ``config.input_topic`` and spawns one daemon-style worker
    thread (``Response.multi_thred_block_merger``) per message. Per-message
    failures are logged and the loop continues; consumer/producer setup
    failures are routed to the file-ops error handler.
    """
    file_ops = FileOperation()
    DOWNLOAD_FOLDER = file_ops.create_file_download_dir(config.download_folder)
    task_id = str("BM-" + str(time.time()).replace('.', ''))
    task_starttime = str(time.time()).replace('.', '')
    # BUG FIX: pre-bind jobid so the inner except handler below cannot raise
    # NameError when json_input_format fails on the very first message.
    jobid = None
    # instantiation of consumer for respective topic
    try:
        consumer_class = Consumer(config.input_topic, config.bootstrap_server)
        consumer = consumer_class.consumer_instantiate()
        log_info("process_merger_kf", "trying to receive value from consumer",
                 None)
        thread_instance = 0
        for msg in consumer:
            try:
                data = msg.value
                # Fresh task id / start time per message.
                task_id = str("BM-" + str(time.time()).replace('.', ''))
                task_starttime = str(time.time()).replace('.', '')
                input_files, workflow_id, jobid, tool_name, step_order = file_ops.json_input_format(
                    data)
                log_info("process_merger_kf", "kafka request arrived ", jobid)
                response_gen = Response(data, DOWNLOAD_FOLDER)
                # Each request is processed on its own thread so the consumer
                # loop keeps draining the topic.
                t1 = threading.Thread(
                    target=response_gen.multi_thred_block_merger,
                    args=(task_id, task_starttime, jobid),
                    name='BM-thread-' + str(thread_instance))
                t1.start()
                thread_instance += 1
                log_info("multithread", "block-merger running on multithread",
                         None)
            except Exception as e:
                log_exception("process_pdf_kf",
                              "exception while consuming the records", jobid,
                              e)

    except KafkaConsumerError as e:
        response_custom = CustomResponse(Status.ERR_STATUS.value, None, None)
        response_custom.status_code['message'] = str(e)
        file_ops.error_handler(response_custom.status_code,
                               "KAFKA_CONSUMER_ERROR", True)
        log_exception("process_pdf_kf", "Consumer didn't instantiate", None, e)
    except KafkaProducerError as e:
        response_custom = e.code
        response_custom['message'] = e.message
        file_ops.error_handler(response_custom, "KAFKA_PRODUCER_ERROR", True)
        log_exception("process_pdf_kf",
                      "response send to topic %s" % (config.output_topic),
                      response_custom['jobID'], e)
예제 #5
0
def process_kf_request_payload():
    """Kafka consumer loop that forwards each valid request to processRequest.

    Skips messages that fail JSON validation, records the jobID in the
    logging context, and hands the payload to ``processRequest``. All Kafka
    and unexpected errors are routed to the file-ops error handler.
    """
    file_ops = FileOperation()

    # instantiation of consumer for respective topic
    try:
        consumer = consumer_validator()
        log_info("trying to receive value from consumer ", LOG_WITHOUT_CONTEXT)

        for msg in consumer:
            if Consumer.get_json_data(msg.value) == None:
                log_info('received invalid data {}'.format(msg.value),
                         LOG_WITHOUT_CONTEXT)
                continue

            data = Consumer.get_json_data(msg.value)
            LOG_WITHOUT_CONTEXT['jobID'] = data['jobID']
            log_info(
                "received input request from Kafka queue for JobID: %s " %
                (data['jobID']), LOG_WITHOUT_CONTEXT)
            processRequest(data)

    except KafkaConsumerError as e:
        response_custom = {}
        response_custom['message'] = str(e)
        file_ops.error_handler(response_custom, "KAFKA_CONSUMER_ERROR", True)
        log_exception("Consumer didn't instantiate", None, e)
    except KafkaProducerError as e:
        response_custom = {}
        response_custom['message'] = e.message
        file_ops.error_handler(response_custom, "KAFKA_PRODUCER_ERROR", True)
        log_exception("response send to topic %s" % (config.output_topic),
                      None, e)
    except Exception as e:
        # BUG FIX: response_custom was unbound on this path in the original
        # (it is only assigned in the two handlers above), so the handler
        # itself raised NameError and masked the real error.
        response_custom = {}
        response_custom['message'] = str(e)
        file_ops.error_handler(response_custom, "KAFKA_CONSUMER_ERROR", True)
        log_exception("response send to topic %s" % (config.output_topic),
                      None, e)