from flask_jwt_extended import jwt_required, get_current_user
from flask_restplus import Namespace, Resource, fields

from nleaser.api.error_handler import error_handler
from nleaser.api.request_models import get_tasks_response_model, tasks_model
from nleaser.api.request_models.ngram_models import post_model, post_response_model, get_response_model, \
    delete_response_model, ngram_model, get_model
from nleaser.models.nlp_extracted_data.ngrams import ExtractedNGramsSchema
from nleaser.models.tasks.ngrams.create import NGramsCreateTaskSchema
from nleaser.sources.logger import create_logger
from nleaser.sources.ngrams import NGramsService
from nleaser.sources.tasks.ngrams.create import NGramsCreateTaskService

logger = create_logger(__name__)

ns_ngrams = Namespace(
    "NGrams",
    "Namespace to extract, paginate and delete NGrams from the datasets")

# POST
create_ngram_response_model = ns_ngrams.model("create_ngram_response_model", post_response_model)

# GET
get_ngram_response_model = get_response_model.copy()
get_ngram_response_model["ngrams"] = fields.Nested(ns_ngrams.model(
    "ngram_model", ngram_model), as_list=True)
get_ngram_response_model = ns_ngrams.model("get_ngram_response_model", get_ngram_response_model)

# DELETE
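# Hedged sketch only -- the module above is truncated after the "# DELETE"
# marker, and nothing below is taken from the repository. It merely illustrates
# how the response model built above is typically attached to a flask_restplus
# Resource. The route path and the NGramsService method name (list_ngrams) are
# assumptions, and @jwt_required is applied without parentheses on the
# assumption that a flask_jwt_extended release older than 4.0 is in use.
@ns_ngrams.route("/<string:dataset_id>")
class NGramsResource(Resource):

    @jwt_required
    @ns_ngrams.marshal_with(get_ngram_response_model)
    def get(self, dataset_id):
        # Hypothetical call: the real constructor and method signatures of
        # NGramsService are not shown in this module.
        service = NGramsService(get_current_user())
        return service.list_ngrams(dataset_id)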
from nleaser.models import connect_db
from nleaser.sources.logger import create_logger
from workers.wordcloud_create import wordcloud_create_consumer

logger = create_logger("wordcloud_create")

if __name__ == '__main__':
    import time
    import logging

    from nleaser.sources.rabbit.consumer import RabbitConsumer

    while True:
        connect_db()
        pika_logger = logging.getLogger("pika")
        pika_logger.setLevel(logging.ERROR)
        try:
            logger.info("Connecting to RabbitMQ")
            consumer = RabbitConsumer("NLEaser.wordcloud_create")
            logger.info("Consuming")
            consumer.consume(wordcloud_create_consumer, auto_ack=False, prefetch=1)
        except Exception as e:
            logger.error("Error while consuming message", exc_info=True)
            time.sleep(5)
from nleaser.models import connect_db
from nleaser.sources.logger import create_logger
from nleaser.sources.rabbit.consumer import RabbitConsumer
from workers.ner_extract import ner_resume_create_consumer

logger = create_logger("ner_extract")

if __name__ == '__main__':
    import logging
    import time

    while True:
        connect_db()
        pika_logger = logging.getLogger("pika")
        pika_logger.setLevel(logging.ERROR)
        try:
            logger.info("Connecting to RabbitMQ")
            consumer = RabbitConsumer("NLEaser.ner_resume_create")
            logger.info("Consuming")
            consumer.consume(ner_resume_create_consumer, auto_ack=False, prefetch=1, long_time_process=True)
        except Exception as e:
            logger.error("Error while consuming message", exc_info=True)
            time.sleep(5)
from nleaser.models import connect_db
from nleaser.sources.logger import create_logger
from workers.sentence_import import sentence_preprocessor_consumer

logger = create_logger("sentence_import")

if __name__ == '__main__':
    import time
    import logging

    from nleaser.sources.rabbit.consumer import RabbitConsumer

    while True:
        connect_db()
        pika_logger = logging.getLogger("pika")
        pika_logger.setLevel(logging.ERROR)
        try:
            logger.info("Connecting to RabbitMQ")
            consumer = RabbitConsumer("NLEaser.sentence_import")
            logger.info("Consuming")
            consumer.consume(sentence_preprocessor_consumer, auto_ack=False, prefetch=1)
        except Exception as e:
            logger.error("Error while consuming message", exc_info=True)
            time.sleep(5)
from nleaser.models import connect_db
from nleaser.sources.logger import create_logger
from workers.ngrams_create import ngram_create_consumer

logger = create_logger("ngrams_create")

if __name__ == '__main__':
    import logging
    import time

    from nleaser.sources.rabbit.consumer import RabbitConsumer

    while True:
        connect_db()
        pika_logger = logging.getLogger("pika")
        pika_logger.setLevel(logging.ERROR)
        try:
            logger.info("Connecting to RabbitMQ")
            consumer = RabbitConsumer("NLEaser.ngrams_create")
            logger.info("Consuming")
            consumer.consume(ngram_create_consumer, auto_ack=False, prefetch=1)
        except Exception as e:
            logger.error("Error while consuming message", exc_info=True)
            time.sleep(5)
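# Hedged sketch, not part of the repository: the runner scripts above all rely
# on the project-internal nleaser.sources.rabbit.consumer.RabbitConsumer, whose
# implementation is not shown here. Because they silence the "pika" logger, the
# wrapper is presumably built on pika; the snippet below illustrates, with
# plain pika, what a manual-ack consumer with prefetch=1 on one of these queues
# generally looks like. The host, the durable flag and the ack-on-truthy-return
# convention are assumptions about the wrapper, and handle() stands in for a
# worker callback such as ngram_create_consumer, whose real signature is unknown.
import pika


def handle(body: bytes) -> bool:
    # Placeholder for a worker callback; return True when the message was
    # processed successfully.
    print(body)
    return True


def consume_forever(queue: str = "NLEaser.ngrams_create") -> None:
    connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
    channel = connection.channel()
    channel.queue_declare(queue=queue, durable=True)
    channel.basic_qos(prefetch_count=1)  # mirrors prefetch=1 in the runners

    def on_message(ch, method, properties, body):
        try:
            if handle(body):
                ch.basic_ack(delivery_tag=method.delivery_tag)
            else:
                # Processing reported failure: requeue for another attempt.
                ch.basic_nack(delivery_tag=method.delivery_tag, requeue=True)
        except Exception:
            # Unexpected error: drop the message instead of requeueing forever.
            ch.basic_nack(delivery_tag=method.delivery_tag, requeue=False)

    channel.basic_consume(queue=queue, on_message_callback=on_message, auto_ack=False)
    channel.start_consuming()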