def __init__(self, **kwargs):
    """Set up the publisher and declare the target message queue.

    Keyword Args:
        targetq: Name of the RabbitMQ queue to publish to (required).
        objIds: Optional list of object ids to restrict processing to;
            when absent, ``self.loadall`` is True and all objects load.
        wfsteps: Optional list of workflow steps; defaults to [].

    Raises:
        ValueError: If no target queue is provided (subclass of the
            previously raised bare ``Exception``, so existing callers
            catching ``Exception`` keep working).
    """
    self.db = DB()
    # .get() folds the "key missing" and "explicit None" cases together,
    # which is exactly what the original two-step check tested.
    self.targetq = kwargs.get("targetq")
    if self.targetq is None:
        raise ValueError("No target queue provided")
    if "objIds" in kwargs:
        self.objIds = kwargs["objIds"]
        self.loadall = False
    else:
        # No explicit id list means "process everything".
        self.objIds = []
        self.loadall = True
    self.wfsteps = kwargs.get("wfsteps", [])
    # Blocking MQ connection; queue is durable so messages survive restarts.
    self.connection = pika.BlockingConnection(self.db.mq_conn_params)
    self.channel = self.connection.channel()
    self.channel.queue_declare(queue=self.targetq, durable=True)
def __init__(self, **kwargs):
    """Initialise the OCR consumer: DB handle, spell checker and MQ channel.

    Keyword Args:
        dev: If present (any value), enables development mode
            (``self.flagDev`` is True and a notice is printed).
    """
    self.db = DB()
    self.label_obj = None
    self.progressHandler = PH()
    # Speller was constructed identically in both branches of the original
    # dev/non-dev split; build it once here instead.
    self.spellChecker = Speller()
    self.flagDev = 'dev' in kwargs
    if self.flagDev:
        print("Run in development mode...")
    self.connection = pika.BlockingConnection(self.db.mq_conn_params)
    self.channel = self.connection.channel()
    # Queue name comes from the environment; durable so messages persist.
    self.channel.queue_declare(queue=os.environ.get("MQ_QUEUE_OCR"), durable=True)
    # Only hand this worker one unacked message at a time.
    self.channel.basic_qos(prefetch_count=1)
def __init__(self, dir, **kwargs):
    """Bind a source directory and eagerly populate the file list.

    Args:
        dir: Directory to scan (name shadows the builtin ``dir`` but is
            kept for caller compatibility).
        **kwargs: Accepted but currently unused.
    """
    self.dir = dir
    self.db = DB()
    self.files = []
    # Fills self.files from self.dir.
    self.load_files()
def __init__(self):
    """Initialise the NLP pipeline, lookup table, DB handle and matcher."""
    self._init_nlp()
    # Lookup table location is configured via the environment.
    lookup_path = os.environ.get("PRETAG_LOOKUP_TABLE_PATH")
    # keep_default_na=False: literal strings like "NA" stay strings.
    table = pd.read_csv(lookup_path, sep=",", encoding='utf8', keep_default_na=False)
    table.set_index("search", inplace=True)
    self.lookup = table
    self.db = DB()
    self.tag_cache = []
    self._init_phrase_matcher()
def main(args=None):
    """CLI entry point: configure logging, then dispatch requested routines.

    Args:
        args: Argument list to parse. Defaults to ``sys.argv[1:]`` at call
            time — the old signature default ``sys.argv[1:]`` was evaluated
            once at import time, freezing whatever argv held then.
    """
    # argparse falls back to sys.argv[1:] itself when handed None, so the
    # default is now resolved per call instead of at definition time.
    args = parser.parse_args(args)
    db = DB()
    # Strip any handlers other libraries installed so basicConfig applies.
    for handler in logging.root.handlers[:]:
        logging.root.removeHandler(handler)
    logging.basicConfig(
        format='%(asctime)s %(relativeCreated)d %(levelname)s %(message)s',
        filename='./comfash_logger.log',
        level=logging.DEBUG)
    # NOTE: the original also built a logging.Formatter and discarded the
    # result — a no-op, removed here.
    warnings.filterwarnings("ignore")
    if 'inbound' in args.routines:
        inbound.init_consuming(db)
    if 'classify' in args.routines:
        classify.init_consuming(db)
    if 'validated' in args.routines:
        validated.init_consuming(db)
    if 'reindex' in args.routines:
        resultdata.reindex(db)
    if 'detectitems' in args.routines:
        comfash_vapi = ComfashVAPI()
        test_image = "/mnt/cfp/class/bikini.jpg"
        comfash_vapi.detect_and_classify_items(test_image, "testSESSIONID")
    if 'issue2classify' in args.routines:
        session_id = args.sessionid
        resultdata.issue_classify_queue(db, args.models, args.copythumbs, session_id)
    if 'postprod' in args.routines:
        postprod.init_post_to_prod_index(db)
    if 'gather' in args.routines:
        gather.init_consuming_gather(db)
def __init__(self, file_path, pat_nummer, pat_name, **kwargs):
    """Initialise a file-processing job: hash the file, check for
    duplicates (unless overwriting), and open the OCR message queue.

    Args:
        file_path: Path to the input file.
        pat_nummer: Patient number associated with the file.
        pat_name: Patient name associated with the file.

    Keyword Args:
        flagoverwrite: Truthy to skip the duplicate check and overwrite.
        wfsteps: Optional list of workflow steps; defaults to [].
    """
    self.TIFF_PATH = os.environ.get("TIFF_PATH")
    self.db = DB()
    self.cvProcessor = CVProcessor()
    self.file_path = file_path
    self.file_name = os.path.basename(self.file_path)
    self.pat_nummer = pat_nummer
    self.pat_name = pat_name
    self.pages = []
    self.exist_ids = []
    self._get_file_hash()
    self.flag_file_exists = False
    # The original nested if duplicated check_file_exists() in two
    # branches; a missing key and a falsy value behave identically, so
    # kwargs.get collapses both into one test.
    self.flagoverwrite = bool(kwargs.get("flagoverwrite"))
    if self.flagoverwrite:
        print("Overwriting files with input...")
    else:
        self.check_file_exists()
    self.inserted_id = None
    self.wfsteps = kwargs.get("wfsteps", [])
    self.connection = pika.BlockingConnection(self.db.mq_conn_params)
    self.channel = self.connection.channel()
    self.channel.queue_declare(queue=os.environ.get("MQ_QUEUE_OCR"), durable=True)
def dev(self, objId):
    """Development helper: attach a fresh DB handle and process one object.

    Args:
        objId: Identifier of the object to process.
    """
    self.db = DB()
    self.objId = objId
    # Same value as self.objId, just assigned above.
    self.process_obj(objId)
def __init__(self, label):
    """Store the label and resolve its corresponding tag from the database.

    Args:
        label: Label whose DB-backed tag should be looked up.
    """
    self.label = label
    self.db = DB()
    # get_db_tag() reads via self.db / self.label set just above.
    self.tag = self.get_db_tag()