def post(self):
    """Update corrected words of a digital document for the requesting user.

    Expects a JSON body with a non-empty 'words' list; the user is taken
    from the 'userID' header, falling back to 'x-user-id'.
    Returns the repository result or a post_error with HTTP 400 on failure.
    """
    user_id = request.headers.get('userID')
    if user_id is None:
        user_id = request.headers.get('x-user-id')
    body = request.get_json()
    # BUG FIX: the original used "and", so a payload without a 'words' key
    # short-circuited into body['words'] and raised KeyError (HTTP 500).
    # "or" correctly rejects both a missing and an empty 'words' list.
    if 'words' not in body or not body['words']:
        return post_error("Data Missing", "words are required", None), 400
    words = body['words']
    AppContext.adduserID(user_id)
    log_info(
        "DigitalDocumentUpdateWordResource for user {}, number words to update {} request {}"
        .format(user_id, len(words), body), AppContext.getContext())
    try:
        result = digitalRepo.update_words(user_id, words)
        if result == True:
            res = CustomResponse(Status.SUCCESS.value, words)
            return res.getres()
        # Repository returned a post_error payload instead of True.
        return result, 400
    except Exception as e:
        log_exception(
            "Exception in DigitalDocumentUpdateWordResource |{}".format(
                str(e)), AppContext.getContext(), e)
        return post_error("Data Missing",
                          "Failed to update word since data is missing",
                          None), 400
def post(self):
    """Fetch previously cached sentences from the redis store by key list.

    Requires a JSON body with a non-empty 'keys' list; returns 400 with a
    missing-parameters status on bad input or lookup failure.
    """
    body = request.get_json()
    if "keys" not in body or not body["keys"]:
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
    redis_keys = body["keys"]
    log_info("Fetching sentences from redis store", AppContext.getContext())
    try:
        stored = sentenceRepo.get_sentences_from_store(redis_keys)
        if stored is None:
            return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                                  None).getresjson(), 400
        return CustomResponse(Status.SUCCESS.value, stored).getres()
    except Exception as e:
        log_exception("Exception while fetching sentences from redis store ",
                      AppContext.getContext(), e)
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
def post(self):
    """Persist file content pages for a record after validating parameters.

    Requires 'pages', 'record_id', 'src_lang', 'tgt_lang' in the body and a
    'userid' (or 'x-user-id') header. Returns 400 on missing params or
    storage failure, success envelope otherwise.
    """
    body = request.get_json()
    user_id = request.headers.get('userid')
    if user_id is None:
        user_id = request.headers.get('x-user-id')
    # BUG FIX: the original read body['pages'] before validating its
    # presence, raising KeyError (HTTP 500) instead of a clean 400.
    pages = body.get('pages')
    file_locale = body.get('file_locale', '')
    job_id = body.get('job_id', '')  # parsed but unused, kept as in original
    record_id = body.get('record_id')
    src_lang = body.get('src_lang')
    tgt_lang = body.get('tgt_lang')
    if pages is None or user_id is None or record_id is None \
            or src_lang is None or tgt_lang is None:
        AppContext.addRecordID(record_id)
        log_info(
            'Missing params in FileContentSaveResource {}, user_id:{}'.format(
                body, user_id), AppContext.getContext())
        res = CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value, None)
        return res.getresjson(), 400
    AppContext.addRecordID(record_id)
    log_info(
        "FileContentSaveResource record_id ({}) for user ({})".format(
            record_id, user_id), AppContext.getContext())
    try:
        if fileContentRepo.store(user_id, file_locale, record_id, pages,
                                 src_lang, tgt_lang) == False:
            res = CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                                 None)
            return res.getresjson(), 400
        AppContext.addRecordID(record_id)
        log_info(
            "FileContentSaveResource record_id ({}) for user ({}) saved".format(
                record_id, user_id), AppContext.getContext())
        res = CustomResponse(Status.SUCCESS.value, None)
        return res.getres()
    except Exception as e:
        AppContext.addRecordID(record_id)
        log_exception("FileContentSaveResource ", AppContext.getContext(), e)
        res = CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value, None)
        return res.getresjson(), 400
def get(self):
    """Return stored file content pages for a record within a page range.

    start_page/end_page of 0,0 fetch the entire document; the requesting
    user comes from the mandatory 'ad-userid' header.
    """
    parser = reqparse.RequestParser()
    parser.add_argument(
        'start_page', type=int, location='args', required=True,
        help='start_page can be 0, set start_page & end_page as 0 to get entire document')
    parser.add_argument(
        'end_page', type=int, location='args', required=True,
        help='end_page can be 0, set start_page & end_page as 0 to get entire document')
    parser.add_argument('ad-userid', location='headers', type=str,
                        required=True, help='userid cannot be empty')
    parser.add_argument('job_id', type=str, location='args', required=False,
                        help='Job Id is required')
    parser.add_argument('record_id', type=str, location='args', required=True,
                        help='record_id is required')
    args = parser.parse_args()
    record_id = args['record_id']
    requester = args['ad-userid']
    AppContext.addRecordID(record_id)
    log_info(
        "FileContentGetResource record_id {} for user {}".format(
            record_id, requester), AppContext.getContext())
    try:
        content = fileContentRepo.get(requester, record_id,
                                      args['start_page'], args['end_page'])
        if content == False:
            return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                                  None).getresjson(), 400
        log_info(
            "FileContentGetResource record_id {} for user {} has {} pages".format(
                record_id, requester, content['total']),
            AppContext.getContext())
        return CustomResponse(Status.SUCCESS.value, content['pages'],
                              content['total']).getres()
    except Exception as e:
        log_exception("FileContentGetResource ", AppContext.getContext(), e)
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
def __init__(self):
    """Ensure the 'recordID' index exists on the backing collection."""
    collection = get_db()[DB_SCHEMA_NAME]
    try:
        collection.create_index('recordID')
    except pymongo.errors.DuplicateKeyError:
        # Index (or key) already present — safe to ignore.
        log_info("duplicate key, ignoring", AppContext.getContext())
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
def store(self, user_id, file_locale, record_id, pages, src_lang, tgt_lang):
    """Flatten each page's images/lines/text_blocks into block records and
    bulk-store them via the block model.

    Returns False when the bulk insert fails, True otherwise. Missing or
    null sections on a page are tolerated (logged, then skipped).
    """
    blocks = []
    for page in pages:
        log_info(page, AppContext.getContext())
        page_info = {
            'page_no': page['page_no'],
            'page_width': page['page_width'],
            'page_height': page['page_height'],
        }
        try:
            if page.get('images') is not None:
                for image in page['images']:
                    blocks.append(
                        self.create_block_info(image, record_id, page_info,
                                               'images', user_id, src_lang,
                                               tgt_lang))
        except Exception as e:
            AppContext.addRecordID(record_id)
            log_exception(
                'images key not present, thats strange:{}'.format(str(e)),
                AppContext.getContext(), e)
        try:
            if page.get('lines') is not None:
                for line in page['lines']:
                    blocks.append(
                        self.create_block_info(line, record_id, page_info,
                                               'lines', user_id, src_lang,
                                               tgt_lang))
        except Exception as e:
            AppContext.addRecordID(record_id)
            # Lines are optional; log at info level and continue.
            log_info(
                'lines key is not present, ignorning further:{}'.format(
                    str(e)), AppContext.getContext())
        try:
            if page.get('text_blocks') is not None:
                for text in page['text_blocks']:
                    blocks.append(
                        self.create_block_info(text, record_id, page_info,
                                               'text_blocks', user_id,
                                               src_lang, tgt_lang))
        except Exception as e:
            AppContext.addRecordID(record_id)
            log_exception(
                'text_blocks key not present, thats strange:{}'.format(
                    str(e)), AppContext.getContext(), e)
    if self.blockModel.store_bulk_blocks(blocks) == False:
        return False
    return True
def get(self):
    """Return digitized document pages for a record within a page range.

    start_page/end_page of 0,0 fetch the entire document.
    """
    parser = reqparse.RequestParser()
    parser.add_argument(
        'start_page', type=int, location='args', required=True,
        help='start_page can be 0, set start_page & end_page as 0 to get entire document')
    parser.add_argument(
        'end_page', type=int, location='args', required=True,
        help='end_page can be 0, set start_page & end_page as 0 to get entire document')
    parser.add_argument('recordID', type=str, location='args', required=True,
                        help='record_id is required')
    args = parser.parse_args()
    record_id = args['recordID']
    AppContext.addRecordID(record_id)
    log_info(
        "DigitalDocumentGetResource record_id {} ".format(record_id),
        AppContext.getContext())
    try:
        pages = digitalRepo.get_pages(record_id, args['start_page'],
                                      args['end_page'])
        if pages == False:
            return post_error("Data Missing",
                              "Failed to get pages since data is missing",
                              None), 400
        AppContext.addRecordID(record_id)
        log_info(
            "DigitalDocumentGetResource record_id {} has {} pages".format(
                record_id, pages['total']), AppContext.getContext())
        return CustomResponse(Status.SUCCESS.value, pages['pages'],
                              pages['total']).getres()
    except Exception as e:
        AppContext.addRecordID(record_id)
        log_exception(
            "Exception in DigitalDocumentGetResource |{}".format(str(e)),
            AppContext.getContext(), e)
        return post_error("Data Missing",
                          "Failed to get pages since data is missing",
                          None), 400
def __init__(self):
    """Ensure the compound text index over record_id/block_identifier exists."""
    collection = get_db()[DB_SCHEMA_NAME]
    index_spec = [("record_id", pymongo.TEXT),
                  ("block_identifier", pymongo.TEXT)]
    try:
        collection.create_index(index_spec, name="file_content_index")
    except pymongo.errors.DuplicateKeyError:
        # Index (or key) already present — safe to ignore.
        log_info("duplicate key, ignoring", AppContext.getContext())
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
def post(self):
    """Persist user-edited sentences; optionally mirror them to redis.

    Requires 'sentences' and 'workflowCode' in the body and a 'userid'
    (or 'x-user-id') header. The redis mirroring is best-effort: failures
    there are logged but do not fail the request.
    """
    body = request.get_json()
    editor_id = request.headers.get('userid')
    if editor_id is None:
        editor_id = request.headers.get('x-user-id')
    if 'sentences' not in body or editor_id is None or 'workflowCode' not in body:
        log_info(
            'Missing params in SaveSentenceResource {}, user_id:{}'.format(
                body, editor_id), AppContext.getContext())
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
    sentences = body['sentences']
    workflow_code = body['workflowCode']
    AppContext.addRecordID(None)
    log_info(
        "SaveSentenceResource for user {}, number sentences to update {} request {}"
        .format(editor_id, len(sentences), body), AppContext.getContext())
    try:
        if sentenceRepo.update_sentences(editor_id, sentences,
                                         workflow_code) == False:
            return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                                  None).getresjson(), 400
        if USER_TRANSLATION_ENABLED:
            try:
                sentenceRepo.save_sentences(editor_id, sentences)
            except Exception as e:
                log_exception("SaveSentenceResource",
                              AppContext.getContext(), e)
        return CustomResponse(Status.SUCCESS.value, sentences).getres()
    except Exception as e:
        log_exception("SaveSentenceResource ", AppContext.getContext(), e)
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
def post(self):
    """Search a word translation; optionally fall back to Google Translate.

    One side of the pair must be English. The query arg 'dict_fallback'
    (default 1) controls whether the Google fallback may run when the local
    dictionary has no match; the global DICTIONARY_FALLBACK switch must
    also be enabled. Successful fallback translations are saved back to
    the word repository before being returned.
    """
    body = request.json
    parser = reqparse.RequestParser()
    parser.add_argument('dict_fallback', type=int, location='args',
                        help='set 1 to invoke google transalte and 0 to not',
                        required=False, default=1)
    args = parser.parse_args()
    dict_fallback = args["dict_fallback"]
    log_info('received request for WordSearch', AppContext.getContext())
    if 'word' not in body or 'word_locale' not in body or 'target_locale' not in body:
        res = CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value, None)
        return res.getresjson(), 400
    if (body['word_locale'] == 'en') or (body['target_locale'] == 'en'):
        result = None
        if body['word_locale'] == 'en':
            body['word'] = body['word'].lower()
            result = wordRepo.search_english(body['word'],
                                             body['target_locale'])
        else:
            result = wordRepo.search_vernacular(body['word'],
                                                body['word_locale'])
        # BUG FIX: the parsed dict_fallback flag was never consulted, so
        # callers could not disable the Google fallback as its help text
        # promises. Default of 1 preserves the original behavior.
        if result is None and DICTIONARY_FALLBACK == True and dict_fallback == 1:
            translate = GoogleTranslate()
            # Call google apis to get the translation, save it, and
            # return the response.
            log_info(
                'checking google for the searched word ({})'.format(
                    body['word']), AppContext.getContext())
            input_word, translated_word, input_locale = translate.translate_text(
                body['target_locale'], body['word'])
            log_info(
                'google returned input ({}), translated ({})'.format(
                    input_word, translated_word), AppContext.getContext())
            if translated_word is None:
                res = CustomResponse(Status.SUCCESS.value, None)
                return res.getres()
            if body['word_locale'] == 'en':
                result = wordRepo.update(body['word'], 'en', translated_word,
                                         body['target_locale'])
            else:
                result = wordRepo.update(translated_word,
                                         body['target_locale'], body['word'],
                                         body['word_locale'])
            if result is None:
                res = CustomResponse(Status.SUCCESS.value, None)
                return res.getres()
            res = CustomResponse(Status.SUCCESS.value, result)
            return res.getres()
        else:
            log_info('returning word search from local database',
                     AppContext.getContext())
            res = CustomResponse(Status.SUCCESS.value, result)
            return res.getres()
    else:
        res = CustomResponse(Status.ERR_ENGLISH_MANDATORY.value, None)
        return res.getresjson(), 400
def post(self):
    """Update content blocks of a file and return the refreshed blocks.

    Requires 'blocks' in the body and a 'userid' (or 'x-user-id') header;
    'workflowCode', 'record_id' and 'modifiedSentences' are optional.
    """
    body = request.get_json()
    editor = request.headers.get('userid')
    if editor is None:
        editor = request.headers.get('x-user-id')
    modified_sentences = body.get('modifiedSentences')
    if 'blocks' not in body or editor is None:
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
    workflow_code = body.get('workflowCode')
    record_id = body.get('record_id')
    blocks = body['blocks']
    AppContext.addRecordID(record_id)
    log_info(
        "FileContentUpdateResource for user ({}), to update ({}) blocks".
        format(editor, len(blocks)), AppContext.getContext())
    log_info(str(body), AppContext.getContext())
    try:
        ok, updated_blocks = fileContentRepo.update(record_id, editor, blocks,
                                                    workflow_code,
                                                    modified_sentences)
        if ok == False:
            return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                                  None).getresjson(), 400
        log_info(
            "FileContentUpdateResource for user ({}) updated".format(editor),
            AppContext.getContext())
        payload = {'blocks': updated_blocks, 'workflowCode': workflow_code}
        return CustomResponse(Status.SUCCESS.value, payload,
                              len(updated_blocks)).getres()
    except Exception as e:
        log_exception("FileContentUpdateResource ", AppContext.getContext(),
                      e)
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
def save(self, words):
    """Bulk-insert word documents, tolerating duplicate-key collisions.

    Returns True when every document was inserted or only duplicates were
    rejected; False on any other failure.
    """
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        outcome = collection.insert_many(words, ordered=False)
        if len(outcome.inserted_ids) == len(words):
            return True
    except pymongo.errors.BulkWriteError:
        # Duplicates among the batch are acceptable.
        log_info("some of the record has duplicates ",
                 AppContext.getContext())
        return True
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
    return False
def get_document_total_page_count(self, record_id):
    """Return the highest page_no stored for a record, or 0 when absent."""
    pipeline = [
        {'$match': {'record_id': record_id}},
        {'$group': {'_id': '$record_id',
                    'page_count': {'$max': "$page_no"}}},
    ]
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        # The $group stage yields at most one document per record_id.
        for grouped in collection.aggregate(pipeline):
            return grouped['page_count']
        return 0
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
        return 0
def get_sentence_by_s_id(self, user_id, s_id):
    """Look up one tokenized sentence by s_id, backfilling s0_src/s0_tgt.

    Returns the sentence dict, or None when not found or on db failure.
    """
    pipeline = [
        {'$match': {'data.tokenized_sentences.s_id': s_id}},
        {'$project': {
            'tokenized_sentences': {
                '$filter': {
                    'input': '$data.tokenized_sentences',
                    'as': 'ts',
                    'cond': {'$eq': ['$$ts.s_id', s_id]},
                }
            }
        }},
    ]
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        for doc in collection.aggregate(pipeline):
            sentence = doc['tokenized_sentences'][0]
            # Older records may lack the s0_* originals; seed them from
            # the current src/tgt pair.
            if 's0_tgt' not in sentence:
                sentence['s0_tgt'] = sentence['tgt']
            if 's0_src' not in sentence:
                sentence['s0_src'] = sentence['src']
            return sentence
        return None
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
        return None
def post(self):
    """Save dictionary words; every word entry must have locale 'en'."""
    payload = request.json
    log_info('received request for WordSaveResource', AppContext.getContext())
    if payload is None or 'words' not in payload:
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
    for entry in payload['words']:
        if entry['locale'] != 'en':
            return CustomResponse(
                Status.ERR_ENGLISH_MANDATORY_WHILE_SAVING.value,
                None).getresjson(), 400
    if wordRepo.store(payload['words']) == False:
        return CustomResponse(Status.ERR_SCHEMA_VALIDATION.value,
                              None).getresjson(), 400
    return CustomResponse(Status.SUCCESS.value, None).getres()
def update_sentence_by_s_id(self, user_id, sentence): SENTENCE_KEYS = ['n_id', 'pred_score', 's_id', 'src', 'tgt'] try: collections = get_db()[DB_SCHEMA_NAME] results = collections.update( { '$and': [{ 'created_by': user_id }, { 'data.tokenized_sentences': { '$elemMatch': { 's_id': { '$eq': sentence['s_id'] } } } }] }, { '$set': { "data.tokenized_sentences.$.n_id": sentence['n_id'], "data.tokenized_sentences.$.src": sentence['src'], "data.tokenized_sentences.$.tgt": sentence['tgt'], "data.tokenized_sentences.$.save": sentence['save'], } }, upsert=False) if 'writeError' in list(results.keys()): return False return True except Exception as e: log_exception("db connection exception ", AppContext.getContext(), e) return False
def update_block_info(self, block, update_s0, modifiedSentences=None):
    """Wrap a block for storage, refreshing the s0_* sentence snapshots and
    stripping transient prediction fields.

    When update_s0 is true and modifiedSentences is a non-empty list, only
    those sentence ids get their s0 snapshot refreshed; with no list, every
    sentence is refreshed.
    """
    wrapped = {'data': block}
    if 'tokenized_sentences' in block:
        targeted = (update_s0 and modifiedSentences is not None
                    and len(modifiedSentences) != 0)
        refresh_all = update_s0 and (modifiedSentences is None
                                     or len(modifiedSentences) == 0)
        for elem in block['tokenized_sentences']:
            # Case: only the explicitly modified sentences change.
            if targeted and 's_id' in elem and elem['s_id'] in modifiedSentences:
                if 'tgt' in elem:
                    elem['s0_tgt'] = elem['tgt']
                    elem['s0_src'] = elem['src']
            # Case: entire block updated / updating source file.
            if refresh_all and 'tgt' in elem:
                elem['s0_tgt'] = elem['tgt']
                elem['s0_src'] = elem['src']
            # Drop transient model outputs before persisting.
            for transient in ('input_subwords', 'output_subwords',
                              'pred_score'):
                if transient in elem:
                    del elem[transient]
    log_info(
        "updating new block for block_identifier {}".format(
            block['block_identifier']), AppContext.getContext())
    return wrapped
def update(self, record_id, user_id, blocks, workflowCode,
           modifiedSentences=None):
    """Update the given blocks for a record and return (ok, saved_blocks).

    Workflow codes WF_S_TR and WF_S_TKTR change the sentence structure, so
    the s0 (original) sentence pair must be refreshed; DP_WFLOW_S_C does
    not change structure, so s0 is left alone.
    """
    refresh_s0 = workflowCode in ('WF_S_TR', 'WF_S_TKTR')
    log_info(
        "FileContentUpdateRepo -workflowcode : {} | update_S0 : {}".format(
            workflowCode, refresh_s0), AppContext.getContext())
    prepared = [
        self.update_block_info(block, refresh_s0, modifiedSentences)
        for block in blocks
    ]
    saved_blocks = []
    for item in prepared:
        AppContext.addRecordID(record_id)
        log_info("FileContentUpdateRepo -updating blocks",
                 AppContext.getContext())
        identifier = item['data']['block_identifier']
        if self.blockModel.update_block(record_id, user_id, identifier,
                                        item) == False:
            # Abort on first failed write; report what was saved so far.
            return False, saved_blocks
        AppContext.addRecordID(record_id)
        log_info("FileContentUpdateRepo -fetching back updated blocks",
                 AppContext.getContext())
        fetched = self.blockModel.get_block_by_block_identifier(
            record_id, user_id, identifier)
        for saved in fetched:
            saved_blocks.append(saved['data'][0])
    log_info(
        "FileContentUpdateRepo -updated blocks : {}".format(
            str(saved_blocks)), AppContext.getContext())
    return True, saved_blocks
def update_words(self, user_id, words):
    """Apply user word corrections to their OCR regions and persist them.

    Each entry in `words` must validate; on any validation, lookup, or
    persistence failure a post_error payload is returned, otherwise True.
    """
    for word in words:
        validation = validator.update_word_validation(word)
        if validation is not None:
            return validation
        page = word['page_no']
        region_id = word['region_id']
        word_id = word['word_id']
        record_id = word['record_id']
        user_word = word['updated_word']
        AppContext.addRecordID(record_id)
        log_info("DigitalDocumentRepo update word request",
                 AppContext.getContext())
        region_to_update = self.docModel.get_word_region(
            user_id, record_id, region_id, page)
        if region_to_update:
            if region_to_update['identifier'] == region_id:
                region_to_update['updated'] = True
                for data in region_to_update['regions']:
                    # BUG FIX: the inner loop variable was also named
                    # 'word', shadowing the request item being processed.
                    for region_word in data['regions']:
                        if region_word['identifier'] == word_id:
                            # Preserve the OCR text before overwriting
                            # with the user's correction.
                            region_word['ocr_text'] = region_word['text']
                            region_word['text'] = user_word
                            break
        else:
            return post_error(
                "Data Missing",
                "No record with the given user_id,record_id and region_id",
                None)
        AppContext.addRecordID(record_id)
        log_info(
            "DigitalDocumentRepo update word region :{}".format(
                str(region_to_update)), AppContext.getContext())
        # CLEANUP: removed stray debug print() of the full region payload.
        if self.docModel.update_word(user_id, record_id, region_id,
                                     region_to_update, page) == False:
            return post_error(
                "Data Missing",
                "Failed to update word since data is missing", None)
    return True
def save_sentences(self, user_id, sentences):
    """Cache sentences in redis, keyed by sha256 of user + src + locale pair.

    Best-effort: any failure is logged and None is returned.
    """
    try:
        for sentence in sentences:
            locale_pair = sentence["src_lang"] + "|" + sentence["tgt_lang"]
            digest_source = user_id + "___" + sentence["src"] + "___" + locale_pair
            redis_key = hashlib.sha256(
                digest_source.encode('utf_16')).hexdigest()
            self.sentenceModel.save_sentences_on_hashkey(redis_key, sentence)
            log_info("Sentences pushed to redis store",
                     AppContext.getContext())
    except Exception as e:
        log_exception(
            "Exception while storing sentence data on redis: " + str(e),
            AppContext.getContext(), e)
        return None
def get_sentence(self, user_id, s_ids):
    """Resolve each s_id to its stored sentence, skipping ids not found."""
    resolved = []
    for sid in s_ids:
        hit = self.sentenceModel.get_sentence_by_s_id(user_id, sid)
        if hit is None:
            log_info('could not get sentence for s_id {}'.format(sid),
                     AppContext.getContext())
        else:
            resolved.append(hit)
    return resolved
def get_block_by_s_id(self, user_id, s_id):
    """Return the first block created by user_id containing sentence s_id,
    or None when absent or on db failure."""
    query = {
        '$and': [
            {'created_by': user_id},
            {'data.tokenized_sentences': {
                '$elemMatch': {'s_id': {'$eq': s_id}}}},
        ]
    }
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        for doc in collection.find(query):
            return doc
        return None
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
        return None
def store_bulk_blocks(self, blocks):
    """Insert all block documents; True only when every insert succeeded."""
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        inserted = collection.insert_many(blocks).inserted_ids
        if len(inserted) == len(blocks):
            return True
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
    return False
def save_sentences_on_hashkey(self, key, sent):
    """Serialize one sentence to JSON and store it in redis under `key`.

    Returns 1 on success, None on failure (logged).
    """
    try:
        redis_client = get_redis()
        redis_client.set(key, json.dumps(sent))
        return 1
    except Exception as e:
        log_exception(
            "Exception in storing sentence data on redis store | Cause: " +
            str(e), AppContext.getContext(), e)
        return None
def search_source_word(self, word):
    """Find a dictionary entry by exact source name, or None when absent
    or on db failure."""
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        for doc in collection.find({'name': word}):
            # Only the first match is ever returned.
            return normalize_bson_to_json(doc)
        return None
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
        return None
def update_word(self, word):
    """Upsert a word document keyed by its 'name' field.

    Returns True on success (including duplicate-write collisions, which
    are logged and tolerated), False on a write error or db failure.
    """
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        # NOTE(review): pymongo's deprecated update() is retained — the
        # 'writeError' dict-result check depends on its legacy return shape.
        outcome = collection.update({'name': word['name']}, {'$set': word},
                                    upsert=True)
        return 'writeError' not in list(outcome.keys())
    except pymongo.errors.WriteError as e:
        log_info("some of the record has duplicates ",
                 AppContext.getContext())
        log_exception("update_word : exception ", AppContext.getContext(), e)
        return True
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
        return False
def get(self, user_id, s_id):
    """Fetch the sentence block containing s_id for the given user.

    Returns the standard success envelope, or a 400 missing-parameters
    response when the lookup fails.
    """
    AppContext.addRecordID(None)
    log_info(
        "SentenceBlockGetResource {} for user {}".format(s_id, user_id),
        AppContext.getContext())
    try:
        result = SentenceRepositories.get_sentence_block(user_id, s_id)
        if result == False:
            res = CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                                 None)
            return res.getresjson(), 400
        res = CustomResponse(Status.SUCCESS.value, result)
        # BUG FIX: the original constructed the success envelope and then
        # discarded it, returning the raw repository result ("return
        # result, 200"). Return the envelope like every sibling resource.
        return res.getres()
    except Exception as e:
        log_exception("SentenceBlockGetResource ", AppContext.getContext(),
                      e)
        res = CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value, None)
        return res.getresjson(), 400
def post(self):
    """Return sentence counts (optionally with bleu scores) for record ids.

    Requires 'record_ids' in the body and a 'userid' (or 'x-user-id')
    header; 'bleu_score' defaults to False when absent.
    """
    body = request.get_json()
    requester = request.headers.get('userid')
    if requester is None:
        requester = request.headers.get('x-user-id')
    if 'record_ids' not in body or requester is None:
        log_info(
            'Missing params in SentenceStatisticsCount {}, user_id:{}'.
            format(body, requester), AppContext.getContext())
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
    record_ids = body['record_ids']
    bleu_return = body.get('bleu_score', False)
    AppContext.addRecordID(None)
    log_info(
        "SentenceStatisticsCount for user {}, sentence count for record_ids {}"
        .format(requester, record_ids), AppContext.getContext())
    try:
        counts = sentenceRepo.get_sentences_counts(record_ids, bleu_return)
        if counts == False:
            return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                                  None).getresjson(), 400
        return CustomResponse(Status.SUCCESS.value, counts).getres()
    except Exception as e:
        log_exception("SentenceStatisticsCount ", AppContext.getContext(), e)
        return CustomResponse(Status.ERR_GLOBAL_MISSING_PARAMETERS.value,
                              None).getresjson(), 400
def store(self, userID, recordID, files):
    """Convert each uploaded file's pages into region blocks and bulk-insert
    them.

    Returns False when the bulk insert fails, a post_error payload when a
    page fails to convert or an exception occurs; otherwise falls through
    (implicit None) on success, as in the original.
    """
    try:
        for item in files:
            job_id = recordID.split('|')[0]
            file_id = item['file']['identifier']
            file_name = item['file']['name']
            locale = item['config']['language']
            file_type = item['file']['type']
            pages = item['pages']
            log_info(
                "DigitalDocumentRepo save document for user: {}| record: {}| count of pages received: {}"
                .format(userID, recordID, str(len(pages))),
                AppContext.getContext())
            blocks = []
            for page in pages:
                candidate = self.create_regions_from_page(
                    userID, job_id, recordID, file_id, file_name, locale,
                    file_type, page)
                # More than five keys is treated as a well-formed block;
                # presumably a smaller dict is an error payload from
                # create_regions_from_page — confirm against that helper.
                if len(candidate.keys()) > 5:
                    blocks.append(candidate)
                else:
                    return candidate
            log_info(
                'DigitalDocumentRepo page blocks created for insert, user_id:{}, record_id:{}, block length:{}'
                .format(userID, recordID, str(len(blocks))),
                AppContext.getContext())
            if self.docModel.store_bulk_blocks(blocks) == False:
                return False
    except Exception as e:
        AppContext.addRecordID(recordID)
        log_exception(
            'Exception on save document | DigitalDocumentRepo :{}'.format(
                str(e)), AppContext.getContext(), e)
        return post_error("Data Missing",
                          "Failed to store doc since :{}".format(str(e)),
                          None)
def get_all_blocks(self, user_id, record_id):
    """Return a cursor over all blocks of a record owned by user_id, or
    False on db failure."""
    try:
        collection = get_db()[DB_SCHEMA_NAME]
        return collection.find({
            'record_id': record_id,
            'created_by': user_id,
        })
    except Exception as e:
        log_exception("db connection exception ", AppContext.getContext(), e)
        return False