def get_document_preview_status(batch_id=None, document_id=None):
    """Return rendering state and download links for one batch document."""
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if batch is None:
        raise errors.BatchNotFound()

    doc = BatchDocumentDbObject.query.filter_by(batch=batch, id=document_id).first()
    if doc is None:
        raise errors.DocumentNotFound()

    # The PDF link only exists once a rendered file is attached to the document.
    pdf_url = FileStorage.get_url(doc.file, current_app.config) if doc.file else None
    return {
        'result': {
            'state': doc.status,
            'links': {'pdf': pdf_url, 'jpeg': []},
            'document_id': unicode(document_id)
        }
    }
def finalize_batch(batch_id=None):
    """Start finalisation of the caller's batch; raises BatchNotFound otherwise."""
    owned_batches = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False)
    batch = owned_batches.first()
    if batch is None:
        raise errors.BatchNotFound()
    return _finalize_batch(batch)
def render_batch_documents(batch_id=None, document_type=None):
    """Schedule rendering of a group of document types for the given batch.

    `document_type` is a JSON-encoded list of document-type names; each entry
    must be supported by the batch's type.
    """
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if batch is None:
        raise errors.BatchNotFound()

    manager = BatchManager.init(batch)

    requested_types = json.loads(document_type)
    if not isinstance(requested_types, (list, tuple)):
        raise errors.InvalidParameterValue('document_type')

    doc_type_set = set()
    for requested in requested_types:
        if not manager.check_doc_type_support(batch.batch_type, requested):
            raise errors.InvalidParameterValue('document_type')
        doc_type_set.add(requested)

    # Hand the whole group to the doc_builder plugin in one action.
    BatchManager.perform_action(
        {'plugin': 'doc_builder', 'action': 'render_group'},
        batch,
        {'doc_types': list(doc_type_set), 'batch_id': batch.id},
        current_app.logger,
        current_app.config)
    return {"result": True}
def get_bank_partners(batch_id=None):
    """List enabled bank partners applicable to the batch's city."""
    try:
        address = BatchManager.get_batch_address(batch_id)
        if address['region'] in SPECIAL_CITY_REGIONS:
            # Federal-city regions: the region itself acts as the city.
            city = address['region']
        else:
            city = address.get('city', address.get('village', u""))
    except Exception:
        raise errors.BatchNotFound()

    query = BankPartnersObject.query.filter_by(enabled=True)
    if city:
        # Partner either lists this city explicitly or has no city restriction.
        query = query.filter(or_(BankPartnersObject.city.contains([city]),
                                 BankPartnersObject.city == None))

    bank_partner_list = [{
        "id": bank.id,
        "link": bank.link,
        "banner": bank.banner,
        "title": bank.title,
        "conditions": bank.conditions or []
    } for bank in query.order_by(BankPartnersObject.sort_index.asc())]
    return {"result": {"banks_partners": bank_partner_list}}
def unfinalize_batch(batch_id=None, force=False):
    """Move a finalised batch back to an editable state.

    Revokes any in-flight rendering task first, then asks the batch manager
    to definalise. On any failure the status is restored to FINALISED and
    DocumentBatchDefinalizationError is raised.
    """
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).scalar()
    if not batch:
        raise errors.BatchNotFound()
    # Only finalised batches may be definalised.
    if batch.status not in (BatchStatusEnum.BS_FINALISED, ):
        raise errors.DocumentBatchDefinalizationError()
    if batch.current_task_id:
        # A rendering task is still running: revoke it and clear its
        # bookkeeping so a late result cannot overwrite the definalised batch.
        from celery import app as celery
        current_app.logger.debug(u"There are task id: %s" % unicode(batch.current_task_id))
        celery.default_app.control.revoke(batch.current_task_id)
        remove_task_id_run_file(current_app.config, batch.current_task_id)
        batch.current_task_id = None
        batch.batch_rendering_start = None
        sqldb.session.commit()
    batch_manager = BatchManager.init(batch)
    try:
        if not batch_manager.definalize_batch(
                current_app.config, current_app.logger, batch, force):
            raise errors.DocumentBatchDefinalizationError()
    except Exception:
        # Roll the status back so the batch is not left half-definalised.
        batch.status = BatchStatusEnum.BS_FINALISED
        sqldb.session.commit()
        raise errors.DocumentBatchDefinalizationError()
    return {"result": True}
def yurist_get(batch_id=None):
    """Return the state of the lawyer ("yurist") check for a batch.

    Prefers an active (non-final) check; otherwise falls back to the most
    recent one. If no check was ever requested, a default NEW structure is
    returned.
    """
    batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                     _owner=current_user,
                                                     deleted=False).first()
    if not batch_db:
        raise errors.BatchNotFound()
    # get active or last refused
    check_obj = YuristBatchCheckObject.query.filter(
        YuristBatchCheckObject.batch_id == batch_id,
        YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)
    ).first()
    if not check_obj:
        # get the last one
        check_obj = YuristBatchCheckObject.query.filter_by(batch_id=batch_id).order_by(
            YuristBatchCheckObject.create_date.desc()
        ).first()
    if check_obj:
        booking = YuristBatchCheck.db_obj_to_field(check_obj).get_api_structure()
        # WAIT is an internal state; clients see it as IN_PROGRESS.
        if booking['status'] == YuristBatchCheckStatus.YBS_WAIT:
            booking['status'] = YuristBatchCheckStatus.YBS_IN_PROGRESS
        return {'result': booking}
    return {'result': {
        'batch_id': batch_id,
        'attached_files': [],
        'typos_correction': False,
        'status': YuristBatchCheckStatus.YBS_NEW
    }}
def get_batch_status(batch_id=None):
    """Return the batch's API structure with its documents omitted."""
    db_batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if db_batch is None:
        raise errors.BatchNotFound()
    batch_field = DocumentBatch.db_obj_to_field(db_batch)
    return {'result': batch_field.get_api_structure(skip_documents=True)}
def get_accountant_partners(batch_id=None):
    """Look up accountant partners for the batch's region.

    NOTE(review): as visible here the function ends right after resolving
    the region — `accountant_partner_list` is never populated or returned.
    This looks truncated; confirm against the original file.
    """
    accountant_partner_list = []
    try:
        region = BatchManager.get_batch_region(batch_id)
    except Exception, ex:
        # Any failure to resolve the region is reported as a missing batch.
        current_app.logger.exception(u"Failed to get batch region")
        raise errors.BatchNotFound()
def get_render_batch_documents_state(batch_id=None, document_types=None):
    """Report per-document rendering state for the requested document types.

    `document_types` is a JSON-encoded list; each entry must be supported by
    the batch's type. Returns a list of dicts with 'state', 'document_type'
    and, for rendered documents, 'links'.
    """
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    batch_manager = BatchManager.init(batch)
    try:
        document_types = json.loads(document_types)
        if not isinstance(document_types, list) and not isinstance(
                document_types, tuple):
            raise Exception()
    except Exception:
        # Both malformed JSON and a non-list payload are client errors.
        raise errors.InvalidParameterValue('document_type')
    doc_type_set = set()
    for doc_type in document_types:
        if not batch_manager.check_doc_type_support(batch.batch_type, doc_type):
            raise errors.InvalidParameterValue('document_type')
        doc_type_set.add(doc_type)
    result = []
    for doc_type in doc_type_set:
        doc_obj = BatchDocumentDbObject.query.filter_by(
            batch_id=batch_id, document_type=doc_type).first()
        if not doc_obj:
            # Document was never created for this batch: report it as new.
            result.append({
                'state': UserDocumentStatus.DS_NEW,
                'document_type': doc_type
            })
            continue
        doc_info = {'state': doc_obj.status, 'document_type': doc_type}
        if doc_obj.status == UserDocumentStatus.DS_RENDERED:
            if doc_obj.file:
                doc_info['links'] = {
                    'pdf': FileStorage.get_url(doc_obj.file, current_app.config),
                    'jpeg': []
                }
                result.append(doc_info)
            else:
                # RENDERED status without an attached file means the render
                # actually failed; report it as such.
                current_app.logger.debug(
                    u"Not found rendered documents for rendered document %s. "
                    u"Returning as rendering_failed" % doc_type)
                result.append({
                    'state': UserDocumentStatus.DS_RENDERING_FAILED,
                    'document_type': doc_type
                })
        else:
            result.append(doc_info)
    return result
def batch_delete(batch_id=None):
    """Soft-delete the caller's batch by flipping its `deleted` flag."""
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if batch is None:
        raise errors.BatchNotFound()

    batch.deleted = True
    sqldb.session.commit()
    return {'result': True}
def batch_update_metadata(batch_id=None, batch=None):
    """Replace the batch's metadata and return its refreshed API structure."""
    db_batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if db_batch is None:
        raise errors.BatchNotFound()

    db_batch._metadata = batch.get('metadata', {})
    sqldb.session.commit()

    refreshed = DocumentBatch.db_obj_to_field(db_batch)
    return {'result': refreshed.get_api_structure()}
def get_batch_doc(batch_id=None, document_id=None):
    """Staging/debug-only endpoint: return the text source of a rendered PDF.

    Looks for a `.text-src` file stored next to the document's `.pdf` and
    returns its contents; any miss falls through to BatchNotFound.
    """
    config = current_app.config
    # Hidden outside staging/debug deployments.
    if not config['STAGING'] and not config['DEBUG']:
        abort(404)
    # NOTE(review): no ownership filter here — presumably acceptable because
    # the endpoint only exists on staging/debug; confirm.
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()
    if not batch:
        raise errors.BatchNotFound()
    doc = BatchDocumentDbObject.query.filter_by(id=document_id).first()
    # NOTE(review): `doc` is not checked for None — an unknown document_id
    # raises AttributeError on the next line instead of a clean 404; confirm.
    file_obj = doc.file
    if file_obj:
        file_path = FileStorage.get_path(file_obj, current_app.config)
        if os.path.exists(file_path) and file_path.endswith('.pdf'):
            # The renderer keeps the document's text source beside the PDF.
            file_path = file_path[:-4] + '.text-src'
            if os.path.exists(file_path):
                with codecs.open(file_path, 'r', 'utf-8') as ff:
                    content = ff.read()
                return {'result': content}
    raise errors.BatchNotFound()
def go_back(batch_id=None):
    """Step the batch's state machine back one step.

    Not implemented for new-LLC batches.
    """
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if batch is None:
        raise errors.BatchNotFound()

    # Guard clause: the new-LLC flow has no backward transition.
    if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:
        raise NotImplementedError()

    BatchManager.handle_event(batch_id, 'go_back', {'batch_id': batch_id},
                              logger=current_app.logger,
                              config=current_app.config)
    return {"result": True}
def get_bank_partner_request_status(bank_id=None, batch_id=None):
    """Return the status of a previously sent bank-partner request.

    Raises InvalidParameterValue for malformed ids and BatchNotFound when no
    request exists or a stale 'sending' request was just purged.
    """
    try:
        bank_id = ObjectId(bank_id)
    except Exception:
        raise errors.InvalidParameterValue('bank_id')
    try:
        ObjectId(batch_id)  # format validation only; the string form is used below
    except Exception:
        raise errors.InvalidParameterValue('batch_id')

    current_bank_request = BankPartnerRequestObject.query.filter_by(
        bank_partner_id=bank_id, batch_id=batch_id).first()
    if not current_bank_request:
        raise errors.BatchNotFound()

    # A request stuck in 'sending' for over a minute is considered dead:
    # drop it so the client can retry. (Redundant truthiness re-check removed.)
    if current_bank_request.status == 'sending' and \
            abs((datetime.utcnow() - current_bank_request.sent_date).total_seconds()) > 60:
        sqldb.session.delete(current_bank_request)
        sqldb.session.commit()
        raise errors.BatchNotFound()

    # BUG FIX: the model is read via attribute access everywhere else in this
    # function (`current_bank_request.status` above); the original returned
    # `current_bank_request['status']`, which is item access on a SQLAlchemy
    # model instance and raises TypeError.
    return {"result": current_bank_request.status}
def update_batch(self, batch_id, new_batch, current_user_id, config, logger):
    """Merge `new_batch` data into an editable batch and rebuild its documents.

    Only batches in NEW or EDITED status may be updated; otherwise raises
    DocumentBatchUpdateError (or BatchNotFound if the batch is missing).

    NOTE(review): as visible here the method ends inside the STEP 2 error
    handler — it looks truncated; confirm against the original file.
    """
    current_batch_db_object = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner_id=current_user_id, deleted=False).first()
    if not current_batch_db_object:
        raise errors.BatchNotFound()
    batch_status = current_batch_db_object.status
    if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):
        logger.warn(u"Can't update batch %s in status %s" % (unicode(batch_id),
                                                             unicode(batch_status)))
        raise errors.DocumentBatchUpdateError()
    try:
        current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)
    except Exception:
        # Data already in the DB failed validation — nothing we can repair here.
        logger.fatal(u"Failed to validate batch from DB!")
        raise
    current_fields = current_batch.data.value
    assert isinstance(current_fields, dict)
    # STEP 1: make new data and metadata
    # and collect changed fields names
    new_batch_db = new_batch.get_db_object()
    merged_fields, changed_field_names = self._merge_raw_fields(
        current_batch_db_object.data, new_batch_db.data)
    current_batch_db_object._metadata = new_batch_db._metadata
    current_batch_db_object.data = merged_fields
    sqldb.session.commit()
    # STEP 2: make document set from data and schema
    try:
        new_field_set, new_docs, _ = self.make_docs_for_new_data(
            current_batch.data.value, new_batch.data.value,
            current_batch_db_object,
            BatchManager.get_batch_document_fields(current_batch_db_object))
    except errors.DocumentBatchUpdateError, ex:
        logger.exception(u"Failed to update batch with new values")
        # Persist the failure on the batch so the client can see why.
        current_batch_db_object.error_info = {
            "error": u"unknown error (%s)" % str(ex)
        }
        sqldb.session.commit()
        raise
def batch_update(batch_id=None, batch=None):
    """API entry point: apply user-supplied `batch` data to an existing batch.

    If the batch is mid-finalisation, finalisation is cancelled first; if the
    cancellation fails the batch is forced back to EDITED and the update is
    aborted. The actual merge is delegated to BatchManager.update_batch.
    """
    with current_app.model_cache_context:
        current_batch_db_object = DocumentBatchDbObject.query.filter_by(
            id=batch_id, _owner=current_user, deleted=False).first()
        if not current_batch_db_object:
            raise errors.BatchNotFound()
        if current_batch_db_object.status == BatchStatusEnum.BS_BEING_FINALISED:
            current_app.logger.debug(
                u"Updating batch during finalization - cancelling finalization"
            )
            try:
                BatchManager.cancel_batch_finalization(current_batch_db_object,
                                                       current_app.config,
                                                       current_app.logger)
            except Exception:
                current_app.logger.exception(
                    u"Failed to cancel batch finalisation")
                # Force the status back to EDITED so the batch is not stuck,
                # then fail the update: the client must retry.
                DocumentBatchDbObject.query.filter_by(
                    id=batch_id, status=BatchStatusEnum.BS_BEING_FINALISED).update(
                    {'status': BatchStatusEnum.BS_EDITED})
                sqldb.session.commit()
                raise errors.DocumentBatchUpdateError()
        manager = BatchManager.init(current_batch_db_object)
        batch_type = current_batch_db_object.batch_type
        # Pin batch_type to the stored value; the client cannot change it.
        batch['batch_type'] = batch_type
        if 'metadata' in batch:
            batch['_metadata'] = batch['metadata']
        new_batch = DocumentBatch.parse_raw_value(batch, api_data=True)
        new_batch_api_data = manager.update_batch(batch_id, new_batch,
                                                  current_user.id,
                                                  current_app.config,
                                                  current_app.logger)
        DocumentBatchDbObject.query.filter_by(id=batch_id).update(
            {'last_change_dt': datetime.utcnow()})
        sqldb.session.commit()
        if batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:
            schedule_notification_email(batch_id)
        return new_batch_api_data
def cancel_batch_finalization(batch_id=None):
    """Abort an in-progress finalisation; succeeds trivially when idle."""
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if batch is None:
        raise errors.BatchNotFound()

    cancelled = True
    if batch.status == BatchStatusEnum.BS_BEING_FINALISED:
        try:
            BatchManager.cancel_batch_finalization(
                batch, current_app.config, current_app.logger)
        except Exception:
            # Best effort: report failure to the client instead of raising.
            current_app.logger.exception(u"Failed to cancel batch finalization.")
            cancelled = False
    return {"result": cancelled}
def track_mail_status(batch_id=None, tracking=None):
    """Attach a Russian Post tracking number to a batch and start polling.

    Re-registering an already-tracked number is a no-op; otherwise the
    caller's previous tracking items for this batch are replaced.
    """
    tracking = tracking.strip()
    if not tracking:
        raise errors.InvalidParameterValue('tracking')
    # NOTE(review): 111111 looks like a hard-coded service/system owner id
    # excluded from the duplicate check — confirm and lift into a named
    # constant.
    tracking_item = RussianPostTrackingItem.query.filter(
        RussianPostTrackingItem.batch_id == batch_id,
        RussianPostTrackingItem.tracking == tracking,
        RussianPostTrackingItem.owner_id != 111111
    ).first()
    if tracking_item:
        # Already tracked by someone other than the excluded owner: no-op.
        return {
            'result': True
        }
    # NOTE(review): the caller's old tracking items are deleted before the
    # batch existence check below — an invalid batch_id still wipes them;
    # consider validating the batch first.
    RussianPostTrackingItem.query.filter(
        RussianPostTrackingItem.batch_id == batch_id,
        RussianPostTrackingItem.owner == current_user
    ).delete()
    sqldb.session.commit()
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, deleted=False).scalar()
    if not batch:
        raise errors.BatchNotFound()
    new_tracking = RussianPostTrackingItem(
        batch=batch,
        owner=current_user,
        tracking=tracking
    )
    sqldb.session.add(new_tracking)
    sqldb.session.commit()
    # Kick off asynchronous polling of the Russian Post tracking API.
    from services.russian_post.async_tasks import get_tracking_info_async
    get_tracking_info_async.delay(batch_id=batch.id)
    return {'result': True}
def get_stamp_partners(batch_id=None):
    """List enabled stamp partners available in the batch's region."""
    try:
        region = BatchManager.get_batch_region(batch_id)
    except Exception:
        raise errors.BatchNotFound()

    query = StampPartnersObject.query.filter_by(enabled=True)
    if region:
        # Partner either serves this region explicitly or has no restriction.
        query = query.filter(or_(StampPartnersObject.region.contains([region]),
                                 StampPartnersObject.region == None))

    stamp_partner_list = [{
        "id": partner.id,
        "link": partner.link,
        "banner": partner.banner,
        "title": partner.title
    } for partner in query.order_by(StampPartnersObject.sort_index.asc())]
    return {"result": {"stamp_partners": stamp_partner_list}}
def render_document_preview(batch_id=None, document_id=None):
    """Schedule asynchronous preview rendering for one batch document.

    Marks the document as RENDERING and records the celery task id so the
    newest task owns (captures) the document.
    """
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()
    doc = BatchDocumentDbObject.query.filter_by(batch=batch, id=document_id).scalar()
    if not doc:
        # BUG FIX: this used to `return errors.DocumentNotFound()` — returning
        # the exception instance instead of raising it, so a missing document
        # answered 200 with a bogus body instead of the error response.
        raise errors.DocumentNotFound()
    async_result = rendering.render_document_preview.apply_async(
        (document_id, ), countdown=2)
    if not async_result.ready():
        task_id = str(async_result.id)
        doc.status = UserDocumentStatus.DS_RENDERING
        doc._celery_task_id = task_id  # override (capture) document by new task
        doc._celery_task_started = datetime.utcnow()
        current_app.logger.debug(u"Render preview task id: %s" % task_id)
        sqldb.session.commit()
    return {"result": True}
def get_batch_reg_fee_invoice_data(batch_id=None):
    """Extract registration-fee payment data from the batch's rendered invoice.

    Returns {'result': <data>} on success; otherwise {'result': None,
    'message': <reason>} describing why the data is unavailable.
    """
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                  _owner=current_user,
                                                  deleted=False).scalar()
    if not batch:
        raise errors.BatchNotFound()
    if batch.status != BatchStatusEnum.BS_FINALISED:
        return {"result": None, "message": "document batch is not finalized"}
    for doc in batch._documents:
        # Only the fee-invoice / state-duty document types carry the payment
        # data, and only once rendered.
        if doc.document_type in (
            DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE,
            DocumentTypeEnum.DT_IP_STATE_DUTY
        ) and doc.status == UserDocumentStatus.DS_RENDERED:
            file_obj = doc.file
            if not file_obj:
                return {
                    "result": None,
                    "message": "failed to find file object for document with id %s" % doc.id
                }
            path = FileStorage.get_path(file_obj, current_app.config)
            if not path or not os.path.exists(path):
                return {"result": None, "message": "file %s not found" % path}
            res = get_reg_fee_data(path)
            if not res:
                return {
                    "result": None,
                    "message": "Failed to get reg fee data"
                }
            return {"result": res}
    return {"result": None, "message": "rendered document not found"}
def request_bank_partner(bank_id=None, batch_id=None,
                         bank_contact_phone_general_manager=False,
                         bank_contact_phone="", send_private_data=None):
    """Send the client's new-LLC batch data to a partner bank.

    Builds the partner-specific field set from the batch data plus contact
    parameters, validating each field and collecting validation errors.

    NOTE(review): as visible here the function ends while accumulating the
    validated field context — nothing is sent and nothing is returned. This
    looks truncated; confirm against the original file.
    """
    if not bank_contact_phone_general_manager and not bank_contact_phone:
        raise errors.MissingRequiredParameter('bank_contact_phone')
    batch = DocumentBatchDbObject.query.filter_by(
        id=batch_id, _owner=current_user, deleted=False,
        batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC).scalar()
    if not batch or not batch.data:
        raise errors.BatchNotFound()
    current_batch = DocumentBatch.db_obj_to_field(batch)
    partner = BankPartnersObject.query.filter_by(id=bank_id).first()
    if not partner:
        raise errors.InvalidParameterValue('partner_id')
    svc_data = BankPartnersServiceObject.query.filter_by(
        bank_partner_id=partner.id).first()
    if not svc_data:
        raise errors.ServerError()
    current_bank_request = BankPartnerRequestObject.query.filter_by(
        bank_partner_id=partner.id, batch_id=batch_id).first()
    # A request already in flight or completed is not re-sent.
    if current_bank_request and current_bank_request.status in ('sending', 'success'):
        struct = current_batch.get_api_structure()
        return {'result': struct}
    # Requests older than a minute are treated as stale and recreated.
    if current_bank_request and abs(
            (datetime.utcnow() - current_bank_request.sent_date).total_seconds()) > 60:
        BankPartnerRequestObject.query.filter_by(
            id=current_bank_request.id).delete()
        sqldb.session.commit()
        current_bank_request = None
    svc_type = svc_data.type
    fields = svc_data.fields
    extra_context = {
        'bank_contact_phone_general_manager': bank_contact_phone_general_manager,
        'bank_contact_phone': bank_contact_phone,
        'send_private_data': send_private_data,
        'bank_title': partner.title
    }
    field_list = BatchManager.make_fields_from_data(
        batch_id, fields, current_app.config, extra_context=extra_context)
    context = {}
    errors_list = []
    # Validate every generated field; a field may suppress validation errors
    # outright or conditionally (a Condition evaluated against batch data).
    for name in field_list:
        field = field_list[name]
        try:
            if not field.initialized:
                if field.required:
                    raise MissingRequiredFieldException(name)
            else:
                field.validate()
        except (InvalidFieldValueException, MissingRequiredFieldException), ex:
            if hasattr(field, "suppress_validation_errors"):
                suppress_validation_errors = field.suppress_validation_errors
                if isinstance(suppress_validation_errors, dict):
                    suppress_validation_condition = Condition(
                        suppress_validation_errors)
                    context = copy.copy(batch.data)
                    context.update(extra_context)
                    suppress_validation_errors = suppress_validation_condition.check(
                        context)
                if suppress_validation_errors:
                    continue
            if getattr(ex, 'ext_data', None):
                # Nested validation errors: flatten the error tree into
                # dotted field paths.
                err_list = error_tree_to_list(ex.ext_data)
                error_field_paths = [{
                    'field': name + '.' + i['field'],
                    'error_code': i['error_code']
                } for i in err_list]
                errors_list.extend(error_field_paths)
            else:
                errors_list.append({
                    'field': name,
                    'error_code': ex.ERROR_CODE
                })
            current_app.logger.exception(u"Field %s validation error" % name)
            continue
        if field_list[name].initialized:
            context[name] = field_list[name]
def notarius_reserve(notarius_id=None, **kwargs):
    """Book a 30-minute notary time slot, optionally tied to a batch.

    Returns {'result': None} when the requested slot is unavailable,
    otherwise the booking's API structure. When tied to a batch, the lawyer
    team is notified by email.
    """
    dt = kwargs['datetime']
    batch_id = kwargs.get('batch_id', None)
    batch = None
    if batch_id:
        batch = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                      _owner=current_user,
                                                      deleted=False).scalar()
        if not batch:
            raise errors.BatchNotFound()
    notarius_db = NotariusObject.query.filter_by(id=notarius_id).scalar()
    if not notarius_db:
        raise errors.NotariusNotFound()
    target_day = datetime(dt.year, dt.month, dt.day)
    if NotariusObject.is_weekend(notarius_db, target_day):
        current_app.logger.info(u"NotariusObject.is_weekend -> skip")
        return {'result': None}
    time_slots = NotariusObject.make_slots(
        notarius_db,
        target_day,
    )
    if not time_slots:
        return {'result': None}
    # The requested time must match an offered 30-minute (1800 s) slot exactly.
    found = False
    for slot in time_slots:
        if slot['slot_start'] == dt.strftime("%H:%M") and slot['slot_end'] == (
                dt + timedelta(seconds=1800)).strftime("%H:%M"):
            found = True
            break
    if not found:
        current_app.logger.info(u"timeslot not found -> skip")
        return {'result': None}
    address = DocAddressField()
    address.parse_raw_value(notarius_db.address, api_data=False)
    booking = NotariusBookingObject(notarius=notarius_db, dt=dt,
                                    address=address.as_string(),
                                    owner=current_user, _discarded=False)
    if batch_id:
        # One live booking per batch: discard the caller's previous one.
        NotariusBookingObject.query.filter_by(batch_id=batch_id,
                                              owner=current_user,
                                              _discarded=False).delete()
        sqldb.session.commit()
        booking.batch_id = batch_id
        sqldb.session.add(booking)
        # Mark the batch as having requested a lawyer check.
        batch_data = batch.data or {}
        batch_data['lawyer_check'] = True
        DocumentBatchDbObject.query.filter_by(id=batch_id).update(
            {'data': batch_data})
        sqldb.session.commit()
        # NOTE(review): as reconstructed, a booking WITHOUT a batch_id is
        # never added to the session or committed — confirm against the
        # original file whether that is intended.
    result = booking.get_api_structure()
    rec_list = current_app.config['YURIST_EMAIL_LIST']
    if batch:
        attaches = BatchManager.get_shared_links_to_rendered_docs(
            batch, current_app.config, current_app.logger)
        llc_full_name = batch.data.get('full_name', "")
        social_link = SocialServiceBackends.get_user_social_network_profile_url(
            current_user.id)
        from fw.async_tasks import send_email
        for recipient in rec_list:
            send_email.send_email.delay(
                recipient,
                'notarius_batch_check',
                email=current_user.email,
                mobile=current_user.mobile,
                social_link=social_link,
                full_name=llc_full_name,
                # Falls back to the address only when the title is empty.
                notarius=unicode(notarius_db.title) or address.as_string(),
                booking_time=pytils.dt.ru_strftime(u"%d %B %Y в %H:%M",
                                                   inflected=True, date=dt),
                attaches=attaches)
    return {'result': result}
def yurist_set(batch_id=None, check=None, file_list=None, typos_correction=False):
    """Request (truthy `check`) or decline (falsy) a lawyer review of a batch.

    Requesting reuses the active (non-final) check if one exists — replacing
    its attachments and resetting its state — otherwise creates a new one,
    then notifies the lawyer service. Declining marks any active check as
    refused.
    """
    typos_correction = bool(typos_correction)
    batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id,
                                                     _owner=current_user,
                                                     deleted=False).first()
    if not batch_db:
        raise errors.BatchNotFound()
    if check:
        new = True
        # search for any active check
        cur_check_obj = YuristBatchCheckObject.query.filter(
            YuristBatchCheckObject.batch_id == batch_id,
            YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)
        ).order_by(YuristBatchCheckObject.create_date.desc()).first()
        if cur_check_obj:
            new = False
        real_file_list = []
        file_descr = []
        if file_list:
            # `file_list` is a JSON list of {'id': <file id>} references;
            # unknown ids are logged and skipped, parse errors are swallowed.
            try:
                file_list_data = json.loads(file_list)
                for file_obj in file_list_data:
                    file_id = file_obj['id']
                    file_obj = FileStorage.get_file(file_id)
                    if file_obj:
                        real_file_list.append(file_obj)
                    else:
                        current_app.logger.warn(u"Failed to find file with id %s" % file_id)
            except Exception:
                current_app.logger.exception(u"Failed to parse file list: %s" % file_list)
        # Insert new check
        if new:
            yurist_batch_check = YuristBatchCheckObject(**{
                'batch_id': batch_id,
                'create_date': datetime.utcnow(),
                'status': YuristBatchCheckStatus.YBS_WAIT,
                'typos_correction': typos_correction
            })
            sqldb.session.add(yurist_batch_check)
            for file_obj in real_file_list:
                attach = YuristCheckFilesObject()
                attach.files_id = file_obj.id
                yurist_batch_check.attached_files.append(attach)
        else:
            # Reuse the active check: replace attachments and reset its state.
            YuristCheckFilesObject.query.filter_by(check_id=cur_check_obj.id).delete()
            for file_obj in real_file_list:
                attach = YuristCheckFilesObject()
                attach.files_id = file_obj.id
                cur_check_obj.attached_files.append(attach)
            cur_check_obj.create_date = datetime.utcnow()
            cur_check_obj.typos_correction = typos_correction
            cur_check_obj.status = YuristBatchCheckStatus.YBS_WAIT
        sqldb.session.commit()
        yurist_manager.yurist_check(current_app.config, batch_db,
                                    real_file_list, current_app.logger)
    else:
        # search for active check
        cur_check_obj = YuristBatchCheckObject.query.filter(
            YuristBatchCheckObject.batch_id == batch_id,
            YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)
        ).order_by(YuristBatchCheckObject.create_date.desc()).first()
        # If found any: set status to refused
        if cur_check_obj:
            cur_check_obj.status = YuristBatchCheckStatus.YBS_REFUSED
            sqldb.session.commit()
            return {'result': True}
    return {'result': True}
def get_batch(count=None, offset=None, batch_id=None, finalised=None):
    """List the caller's batches (or fetch one by id) as API structures.

    Batches that fail deserialisation are reported once and then flagged
    `_broken` so later listings skip them. Supports paging via count/offset
    and filtering by finalised state.
    """
    batch_api_structures = []
    broken_batch_ids = []
    if batch_id:
        batch_db_obj = DocumentBatchDbObject.query.filter_by(
            id=batch_id, _owner=current_user, _broken=False,
            deleted=False).first()
        if not batch_db_obj:
            raise errors.BatchNotFound()
        # Detach the object so serialisation cannot lazily touch the session.
        sqldb.session.expunge(batch_db_obj)
        make_transient(batch_db_obj)
        total = 1
        result_count = 1
        # `if 1:` kept as-is; it preserves the indentation of a removed guard.
        if 1:
            try:
                batch_api_structures.append(
                    DocumentBatch.db_obj_to_field(
                        batch_db_obj).get_api_structure())
            except Exception:
                current_app.logger.exception(u"Set batch _broken")
                broken_batch_ids.append(batch_id)
    else:
        query = DocumentBatchDbObject.query.filter_by(
            _owner=current_user, _broken=False, deleted=False).options(
            joinedload(DocumentBatchDbObject._documents))
        if finalised is not None:
            query = query.filter_by(
                status=BatchStatusEnum.BS_FINALISED
            ) if finalised else query.filter(
                DocumentBatchDbObject.status != BatchStatusEnum.BS_FINALISED)
        query = query.order_by(DocumentBatchDbObject.creation_date.desc())
        # Count before paging so the client gets the full total.
        total = query.count()
        if count is not None:
            query = query.limit(count)
        if offset is not None:
            query = query.offset(offset)
        result_count = query.count()
        has_broken = False
        with current_app.model_cache_context:
            for item in query:
                batch_id = item.id
                sqldb.session.expunge(item)
                make_transient(item)
                # Pre-seed the cache so document lookups during serialisation
                # hit memory instead of issuing fresh queries.
                for doc in item._documents:
                    current_app.model_cache_context.add(
                        BatchDocumentDbObject.__tablename__, doc.id, doc)
                if 1:
                    try:
                        batch_api_structures.append(
                            DocumentBatch.db_obj_to_field(
                                item).get_api_structure(db_obj=item))
                    except Exception:
                        current_app.logger.exception(u"Set batch _broken %s" % unicode(item.id))
                        broken_batch_ids.append(batch_id)
                        has_broken = True
                        continue
        if has_broken:
            # Flag the failed batches so subsequent listings exclude them.
            for batch_id in broken_batch_ids:
                batch = DocumentBatchDbObject.query.filter_by(
                    id=batch_id).first()
                if batch:
                    batch._broken = True
            sqldb.session.commit()
    result = {
        'result': {
            'total': total,
            'count': result_count,
            'batches': batch_api_structures
        }
    }
    return result