def definalize_batch(self, config, logger, batch, force):
    if batch.status != BatchStatusEnum.BS_FINALISED:
        return False

    result_fields = batch.result_fields or {}
    ifns_reg_status = result_fields.get('ifns_reg_info', {}).get('status', '')
    if ifns_reg_status == 'registered':
        raise errors.DocumentBatchDefinalizationError()

    for doc in BatchDocumentDbObject.query.filter_by(batch=batch):
        if doc.file:
            file_obj = doc.file
            doc.file = None
            FileStorage.remove_file(file_obj.id, config)

    batch.status = BatchStatusEnum.BS_EDITED
    batch.ifns_reg_info = None
    batch.last_change_dt = datetime.utcnow()
    sqldb.session.commit()

    from services.ifns import ifns_manager
    from services.notarius import notarius_manager
    from services.yurist import yurist_manager

    yurist_manager.cancel_check(batch, config, logger)
    notarius_manager.discard_booking(batch, config, logger)
    ifns_manager.discard_booking(batch, logger)

    return True
def definalize_batch(self, config, logger, batch, force):
    if batch.status != BatchStatusEnum.BS_FINALISED:
        return False

    for doc in BatchDocumentDbObject.query.filter_by(batch=batch):
        if doc.file:
            file_obj = doc.file
            doc.file = None
            FileStorage.remove_file(file_obj.id, config)

    batch.status = BatchStatusEnum.BS_EDITED
    batch.ifns_reg_info = None
    batch.last_change_dt = datetime.utcnow()
    sqldb.session.commit()
    return True
def yurist_check(config, batch_db, file_obj_list, logger):
    # get batch id and check if it is still in active state
    batch_check = YuristBatchCheckObject.query.filter(
        YuristBatchCheckObject.batch_id == batch_db.id,
        YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)
    ).order_by(YuristBatchCheckObject.create_date.desc()).first()

    # this check should be performed later
    if not batch_check:
        return False

    user = batch_db._owner
    if not user:
        raise Exception("Failed to find batch owner")

    from fw.documents.batch_manager import BatchManager
    attaches = BatchManager.get_shared_links_to_rendered_docs(batch_db, config, logger)

    schema = config['WEB_SCHEMA']
    domain = config['DOMAIN']
    for file_obj in file_obj_list:
        path = FileStorage.get_path(file_obj, config)
        if os.path.exists(path):
            if file_obj._owner:
                url = u"%s://%s%s" % (schema, domain, FileStorage.get_shared_link(file_obj.id, config))
            else:
                url = u"%s://%s%s" % (schema, domain, FileStorage.get_url(file_obj, config))
            attaches.append({
                'url': url,
                'title': file_obj.file_name or url
            })

    rec_list = config['YURIST_EMAIL_LIST']

    from services.yurist.async_tasks import yurist_check_send
    batch_check_id = batch_check.id if batch_check else "not-found"
    # countdown 2 hours before execution
    yurist_check_send.check_and_send.apply_async(
        args=[],
        kwargs=dict(
            email=user.email,
            batch_check_id=batch_check_id,
            server_url_schema=config['WEB_SCHEMA'],
            api_url=config['api_url'],
            attaches=attaches,
            mail_type='yurist_batch_check',
            rec_list=rec_list
        ),
        countdown=config['SEND_DOCS_TO_YURIST_DELAY_SECONDS']
    )
def get_document_preview_status(batch_id=None, document_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()

    doc = BatchDocumentDbObject.query.filter_by(batch=batch, id=document_id).first()
    if not doc:
        raise errors.DocumentNotFound()

    links = {
        'pdf': FileStorage.get_url(doc.file, current_app.config),
        'jpeg': []
    } if doc.file else {
        'pdf': None,
        'jpeg': []
    }

    return {
        'result': {
            'state': doc.status,
            'links': links,
            'document_id': unicode(document_id)
        }
    }
def make_all_user_fin_batch_paid_and_replace_watermarked_docs_with_normal(*args, **kwargs):
    db = celery.conf.get('db')
    config = celery.conf.get('config')
    user_id = kwargs['user_id']
    with celery.conf['flask_app']().app_context():
        logger = current_app.logger
        batches = DocumentBatchDbObject.query.filter_by(
            _owner_id=user_id, paid=False, status=BatchStatusEnum.BS_FINALISED)
        for batch in batches:
            batch.paid = True
            for doc in batch._documents:
                if doc.status == UserDocumentStatus.DS_RENDERED and doc.file:
                    try:
                        batch_id = batch.id
                        file_obj = doc.file
                        if not file_obj:
                            logger.error(u"Can't replace watermarked file: "
                                         u"Failed to find file of batch %s" % unicode(batch_id))
                            continue
                        file_path = FileStorage.get_path(file_obj, current_app.config)
                        if not file_path or not os.path.exists(file_path) or not os.path.exists(file_path + '.src'):
                            logger.error(u"Can't replace watermarked file: "
                                         u"Failed to find original or source file %s of batch %s" % (
                                             unicode(file_path + '.src'), unicode(batch_id)))
                            continue
                        os.rename(file_path + '.src', file_path)
                    except Exception:
                        logger.exception(u"Can't replace watermarked file")
        sqldb.session.commit()
    return True
def forward(config, logger):
    logger.debug(u"Migrate yurist models")

    yurist_col = db['yurist_batch_check']
    YuristCheckFilesObject.query.delete()
    YuristBatchCheckObject.query.delete()
    sqldb.session.commit()

    for old_yc in yurist_col.find():
        batch_id = str(old_yc['batch_id'])
        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()
        if not batch:
            continue
        new_yc = YuristBatchCheckObject(
            id=str(old_yc['_id']),
            batch_id=batch_id,
            status=old_yc['status'],
            create_date=old_yc.get('create_date', datetime.utcnow()),
            typos_correction=old_yc.get('typos_correction', False))
        sqldb.session.add(new_yc)
        for file_descr in (old_yc.get('attached_files') or []):
            file_obj = FileStorage.get_file(str(file_descr['id']))
            if file_obj:
                attach = YuristCheckFilesObject()
                attach.files_id = file_obj.id
                new_yc.attached_files.append(attach)
    sqldb.session.commit()
def get_render_batch_documents_state(batch_id=None, document_types=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()
    if not batch:
        raise errors.BatchNotFound()

    batch_manager = BatchManager.init(batch)

    try:
        document_types = json.loads(document_types)
        if not isinstance(document_types, list) and not isinstance(document_types, tuple):
            raise Exception()
    except Exception:
        raise errors.InvalidParameterValue('document_type')

    doc_type_set = set()
    for doc_type in document_types:
        if not batch_manager.check_doc_type_support(batch.batch_type, doc_type):
            raise errors.InvalidParameterValue('document_type')
        doc_type_set.add(doc_type)

    result = []
    for doc_type in doc_type_set:
        doc_obj = BatchDocumentDbObject.query.filter_by(batch_id=batch_id, document_type=doc_type).first()
        if not doc_obj:
            result.append({
                'state': UserDocumentStatus.DS_NEW,
                'document_type': doc_type
            })
            continue

        doc_info = {
            'state': doc_obj.status,
            'document_type': doc_type
        }
        if doc_obj.status == UserDocumentStatus.DS_RENDERED:
            if doc_obj.file:
                doc_info['links'] = {
                    'pdf': FileStorage.get_url(doc_obj.file, current_app.config),
                    'jpeg': []
                }
                result.append(doc_info)
            else:
                current_app.logger.debug(u"Not found rendered documents for rendered document %s. "
                                         u"Returning as rendering_failed" % doc_type)
                result.append({
                    'state': UserDocumentStatus.DS_RENDERING_FAILED,
                    'document_type': doc_type
                })
        else:
            result.append(doc_info)
    return result
def upload_file():
    file_obj = request.files['file']
    if file_obj and file_obj.filename and len(os.path.splitext(file_obj.filename)) > 1:
        t_file_out = tempfile.NamedTemporaryFile(mode="w+", delete=True,
                                                 suffix=os.path.splitext(file_obj.filename)[-1])
        full_name = t_file_out.name
        t_file_out.close()
        file_obj.save(full_name)
        file_obj = FileStorage.add_file_from_disk(full_name, current_app.config, current_user.id,
                                                  file_name=file_obj.filename)
        result = {
            "id": unicode(file_obj.id),
            "size": os.path.getsize(full_name),
            "file_name": file_obj.file_name,
            "url": FileStorage.get_url(file_obj, current_app.config)
        }
        os.unlink(full_name)
        resp = Response(json.dumps({"result": result}), content_type="application/json")
        resp.headers.add('Access-Control-Allow-Credentials', "true")
        resp.headers.add('Access-Control-Allow-Origin', "http://%s" % current_app.config['site_domain'])
        return resp
    abort(400)
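# Usage sketch for the upload view above. The route ("/api/files/upload") and host are
# hypothetical -- the URL mapping is not shown here; only the multipart field name ("file")
# and the JSON envelope {"result": {...}} come from upload_file() itself.
import requests

session = requests.Session()  # assumes an already authenticated session (the view relies on current_user)
with open('scan.pdf', 'rb') as f:
    resp = session.post('https://example.com/api/files/upload', files={'file': f})
resp.raise_for_status()
uploaded = resp.json()['result']
print uploaded['id'], uploaded['url']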
def get_batch_doc(batch_id=None, document_id=None):
    config = current_app.config
    if not config['STAGING'] and not config['DEBUG']:
        abort(404)

    batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()
    if not batch:
        raise errors.BatchNotFound()

    doc = BatchDocumentDbObject.query.filter_by(id=document_id).first()
    file_obj = doc.file
    if file_obj:
        file_path = FileStorage.get_path(file_obj, current_app.config)
        if os.path.exists(file_path) and file_path.endswith('.pdf'):
            file_path = file_path[:-4] + '.text-src'
            if os.path.exists(file_path):
                with codecs.open(file_path, 'r', 'utf-8') as ff:
                    content = ff.read()
                return {'result': content}
    raise errors.BatchNotFound()
def get_batch_reg_fee_invoice_data(batch_id=None):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()
    if not batch:
        raise errors.BatchNotFound()

    if batch.status != BatchStatusEnum.BS_FINALISED:
        return {"result": None, "message": "document batch is not finalized"}

    for doc in batch._documents:
        if doc.document_type in (DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE,
                                 DocumentTypeEnum.DT_IP_STATE_DUTY) and doc.status == UserDocumentStatus.DS_RENDERED:
            file_obj = doc.file
            if not file_obj:
                return {"result": None,
                        "message": "failed to find file object for document with id %s" % doc.id}
            path = FileStorage.get_path(file_obj, current_app.config)
            if not path or not os.path.exists(path):
                return {"result": None, "message": "file %s not found" % path}
            res = get_reg_fee_data(path)
            if not res:
                return {"result": None, "message": "Failed to get reg fee data"}
            return {"result": res}
    return {"result": None, "message": "rendered document not found"}
def get_private_file(file_path):
    if not current_user.is_authenticated:
        return not_authorized(current_app.config['site_domain'])

    current_app.logger.info(u'file: %s' % file_path)
    content_type = 'application/octet-stream' if ('download/' in file_path) else ''

    file_id = None
    try:
        for part in file_path.split('/'):
            try:
                ObjectId(part)
                file_id = part
                break
            except InvalidId:
                pass
        if not file_id:
            raise ValueError()
    except ValueError:
        current_app.logger.exception(u"Invalid file id")
        abort(400)
        return

    file_obj = FileObject.query.filter_by(id=file_id).first()
    if not file_obj:
        current_app.logger.exception(u"No such file in db with id %s" % unicode(file_id))
        abort(404)
        return

    if file_obj._owner and current_user != file_obj._owner:
        current_app.logger.exception(u"File is not yours with id %s" % unicode(file_id))
        abort(403)
        return

    if file_obj._original_file:
        file_id = file_obj._original_file
        file_obj = FileObject.query.filter_by(id=file_obj._original_file).first()
        if not file_obj:
            current_app.logger.exception(u"No such file in db with id %s" % file_id)
            abort(404)
            return

    # current_app.logger.info(u" file 4")
    file_full_path = FileStorage.get_path(file_obj, current_app.config)
    if not os.path.exists(file_full_path):
        current_app.logger.exception(u"File with id %s not found at %s" % (unicode(file_id), file_full_path))
        abort(404)
        return

    # current_app.logger.info(u" file 5")
    resp = Response(u"", headers={'X-Accel-Redirect': file_full_path}, content_type=content_type)
    if 'download/' in file_path:
        try:
            from email.header import decode_header, Header
            parts = file_path.split('/')
            fname = u""
            if len(parts) > 1:
                fname = parts[-1]
            if not fname and len(parts) > 2:
                fname = parts[-2]
            fname = filter(lambda x: x in u"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVQXYZ0123456789-=`!@#$%^&*()_+\\|[]{}абвгдеёжзийклмнопрстуфхцчшщьыъэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭЮЯ.,;':\"/? ", fname)
            current_app.logger.info(u" fname:%s" % fname)
            if fname:
                header_val = str(Header(fname, 'utf-8', maxlinelen=10000)).replace('?=\n =?utf-8?b?', '')
                current_app.logger.info(u" header_val:%s" % header_val)
                resp.headers.add("Content-Disposition", u"attachment; filename=%s" % header_val)
        except Exception:
            current_app.logger.exception(u"Failed to add header")
    return resp
def yad_payment_aviso():
    dt_str = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")  # u"2011-05-04T20:38:01.000+04:00"
    logger = current_app.logger

    request_datetime = request.form.get('requestDatetime', "")
    md5 = request.form.get('md5', "")
    shop_id = request.form.get('shopId', "")
    shop_article_id = request.form.get('shopArticleId', "")
    invoice_id = request.form.get('invoiceId', "")
    orderId = request.form.get('orderId', "")
    customer_number = request.form.get('customerNumber', "")
    order_created_datetime = request.form.get('orderCreatedDatetime', "")
    order_sum_amount = request.form.get('orderSumAmount', "")
    order_sum_currency_paycash = request.form.get('orderSumCurrencyPaycash', "")
    order_sum_bank_paycash = request.form.get('orderSumBankPaycash', "")
    shop_sum_amount = request.form.get('shopSumAmount', "")
    shop_sum_currency_paycash = request.form.get('shopSumCurrencyPaycash', "")
    shop_sum_bank_paycash = request.form.get('shopSumBankPaycash', "")
    payment_payer_code = request.form.get('paymentPayerCode', "")
    payment_type = request.form.get('paymentType', "")
    action = request.form.get('action', "")
    payment_datetime = request.form.get('paymentDatetime', "")
    cps_user_country_code = request.form.get('cps_user_country_code', "")

    invalid_request_error = u"""<?xml version="1.0" encoding="UTF-8"?>
<paymentAvisoResponse performedDatetime="%s" code="200" invoiceId="%s" shopId="%s" message="msg"/>""" % (
        dt_str, invoice_id, shop_id)

    authorization_error = u"""<?xml version="1.0" encoding="UTF-8"?>
<paymentAvisoResponse performedDatetime="%s" code="1" invoiceId="%s" shopId="%s" message="Invalid request: md5 sum does not match provided value"/>""" % (
        dt_str, invoice_id, shop_id)

    admins_emails = current_app.config['ADMIN_EMAIL_LIST']

    if not md5 or not shop_id or not action or not order_sum_amount or not order_sum_currency_paycash \
            or not order_sum_bank_paycash or not invoice_id or not customer_number or not orderId:
        current_app.logger.warn(u"Invalid request from yad: %s" % unicode(request.form))
        _notify_admin(action, u"missing one of required arguments", admins_emails)
        return _xml_resp(invalid_request_error.replace(u'msg', u"missing one of required arguments"))

    shop_password = current_app.config['YAD_ESHOP_PASSWORD']
    yad_ip_list = current_app.config['YAD_IP_LIST']

    # MD5 calc
    # action;orderSumAmount;orderSumCurrencyPaycash;orderSumBankPaycash;shopId;invoiceId;customerNumber;shopPassword
    our_md5_string = "%s;%s;%s;%s;%s;%s;%s;%s" % (action, order_sum_amount, order_sum_currency_paycash,
                                                  order_sum_bank_paycash, shop_id, invoice_id,
                                                  customer_number, shop_password)
    m = hashlib.md5()
    m.update(our_md5_string)

    ip = None
    if 'X-Forwarded-For' in request.headers:
        ip = request.headers['X-Forwarded-For']
    if not ip and 'X-Real-Ip' in request.headers:
        ip = request.headers['X-Real-Ip']
    if not ip:
        ip = request.remote_addr

    new_item = YadRequestsObject(
        ip=ip,
        created=datetime.utcnow(),
        request_datetime=parse_iso_dt(request_datetime),
        md5=md5,
        shop_id=int(shop_id),
        shop_article_id=int(shop_article_id) if shop_article_id else 0,
        invoice_id=int(invoice_id),
        order_number=orderId,
        customer_number=customer_number,
        order_created_datetime=parse_iso_dt(order_created_datetime),
        order_sum_amount=Decimal(order_sum_amount),
        order_sum_currency_paycash=order_sum_currency_paycash,
        order_sum_bank_paycash=order_sum_bank_paycash,
        shop_sum_amount=Decimal(shop_sum_amount),
        shop_sum_currency_paycash=shop_sum_currency_paycash,
        shop_sum_bank_paycash=shop_sum_bank_paycash,
        payment_payer_code=payment_payer_code,
        payment_type=payment_type,
        action=action,
        payment_datetime=parse_iso_dt(payment_datetime),
        cps_user_country_code=cps_user_country_code
    )
    sqldb.session.add(new_item)
    sqldb.session.commit()

    if action != u'paymentAviso':
        current_app.logger.warn(u"Invalid request from yad: %s" % unicode(request.form))
        _notify_admin(action, u"invalid action id: %s" % unicode(action), admins_emails)
        return _xml_resp(invalid_request_error.replace(u'msg', u"invalid action id: %s" % unicode(action)))

    if yad_ip_list:
        if ip not in yad_ip_list:
            current_app.logger.warn(u"Invalid request from yad: %s" % unicode(request.form))
            _notify_admin(action, u"sender ip (%s) not in whitelist" % ip, admins_emails)
            return _xml_resp(invalid_request_error.replace(u'msg', u"sender ip not in whitelist"))
    else:
        current_app.logger.warn(u"Can't check IP address: YAD_IP_LIST config option is empty")

    if m.hexdigest().upper() != md5:
        current_app.logger.warn(u"Invalid request from yad: %s" % unicode(request.form))
        _notify_admin(action, u"arguments md5 digests do not match", admins_emails)
        return _xml_resp(authorization_error)

    try:
        auth_user_id = customer_number
        batch_id = orderId if orderId not in ('subscription_3', 'subscription_1') else None
        subs_type = orderId if orderId in ('subscription_3', 'subscription_1') else None
        if not batch_id and not subs_type:
            raise Exception("Invalid order number:%s" % orderId)
    except Exception:
        current_app.logger.warn(u"Invalid request from yad: %s" % unicode(request.form))
        _notify_admin(action, u"Invalid user id or batch id", admins_emails)
        return _xml_resp(invalid_request_error.replace(u'msg', u"Invalid user id or batch id"))

    user = AuthUser.query.filter_by(uuid=auth_user_id).scalar()
    if not user:
        current_app.logger.warn(u"Invalid request from yad: %s" % unicode(request.form))
        _notify_admin(action, u"User with id %s not found" % unicode(auth_user_id), admins_emails)
        return _xml_resp(invalid_request_error.replace(u'msg', u"User not found"))

    success_result = u"""<?xml version="1.0" encoding="UTF-8"?>
<paymentAvisoResponse performedDatetime ="%s" code="0" invoiceId="%s" shopId="%s"/>""" % (
        dt_str, invoice_id, shop_id)

    pay_info = {
        'dt': datetime.now(),
        'shop_id': shop_id,
        'invoice_id': invoice_id,
        'order_sum_amount': order_sum_amount,
        'order_sum_currency_paycash': order_sum_currency_paycash,
        'order_sum_bank_paycash': order_sum_bank_paycash
    }
    if shop_article_id is not None:
        pay_info['shop_article_id'] = shop_article_id
    if order_created_datetime:
        pay_info['order_created_datetime'] = order_created_datetime
    if shop_sum_amount:
        pay_info['shop_sum_amount'] = shop_sum_amount
    if shop_sum_currency_paycash:
        pay_info['shop_sum_currency_paycash'] = shop_sum_currency_paycash
    if shop_sum_bank_paycash:
        pay_info['shop_sum_bank_paycash'] = shop_sum_bank_paycash
    if payment_payer_code:
        pay_info['payment_payer_code'] = payment_payer_code
    if payment_type:
        pay_info['payment_type'] = payment_type
    if payment_datetime:
        pay_info['payment_datetime'] = payment_datetime
    if cps_user_country_code:
        pay_info['cps_user_country_code'] = cps_user_country_code
    if request_datetime:
        pay_info['request_datetime'] = request_datetime

    if batch_id:
        batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
        if not batch:
            current_app.logger.warn(u"Invalid request from yad: %s" % unicode(request.form))
            _notify_admin(action, u"Batch with id %s not found" % batch_id, admins_emails)
            return _xml_resp(invalid_request_error.replace(u'msg', u"Batch not found"))

        modify_result = DocumentBatchDbObject.query.filter_by(id=batch_id).update({
            "pay_info": pay_info,
            "paid": True
        })

        if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:
            pay_info = PayInfoObject(
                user=batch._owner,
                batch=batch,
                pay_record_id=new_item.id,
                payment_provider=PaymentProvider.YAD,
                service_type=PurchaseServiceType.LLC_PURCHASE
            )
            sqldb.session.add(pay_info)
            sqldb.session.commit()
        elif batch.batch_type == DocumentBatchTypeEnum.DBT_OSAGO:
            count = PayInfoObject.query.filter_by(batch=batch).count()
            osago_service_code = PurchaseServiceType.OSAGO_PART1 if count < 1 else PurchaseServiceType.OSAGO_PART2
            pay_info = PayInfoObject(
                user=batch._owner,
                batch=batch,
                pay_record_id=new_item.id,
                payment_provider=PaymentProvider.YAD,
                service_type=osago_service_code
            )
            sqldb.session.add(pay_info)
            batch.paid = True
            sqldb.session.commit()
            event = {
                PurchaseServiceType.OSAGO_PART1: 'rerender_pretension',
                PurchaseServiceType.OSAGO_PART2: 'rerender_claim'
            }.get(osago_service_code, None)
            if event:
                BatchManager.handle_event(batch_id, event, {'batch_id': batch_id},
                                          current_app.logger, current_app.config)

        if modify_result is None:
            logger.error(u"Failed to set payment info to batch")
            _notify_admin(action, u"Failed to set payment info to batch", admins_emails)
            return _xml_resp(invalid_request_error.replace(u'msg', u"Failed to process"))

        try:
            for doc in BatchDocumentDbObject.query.filter_by(batch=batch, status=UserDocumentStatus.DS_RENDERED):
                if not doc.file:
                    continue
                file_obj = doc.file
                if not file_obj:
                    logger.error(u"Can't replace watermarked file: Failed to find file of batch %s" % batch_id)
                    continue
                file_path = FileStorage.get_path(file_obj, current_app.config)
                if not file_path or not os.path.exists(file_path) or not os.path.exists(file_path + '.src'):
                    logger.error(
                        u"Can't replace watermarked file: Failed to find original or source file %s of batch %s" % (
                            unicode(file_path + '.src'), batch_id))
                    continue
                os.rename(file_path + '.src', file_path)
        except Exception:
            current_app.logger.exception(u"FAILED TO REPLACE WATERMARKED DOCS")

        if current_app.config.get('PROD', False):
            appcraft.send_stat(batch.batch_type + '_payment_received', batch._owner, batch.id,
                               batch.batch_type, int(invoice_id))
            mixpanel_metrics.send_stat(batch.batch_type + '_payment_received', batch._owner,
                                       batch.id, batch.batch_type)

        try:
            if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:
                BatchManager.send_batch_docs_to_user(batch_id, current_app.config)
        except Exception:
            logger.exception(u"Failed to send documents to user.")
    elif subs_type:
        user_subs = PaymentSubscriptionObject.query.filter(
            PaymentSubscriptionObject.user == user,
            PaymentSubscriptionObject.end_dt.__ge__(datetime.utcnow())
        )
        if not user_subs.count():
            end_date = datetime.utcnow()
            if subs_type == 'subscription_3':
                end_date += relativedelta(months=3)
            elif subs_type == 'subscription_1':
                end_date += relativedelta(months=1)
            new_subs = PaymentSubscriptionObject(
                pay_info=pay_info,
                created=datetime.utcnow(),
                end_dt=end_date,
                user=user,
                type=subs_type
            )
            sqldb.session.add(new_subs)
            sqldb.session.commit()

            from fw.async_tasks import not_paid_check_send
            not_paid_check_send.make_all_user_fin_batch_paid_and_replace_watermarked_docs_with_normal.delay(
                user_id=user.id)

    current_app.logger.info(u"yad - success")
    return _xml_resp(success_result)
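# The handler above authenticates a paymentAviso callback by md5-hashing a semicolon-joined
# string of request fields plus the shop password and comparing it to the "md5" form value.
# A minimal standalone sketch of that check (field names follow the handler; the helper itself
# is illustrative and not part of the codebase):
import hashlib

def yad_md5_matches(form, shop_password):
    # Field order used above:
    # action;orderSumAmount;orderSumCurrencyPaycash;orderSumBankPaycash;shopId;invoiceId;customerNumber;shopPassword
    parts = [
        form.get('action', ''),
        form.get('orderSumAmount', ''),
        form.get('orderSumCurrencyPaycash', ''),
        form.get('orderSumBankPaycash', ''),
        form.get('shopId', ''),
        form.get('invoiceId', ''),
        form.get('customerNumber', ''),
        shop_password,
    ]
    return hashlib.md5(';'.join(parts)).hexdigest().upper() == form.get('md5', '')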
    try:
        try:
            from fw.documents.doc_builder import DocBuilder
            result = DocBuilder.process(doc_data, target_document.document_type.db_value(), config, owner_id,
                                        add_watermark=None)
            # TODO: watermark logic for not-paid batches, preview documents
            if result:
                logger.info(u" - BB - ")
                first_file_id = result[0]
                assert first_file_id
                file_obj = FileObject.query.filter_by(id=first_file_id).first()
                if file_obj:
                    logger.info(u" - BB file_obj = %s - " % first_file_id)
                    BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({
                        'status': UserDocumentStatus.DS_RENDERED,
                        'pages_count': FileStorage.get_pdf_file_page_count(first_file_id, config)
                    })
                    DocumentFilesObject.query.filter_by(doc_id=doc_id).delete()
                    for file_id in result:
                        new_doc_file = DocumentFilesObject(doc_id=doc_id, files_id=file_id)
                        sqldb.session.add(new_doc_file)
            else:
                logger.info(u" - CC - ")
                raise Exception("Failed to generate document %s" % doc_id)
        except Exception as ex:
            logger.info(u" - DD - ")
            logger.exception(u"Failed to render document %s" % doc_id)
def act(action, batch_db, event_data, plugin_config, logger, config):
    assert batch_db
    descriptors = filter(lambda x: x['name'] == action, get_actions())
    action_descriptor = descriptors[0] if descriptors else None
    if not action_descriptor:
        raise ValueError(u'Invalid action: %s for %s plugin' % (action, PLUGIN_NAME))

    args = action_descriptor['args']
    source_data = copy(event_data)
    data = transform_with_schema(source_data, {"fields": args})

    batch_manager = BatchManager.init(batch_db)

    if action == 'render_group':
        doc_id_list = []
        batch_id = data['batch_id'].db_value()
        doc_types = event_data['doc_types'] if 'doc_types' in event_data else plugin_config.get('doc_types', [])
        assert doc_types
        try:
            all_ready = True
            for doc_type in doc_types:
                doc = BatchDocumentDbObject.query.filter_by(document_type=doc_type, batch_id=batch_id).first()
                if doc:
                    doc.data = {}
                    doc.status = UserDocumentStatus.DS_RENDERING
                    doc.tried_to_render = True
                    if doc.file:
                        file_obj = doc.file
                        doc.file = None
                        FileStorage.remove_file(file_obj.id, current_app.config)
                    sqldb.session.commit()
                else:
                    if not batch_manager.is_document_required(batch_db, doc_type):
                        logger.debug(u"Document %s is not required by its condition. Skipping" % doc_type)
                        continue
                    new_doc = BatchDocumentDbObject(
                        _owner=current_user,
                        document_type=doc_type,
                        batch_id=batch_id,
                        data={},
                        status=UserDocumentStatus.DS_RENDERING,
                        caption=batch_manager.get_title(doc_type),
                        tried_to_render=True
                    )
                    sqldb.session.add(new_doc)
                    sqldb.session.commit()
                    doc = new_doc

                async_result = rendering.render_document_plugin.apply_async((batch_id, {'doc_id': doc.id}),
                                                                            countdown=2)
                if not async_result.ready():
                    all_ready = False
                    BatchDocumentDbObject.query.filter_by(id=doc.id).update({
                        '_celery_task_id': str(async_result.id),
                        '_celery_task_started': datetime.utcnow()
                    })
                    sqldb.session.commit()
                doc_id_list.append(doc.id)

            check_task_info = DocGroupRenderTaskCheck(
                batch_id=batch_id,
                doc_id_list=doc_id_list,
                event_data=event_data
            )
            sqldb.session.add(check_task_info)
            sqldb.session.commit()
            if all_ready:
                rendering.batch_group_gen_check_task.delay()
        except Exception:
            zabbix_sender.send("celery_failures", 1)
            logger.exception(u"Failed to start rendering document group")
            raise
    elif action == 'render_doc':
        pass
    elif action == 'render_doc_by_id':
        pass
    elif action == 'cancel_doc_render':
        pass
    elif action == 'cancel_doc_render_by_id':
        pass
    else:
        raise Exception(u"Invalid action %s for plugin %s" % (action, PLUGIN_NAME))

    # mail_type = data['mail_type'].db_value()
    # target_type = data['target_type'].db_value()
    # target_emails = []
    # if target_type == MailTargetEnum.MTA_BATCH_OWNER:
    #     email = batch_db._owner.email
    #     if email:
    #         target_emails.append(email)
    # elif target_type == MailTargetEnum.MTA_SPECIFIED:
    #     target_emails = data.get('target_email_list', [])
    # else:  # MailTargetEnum.MTA_EVENT_DATA_FIELD
    #     data_field = data.get('event_data_field', None)
    #     if data_field:
    #         email = event_data.get(data_field, None)
    #         if email:
    #             target_emails.append(email)
    #
    # if not target_emails:
    #     core_tasks.send.delay(batch_db.id, '%s:send_fail' % PLUGIN_NAME, event_data)
    #     return False
    #
    # composer = create_composer(mail_type, logger)
    # retry_count = data.get('retry_count')
    # silent = data.get('silent', False)
    # from fw.documents.fields.simple_doc_fields import DocField
    # try:
    #     if isinstance(target_emails, DocField):
    #         target_emails = target_emails.db_value()
    #     if isinstance(retry_count, DocField):
    #         retry_count = retry_count.db_value()
    #     if isinstance(silent, DocField):
    #         silent = silent.db_value()
    #     composer.send_email(target_emails, batch_id, event_data, retry_count, silent=silent)
    # except Exception:
    #     logger.exception(u"Failed to send email")
    #     return False

    return True
def render_batch_document_raw(batch_db_object_id, doc_id, config):
    start_time = datetime.utcnow()
    request_id = current_task.request.id
    logger = celery.log.get_default_logger()
    logger.info(u"Starting rendering document %s of %s" % (doc_id, batch_db_object_id))

    target_document = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()
    if not target_document:
        logger.warn(u" - O_o - ")
        return False

    if celery.conf['CELERY_ALWAYS_EAGER']:
        request_id = "test"
        target_document._celery_task_id = request_id
        target_document._celery_task_started = start_time

    logger.info(u"doc type of %s is %s" % (doc_id, target_document.document_type))

    if target_document and target_document.status == UserDocumentStatus.DS_RENDERING:
        assert target_document._celery_task_id
        assert target_document._celery_task_started
        if target_document._celery_task_id != request_id:
            logger.info(u" - B - ")
            if abs((datetime.utcnow() - target_document._celery_task_started).total_seconds()) < 60:
                logger.info(u" - C - ")
                logger.info(u"Task for rendering %s of %s is already being run. Exiting..." % (
                    doc_id, batch_db_object_id))
                return True

    result = BatchDocumentDbObject.query.filter_by(id=doc_id, status=UserDocumentStatus.DS_RENDERING).update({
        '_celery_task_id': request_id,
        '_celery_task_started': start_time
    })
    sqldb.session.commit()
    if not result:
        logger.error(u"Failed to change status of document being rendered into RENDERING state. \n"
                     u"Probably document is being generated now already 1")
        return True

    logger.info(u" - F - ")
    batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_db_object_id).scalar()
    if not batch_db_object:
        logger.error(u"Failed to find batch with id %s" % batch_db_object_id)
        return True

    from fw.documents.batch_manager import BatchManager
    owner_id = batch_db_object._owner_id
    logger.info(u" - I - ")

    if target_document.file:
        file_obj = target_document.file
        target_document.file = None
        FileStorage.remove_file(file_obj.id, config)

    BatchDocumentDbObject.query.filter_by(id=target_document.id).update({
        'file_id': None,
        'status': UserDocumentStatus.DS_RENDERING,
        'data': {},
        '_celery_task_id': request_id,
        '_celery_task_started': start_time,
        'tried_to_render': True
    })
    sqldb.session.commit()
    target_document = BatchDocumentDbObject.query.filter_by(id=target_document.id).scalar()
    logger.info(u" - J updated %s - " % target_document.id)

    try:
        target_document = UserDocument.db_obj_to_field(target_document)
        _ = batch_db_object.data
        detached_batch = batch_db_object
        sqldb.session.expunge(detached_batch)
        make_transient(detached_batch)
        target_document_data = BatchManager.make_document(detached_batch, target_document.document_type.db_value())
        batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_db_object_id).scalar()
        logger.info(u" - T - ")
        if not target_document_data:
            logger.info(u" - U - ")
            BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({
                'status': UserDocumentStatus.DS_RENDERING_FAILED,
                '_celery_task_id': None,
                '_celery_task_started': None
            })
            sqldb.session.commit()
            return True

        target_document.data.value = target_document_data
        target_document.data.initialized = True
        doc_data = target_document.get_db_object_data()["data"]
        result = BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({'data': doc_data})
        sqldb.session.commit()

        # batch_descriptor = DocRequisitiesStorage.get_batch_descriptor(batch_db_object.batch_type)
        # validator_condition_schema = batch_descriptor.get('validation_condition', None)
        # from fw.documents.schema.var_construction import VarConstructorFactory
        # validator_condition = VarConstructorFactory.make_constructor(validator_condition_schema) if validator_condition_schema else None
        #
        # validation_type = ValidationTypeEnum.VT_STRICT
        #
        # if validator_condition:
        #     context = {
        #         '<document>': target_document,
        #         '<batch>': batch_db_object
        #     }
        #     validation_type = validator_condition.build(context)
        #
        # if validation_type != ValidationTypeEnum.VT_NO:
        #     target_document.validate(strict=(validation_type == ValidationTypeEnum.VT_STRICT))
        target_document.validate(strict=True)
        logger.info(u" - V - ")
    except Exception as ex:
        logger.info(u" - W - ")
        logger.exception(u"Failed to make document %s from batch %s" % (doc_id, batch_db_object_id))
        ext_data = []
        if getattr(ex, 'ext_data', None):
            ext_data.extend(ex.ext_data)

        if ext_data:
            error_info_ext = error_tree_to_list(ext_data)
            error_info_ext = [{
                'field': '.'.join(i['field'].split('.')[1:]) if '.' in i['field'] else i['field'],
                'error_code': i['error_code']
            } for i in error_info_ext]
            if not batch_db_object.error_info or 'error_ext' not in batch_db_object.error_info:
                batch_db_object.error_info = {'error_ext': error_info_ext}
            else:
                error_info_fields_set = set([i['field'] for i in batch_db_object.error_info['error_ext']])
                merged_error_info = copy(batch_db_object.error_info)
                for i in error_info_ext:
                    if i['field'] not in error_info_fields_set:
                        merged_error_info['error_ext'].append({
                            'field': i['field'],
                            'error_code': i['error_code']
                        })
                sqldb.session.commit()
                batch_db_object.error_info = merged_error_info

        logger.info(u" - pre YY -> XX: %s - " % str(doc_id))
        result = BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({
            'status': UserDocumentStatus.DS_RENDERING_FAILED,
            '_celery_task_id': None,
            '_celery_task_started': None
        })
        sqldb.session.commit()
        if not result:
            logger.warn(u"Failed to mark rendering document as failed")
        logger.info(u" - X - ")

    logger.info(u" - XX - ")
    return True
def render_single_document(db_doc, doc_title, watermark, config, logger, task_id, render_doc_file):
    doc_id = db_doc.id
    doc_type = db_doc.document_type
    owner = db_doc._owner
    if not db_doc:
        logger.info(u"Exit 1")
        return False

    from fw.documents.doc_builder import DocBuilder

    if db_doc._celery_task_id is None:
        db_doc._celery_task_started = datetime.utcnow()
        db_doc._celery_task_id = task_id
        sqldb.session.commit()
    elif db_doc._celery_task_id != task_id:
        if db_doc._celery_task_started and abs((datetime.utcnow() - db_doc._celery_task_started).total_seconds()) > 60:
            db_doc._celery_task_started = datetime.utcnow()
            db_doc._celery_task_id = task_id
            sqldb.session.commit()
        else:
            logger.info(u"Exit 2")
            return False

    user_doc = UserDocument.db_obj_to_field(db_doc)
    try:
        doc_data = user_doc.get_db_object_data()["data"]
        db_doc.status = UserDocumentStatus.DS_RENDERING
        db_doc.tried_to_render = True
        sqldb.session.commit()
        result = DocBuilder.process(doc_data, doc_type, config, owner.id, add_watermark=watermark,
                                    render_doc_file=render_doc_file)
        if result:
            current_doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()
            if not current_doc or current_doc._celery_task_id != task_id:
                logger.warn(u"Failed to set result: user document has been captured by another task")
                logger.info(u"Exit 3")
                return False

            # iterate over a copy: items are removed from the collection inside the loop
            for doc_file in list(db_doc.files):
                old_file_id = doc_file.id
                FileStorage.remove_file(old_file_id, config)
                db_doc.files.remove(doc_file)

            file_id = None
            for file_id in result:
                new_doc_file = DocumentFilesObject(doc_id=doc_id, files_id=file_id)
                sqldb.session.add(new_doc_file)

            if file_id:
                updated_count = BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=task_id).update({
                    'status': UserDocumentStatus.DS_RENDERED,
                    'caption': doc_title,
                    'pages_count': FileStorage.get_pdf_file_page_count(file_id, config)
                })
                if not updated_count:
                    logger.warn(u"Failed to set result: user document has been captured by another task 2")
                    logger.info(u"Exit 4")
                    sqldb.session.rollback()
                    return False
            sqldb.session.commit()
            logger.info(u"Exit 5")
            return True
        else:
            raise Exception(u"Failed to generate document %s" % doc_type)
    except Exception as ex:
        logger.exception(u"Failed to render document %s" % doc_type)
        db_doc.status = UserDocumentStatus.DS_RENDERING_FAILED
        sqldb.session.commit()
        try:
            error_message = ex.message if isinstance(ex.message, unicode) else unicode(ex.message, errors='ignore')
            logger.exception(error_message)
        except Exception:
            logger.error(u"Failed to make error")
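# Both render tasks above guard against concurrent workers with the same optimistic pattern:
# a task first writes its own _celery_task_id/_celery_task_started onto the document (claim),
# then saves results through an UPDATE filtered by that same task id (confirm), so a task that
# lost the document in the meantime changes nothing. A condensed sketch of the idea, written
# against a simplified hypothetical Document model rather than BatchDocumentDbObject:
from datetime import datetime, timedelta

STALE_AFTER = timedelta(seconds=60)  # mirrors the 60-second window used in the tasks above

def try_claim(session, Document, doc_id, task_id):
    # Claim the document unless another task claimed it recently.
    doc = Document.query.filter_by(id=doc_id).scalar()
    if doc is None:
        return False
    held_by_other = doc._celery_task_id is not None and doc._celery_task_id != task_id
    still_fresh = (doc._celery_task_started is not None and
                   datetime.utcnow() - doc._celery_task_started < STALE_AFTER)
    if held_by_other and still_fresh:
        return False
    doc._celery_task_id = task_id
    doc._celery_task_started = datetime.utcnow()
    session.commit()
    return True

def save_if_still_owner(session, Document, doc_id, task_id, values):
    # Apply results only if this task still owns the document.
    updated = Document.query.filter_by(id=doc_id, _celery_task_id=task_id).update(values)
    session.commit()
    return bool(updated)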
def yurist_set(batch_id=None, check=None, file_list=None, typos_correction=False):
    typos_correction = bool(typos_correction)
    batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()
    if not batch_db:
        raise errors.BatchNotFound()

    if check:
        new = True
        # search for any active check
        cur_check_obj = YuristBatchCheckObject.query.filter(
            YuristBatchCheckObject.batch_id == batch_id,
            YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)
        ).order_by(YuristBatchCheckObject.create_date.desc()).first()
        if cur_check_obj:
            new = False

        real_file_list = []
        file_descr = []
        if file_list:
            try:
                file_list_data = json.loads(file_list)
                for file_obj in file_list_data:
                    file_id = file_obj['id']
                    file_obj = FileStorage.get_file(file_id)
                    if file_obj:
                        real_file_list.append(file_obj)
                    else:
                        current_app.logger.warn(u"Failed to find file with id %s" % file_id)
            except Exception:
                current_app.logger.exception(u"Failed to parse file list: %s" % file_list)

        # Insert new check
        if new:
            yurist_batch_check = YuristBatchCheckObject(**{
                'batch_id': batch_id,
                'create_date': datetime.utcnow(),
                'status': YuristBatchCheckStatus.YBS_WAIT,
                'typos_correction': typos_correction
            })
            sqldb.session.add(yurist_batch_check)
            for file_obj in real_file_list:
                attach = YuristCheckFilesObject()
                attach.files_id = file_obj.id
                yurist_batch_check.attached_files.append(attach)
        else:
            YuristCheckFilesObject.query.filter_by(check_id=cur_check_obj.id).delete()
            for file_obj in real_file_list:
                attach = YuristCheckFilesObject()
                attach.files_id = file_obj.id
                cur_check_obj.attached_files.append(attach)
            cur_check_obj.create_date = datetime.utcnow()
            cur_check_obj.typos_correction = typos_correction
            cur_check_obj.status = YuristBatchCheckStatus.YBS_WAIT
        sqldb.session.commit()
        yurist_manager.yurist_check(current_app.config, batch_db, real_file_list, current_app.logger)
    else:
        # search for active check
        cur_check_obj = YuristBatchCheckObject.query.filter(
            YuristBatchCheckObject.batch_id == batch_id,
            YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)
        ).order_by(YuristBatchCheckObject.create_date.desc()).first()
        # If found any: set status to refused
        if cur_check_obj:
            cur_check_obj.status = YuristBatchCheckStatus.YBS_REFUSED
            sqldb.session.commit()
        return {'result': True}
    return {'result': True}