def get_uah_amount_from_value(request, value, logging_params):
    """Return the UAH equivalent of *value* ({"amount": ..., "currency": ...}).

    Non-UAH amounts are multiplied by the matching rate from
    ``request.currency_rates``; an unknown currency is reported as a 422
    operation error.  The conversion is logged with *logging_params* context.
    """
    amount = float(value["amount"])
    currency = value["currency"]
    if currency != "UAH":
        for rate_row in request.currency_rates:
            if rate_row["cc"] == currency:
                currency_rate = rate_row["rate"]
                break
        else:
            # No published rate for this currency code.
            raise raise_operation_error(
                request,
                "Couldn't find currency {} on bank.gov.ua".format(currency),
                status=422,
            )
        amount *= currency_rate
        LOGGER.info(
            "Converting {} {} into {} UAH using rate {}".format(
                value["amount"], value["currency"], amount, currency_rate
            ),
            extra=context_unpack(
                request, {"MESSAGE_ID": "complaint_exchange_rate"}, logging_params
            ),
        )
    return amount
def upload_file_to_docservice(request, document, in_file, filename, content_type):
    """Upload *in_file* to the document service, retrying up to 10 times.

    On success stores the returned hash on *document* and returns the key
    (last path segment of the service URL).  After exhausting all attempts
    a 422 error is registered on the request and error_handler is raised.
    """
    parsed = urlparse(request.registry.docservice_url)
    upload_url = request.registry.docservice_upload_url or urlunsplit(
        (parsed.scheme, parsed.netloc, "/upload", "", "")
    )
    payload = {"file": (filename, in_file, content_type)}
    doc_url = None
    for _attempt in range(10):
        try:
            response = SESSION.post(
                upload_url,
                files=payload,
                headers={
                    "X-Client-Request-ID": request.environ.get("REQUEST_ID", "")
                },
                auth=(
                    request.registry.docservice_username,
                    request.registry.docservice_password,
                ),
            )
            json_data = response.json()
        except Exception as e:
            LOGGER.warning(
                "Raised exception '{}' on uploading document to document service': {}."
                .format(type(e), e),
                extra=context_unpack(
                    request,
                    {"MESSAGE_ID": "document_service_exception"},
                    {"file_size": in_file.tell()},
                ),
            )
        else:
            if response.status_code == 200 and json_data.get("data", {}).get("url"):
                doc_url = json_data["data"]["url"]
                doc_hash = json_data["data"]["hash"]
                break
            LOGGER.warning(
                "Error {} on uploading document to document service '{}': {}"
                .format(response.status_code, upload_url, response.text),
                extra=context_unpack(
                    request,
                    {"MESSAGE_ID": "document_service_error"},
                    {
                        "ERROR_STATUS": response.status_code,
                        "file_size": in_file.tell(),
                    },
                ),
            )
        # Rewind so the next attempt re-sends the file from the start.
        in_file.seek(0)
    else:
        # for/else: reached only when no attempt broke out successfully.
        request.errors.add("body", "body", "Can't upload document to document service.")
        request.errors.status = 422
        raise error_handler(request)
    document.hash = doc_hash
    key = urlparse(doc_url).path.split("/")[-1]
    return key
def error_handler(errors, request_params=True):
    """Log the failed request, notify ErrorDesctiptorEvent subscribers and
    return the JSON error response for *errors*.

    When *request_params* is true, the caller role and query parameters are
    folded into the logging context; matchdict entries are always included
    under upper-cased keys.
    """
    params = {'ERROR_STATUS': errors.status}
    if request_params:
        params['ROLE'] = str(errors.request.authenticated_role)
        if errors.request.params:
            params['PARAMS'] = str(dict(errors.request.params))
    if errors.request.matchdict:
        params.update(
            {name.upper(): val for name, val in errors.request.matchdict.items()}
        )
    # NOTE: "ErrorDesctiptorEvent" spelling matches the project's class name.
    errors.request.registry.notify(ErrorDesctiptorEvent(errors, params))
    LOGGER.info(
        'Error on processing request "{}"'.format(dumps(errors, indent=4)),
        extra=context_unpack(errors.request, {'MESSAGE_ID': 'error_handler'}, params),
    )
    return json_error(errors)
def error_handler(errors, request_params=True):
    """Collect error context, emit an ErrorDesctiptorEvent, log, and build
    the JSON error response for *errors*."""
    params = {'ERROR_STATUS': errors.status}
    if request_params:
        # Include who made the request and with which query parameters.
        params['ROLE'] = str(errors.request.authenticated_role)
        if errors.request.params:
            params['PARAMS'] = str(dict(errors.request.params))
    if errors.request.matchdict:
        for match_key, match_value in errors.request.matchdict.items():
            params[match_key.upper()] = match_value
    errors.request.registry.notify(ErrorDesctiptorEvent(errors, params))
    log_message = 'Error on processing request "{}"'.format(dumps(errors, indent=4))
    LOGGER.info(
        log_message,
        extra=context_unpack(errors.request, {'MESSAGE_ID': 'error_handler'}, params),
    )
    return json_error(errors)
def error_handler(request, request_params=True):
    """Log the failing *request*, notify ErrorDescriptorEvent subscribers and
    return the JSON error response.

    Unlike the older variants, this one takes the request itself and reads
    the errors collection from ``request.errors``.
    """
    errors = request.errors
    params = {"ERROR_STATUS": errors.status}
    if request_params:
        params["ROLE"] = str(request.authenticated_role)
        if request.params:
            params["PARAMS"] = str(dict(request.params))
    if request.matchdict:
        params.update({key.upper(): value for key, value in request.matchdict.items()})
    request.registry.notify(ErrorDescriptorEvent(request, params))
    LOGGER.info(
        'Error on processing request "{}"'.format(dumps(errors, indent=4)),
        extra=context_unpack(request, {"MESSAGE_ID": "error_handler"}, params),
    )
    return json_error(request)
def upload_file(request, blacklisted_fields=DOCUMENT_BLACKLISTED_FIELDS, whitelisted_fields=DOCUMENT_WHITELISTED_FIELDS):
    """Attach an uploaded file to the current context object.

    Two paths are visible here:
    1. JSON body with validated 'data': carry selected fields over from the
       previous document version and refresh its URL, then return.
    2. Raw/multipart upload with a document service configured: POST the
       file to the service (up to 10 attempts) and record hash/key.

    Fix applied: ``except Exception, e`` (Python-2-only syntax) replaced
    with ``except Exception as e``, consistent with the other upload
    helpers in this file (valid on Python 2.6+ and 3.x).

    NOTE(review): this variant ends right after computing ``key`` in the
    document-service branch -- no local-storage fallback and no ``return``
    are visible; confirm against the canonical implementation.
    """
    # Latest prior version of the document, if any (None on first upload).
    first_document = request.validated['documents'][-1] if 'documents' in request.validated and request.validated['documents'] else None
    if 'data' in request.validated and request.validated['data']:
        document = request.validated['document']
        check_document(request, document, 'body')
        if first_document:
            for attr_name in type(first_document)._fields:
                if attr_name in whitelisted_fields:
                    # Whitelisted fields always carry over from the previous version.
                    setattr(document, attr_name, getattr(first_document, attr_name))
                elif attr_name not in blacklisted_fields and attr_name not in request.validated['json_data']:
                    # Other fields carry over unless blacklisted or explicitly sent.
                    setattr(document, attr_name, getattr(first_document, attr_name))
        document_route = request.matched_route.name.replace("collection_", "")
        document = update_document_url(request, document, document_route, {})
        return document
    if request.content_type == 'multipart/form-data':
        data = request.validated['file']
        filename = get_filename(data)
        content_type = data.type
        in_file = data.file
    else:
        # Raw-body upload: reuse title from the previous document version.
        filename = first_document.title
        content_type = request.content_type
        in_file = request.body_file
    if hasattr(request.context, "documents"):
        # upload new document
        model = type(request.context).documents.model_class
    else:
        # update document
        model = type(request.context)
    document = model({'title': filename, 'format': content_type})
    document.__parent__ = request.context
    if 'document_id' in request.validated:
        document.id = request.validated['document_id']
    if first_document:
        for attr_name in type(first_document)._fields:
            if attr_name not in blacklisted_fields:
                setattr(document, attr_name, getattr(first_document, attr_name))
    if request.registry.docservice_url:
        parsed_url = urlparse(request.registry.docservice_url)
        url = request.registry.docservice_upload_url or urlunsplit(
            (parsed_url.scheme, parsed_url.netloc, '/upload', '', ''))
        files = {'file': (filename, in_file, content_type)}
        doc_url = None
        index = 10  # retry budget
        while index:
            try:
                r = SESSION.post(
                    url,
                    files=files,
                    headers={'X-Client-Request-ID': request.environ.get('REQUEST_ID', '')},
                    auth=(request.registry.docservice_username, request.registry.docservice_password),
                )
                json_data = r.json()
            except Exception as e:
                LOGGER.warning(
                    "Raised exception '{}' on uploading document to document service': {}.".format(type(e), e),
                    extra=context_unpack(
                        request,
                        {'MESSAGE_ID': 'document_service_exception'},
                        {'file_size': in_file.tell()},
                    ),
                )
            else:
                if r.status_code == 200 and json_data.get('data', {}).get('url'):
                    doc_url = json_data['data']['url']
                    doc_hash = json_data['data']['hash']
                    break
                else:
                    LOGGER.warning(
                        "Error {} on uploading document to document service '{}': {}".format(r.status_code, url, r.text),
                        extra=context_unpack(
                            request,
                            {'MESSAGE_ID': 'document_service_error'},
                            {'ERROR_STATUS': r.status_code, 'file_size': in_file.tell()},
                        ),
                    )
            # Rewind so the next attempt re-sends the whole file.
            in_file.seek(0)
            index -= 1
        else:
            # Retries exhausted without a successful upload.
            request.errors.add('body', 'data', "Can't upload document to document service.")
            request.errors.status = 422
            raise error_handler(request.errors)
        document.hash = doc_hash
        key = urlparse(doc_url).path.split('/')[-1]
def upload_file(request, blacklisted_fields=DOCUMENT_BLACKLISTED_FIELDS, whitelisted_fields=DOCUMENT_WHITELISTED_FIELDS):
    """Attach an uploaded file to the current context object.

    Path 1: JSON body with validated 'data' -- copy selected fields from the
    previous document version, refresh the document URL and return.
    Path 2: raw/multipart upload with a document service configured -- POST
    the file to the service (up to 10 attempts) and record hash/key.

    Fix applied: ``except Exception, e`` (Python-2-only syntax) replaced
    with ``except Exception as e`` to match the other upload helpers in
    this file (valid on Python 2.6+ and 3.x).

    NOTE(review): this variant also ends right after computing ``key`` --
    no fallback storage or ``return`` statement is visible here; confirm
    against the canonical implementation.
    """
    # Previous document version, when one exists; None on first upload.
    first_document = request.validated['documents'][-1] if 'documents' in request.validated and request.validated['documents'] else None
    if 'data' in request.validated and request.validated['data']:
        document = request.validated['document']
        check_document(request, document, 'body')
        if first_document:
            for attr_name in type(first_document)._fields:
                if attr_name in whitelisted_fields:
                    # Always inherit whitelisted fields.
                    setattr(document, attr_name, getattr(first_document, attr_name))
                elif attr_name not in blacklisted_fields and attr_name not in request.validated['json_data']:
                    # Inherit the rest unless blacklisted or explicitly provided.
                    setattr(document, attr_name, getattr(first_document, attr_name))
        document_route = request.matched_route.name.replace("collection_", "")
        document = update_document_url(request, document, document_route, {})
        return document
    if request.content_type == 'multipart/form-data':
        data = request.validated['file']
        filename = get_filename(data)
        content_type = data.type
        in_file = data.file
    else:
        # Raw-body upload keeps the previous document's title.
        filename = first_document.title
        content_type = request.content_type
        in_file = request.body_file
    if hasattr(request.context, "documents"):
        # upload new document
        model = type(request.context).documents.model_class
    else:
        # update document
        model = type(request.context)
    document = model({'title': filename, 'format': content_type})
    document.__parent__ = request.context
    if 'document_id' in request.validated:
        document.id = request.validated['document_id']
    if first_document:
        for attr_name in type(first_document)._fields:
            if attr_name not in blacklisted_fields:
                setattr(document, attr_name, getattr(first_document, attr_name))
    if request.registry.docservice_url:
        parsed_url = urlparse(request.registry.docservice_url)
        url = request.registry.docservice_upload_url or urlunsplit(
            (parsed_url.scheme, parsed_url.netloc, '/upload', '', ''))
        files = {'file': (filename, in_file, content_type)}
        doc_url = None
        index = 10  # retry budget
        while index:
            try:
                r = SESSION.post(
                    url,
                    files=files,
                    headers={'X-Client-Request-ID': request.environ.get('REQUEST_ID', '')},
                    auth=(request.registry.docservice_username, request.registry.docservice_password),
                )
                json_data = r.json()
            except Exception as e:
                LOGGER.warning(
                    "Raised exception '{}' on uploading document to document service': {}."
                    .format(type(e), e),
                    extra=context_unpack(
                        request,
                        {'MESSAGE_ID': 'document_service_exception'},
                        {'file_size': in_file.tell()},
                    ),
                )
            else:
                if r.status_code == 200 and json_data.get('data', {}).get('url'):
                    doc_url = json_data['data']['url']
                    doc_hash = json_data['data']['hash']
                    break
                else:
                    LOGGER.warning(
                        "Error {} on uploading document to document service '{}': {}"
                        .format(r.status_code, url, r.text),
                        extra=context_unpack(
                            request,
                            {'MESSAGE_ID': 'document_service_error'},
                            {'ERROR_STATUS': r.status_code, 'file_size': in_file.tell()},
                        ),
                    )
            # Rewind before the next attempt.
            in_file.seek(0)
            index -= 1
        else:
            # Retries exhausted without a successful upload.
            request.errors.add('body', 'data', "Can't upload document to document service.")
            request.errors.status = 422
            raise error_handler(request.errors)
        document.hash = doc_hash
        key = urlparse(doc_url).path.split('/')[-1]
def upload_file(
    request,
    blacklisted_fields=DOCUMENT_BLACKLISTED_FIELDS,
    whitelisted_fields=DOCUMENT_WHITELISTED_FIELDS,
):
    """Attach an uploaded file to the current context object.

    Three paths:
    1. JSON body with validated "data": copy selected fields from the
       previous document version, refresh its URL and return.
    2. Raw/multipart upload with a document service configured: POST the
       file to the service (up to 10 attempts) and record hash/key.
    3. Raw/multipart upload without a document service: store the payload
       base64-encoded under the "_attachments" mapping of the validated
       db_doc.

    Returns the document model instance; raises via error_handler when the
    document service cannot be reached after all retries.
    """
    # Latest prior version of the document, if any (None on first upload).
    first_document = (
        request.validated["documents"][-1]
        if "documents" in request.validated and request.validated["documents"]
        else None
    )
    if "data" in request.validated and request.validated["data"]:
        # JSON update path: the document model was already built by validators.
        document = request.validated["document"]
        check_document(request, document, "body")
        if first_document:
            for attr_name in type(first_document)._fields:
                if attr_name in whitelisted_fields:
                    # Whitelisted fields always carry over from the previous version.
                    setattr(document, attr_name, getattr(first_document, attr_name))
                elif attr_name not in blacklisted_fields and attr_name not in request.validated["json_data"]:
                    # Everything else carries over unless blacklisted or explicitly sent.
                    setattr(document, attr_name, getattr(first_document, attr_name))
        document_route = request.matched_route.name.replace("collection_", "")
        document = update_document_url(request, document, document_route, {})
        return document
    if request.content_type == "multipart/form-data":
        data = request.validated["file"]
        filename = get_filename(data)
        content_type = data.type
        in_file = data.file
    else:
        # Raw body PUT: keep the previous document's title.
        filename = first_document.title
        content_type = request.content_type
        in_file = request.body_file
    if hasattr(request.context, "documents"):
        # upload new document
        model = type(request.context).documents.model_class
    else:
        # update document
        model = type(request.context)
    document = model({"title": filename, "format": content_type})
    document.__parent__ = request.context
    if "document_id" in request.validated:
        document.id = request.validated["document_id"]
    if first_document:
        # Inherit every non-blacklisted field from the previous version.
        for attr_name in type(first_document)._fields:
            if attr_name not in blacklisted_fields:
                setattr(document, attr_name, getattr(first_document, attr_name))
    if request.registry.docservice_url:
        parsed_url = urlparse(request.registry.docservice_url)
        url = request.registry.docservice_upload_url or urlunsplit(
            (parsed_url.scheme, parsed_url.netloc, "/upload", "", "")
        )
        files = {"file": (filename, in_file, content_type)}
        doc_url = None
        index = 10  # retry budget
        while index:
            try:
                r = SESSION.post(
                    url,
                    files=files,
                    headers={"X-Client-Request-ID": request.environ.get("REQUEST_ID", "")},
                    auth=(request.registry.docservice_username, request.registry.docservice_password),
                )
                json_data = r.json()
            except Exception as e:
                LOGGER.warning(
                    "Raised exception '{}' on uploading document to document service': {}.".format(type(e), e),
                    extra=context_unpack(
                        request, {"MESSAGE_ID": "document_service_exception"}, {"file_size": in_file.tell()}
                    ),
                )
            else:
                if r.status_code == 200 and json_data.get("data", {}).get("url"):
                    doc_url = json_data["data"]["url"]
                    doc_hash = json_data["data"]["hash"]
                    break
                else:
                    LOGGER.warning(
                        "Error {} on uploading document to document service '{}': {}".format(
                            r.status_code, url, r.text
                        ),
                        extra=context_unpack(
                            request,
                            {"MESSAGE_ID": "document_service_error"},
                            {"ERROR_STATUS": r.status_code, "file_size": in_file.tell()},
                        ),
                    )
            # Rewind so the next attempt re-sends the whole file.
            in_file.seek(0)
            index -= 1
        else:
            # while/else: reached only when all 10 attempts failed.
            request.errors.add("body", "data", "Can't upload document to document service.")
            request.errors.status = 422
            raise error_handler(request.errors)
        document.hash = doc_hash
        key = urlparse(doc_url).path.split("/")[-1]
    else:
        # No document service configured: store the payload as a base64 attachment.
        key = generate_id()
        filename = "{}_{}".format(document.id, key)
        request.validated["db_doc"]["_attachments"][filename] = {
            "content_type": document.format,
            "data": b64encode(in_file.read()),
        }
    document_route = request.matched_route.name.replace("collection_", "")
    document_path = request.current_route_path(
        _route_name=document_route, document_id=document.id, _query={"download": key}
    )
    # Strip scheme/host prefix: keep only the in-API path for the download URL.
    document.url = "/" + "/".join(document_path.split("/")[3:])
    update_logging_context(request, {"file_size": in_file.tell()})
    return document