def post(self):
    """Register a new user and send an activation e-mail.

    Parses username/password/email from the request, persists the user
    with a hashed password and a fresh activation token, caches the
    token -> user-id mapping for one hour, and mails an activation link.

    Returns a dict with returnCode "201" on success, "406" on failure.
    """
    args = user_region_parser.parse_args()
    u_name = args.get('username')
    # Store only the hashed password, never the plain text (and don't
    # print credentials to stdout as the old code did).
    u_password = generate_password(args.get('password'))
    u_email = args.get('email')
    u_token = uuid.uuid4()
    try:
        user = User(u_name=u_name, u_password=u_password, u_email=u_email, u_token=u_token)
        db.session.add(user)
        db.session.commit()
    except Exception as e:
        # Roll back the failed transaction so the session stays usable.
        db.session.rollback()
        print(e)
        return {"returnCode": "406", "msg": "用户注册失败"}
    # Cache under the *string* form of the token: the activation URL below
    # carries str(u_token), so the later lookup must use the same key.
    cache.set(str(u_token), user.id, timeout=60 * 60)
    async_send_mail_util(
        subject='用户账号激活',
        recipients=[u_email],
        emailTmp='userRegion',
        username=u_name,
        active_url="http://localhost:5000/activation/?u_token=" + str(u_token))
    return {"returnCode": "201", "msg": "用户注册成功", "returnValue": user}
def get_workzone_drawer() -> DrawArucoZone:
    """Return the cached work-zone drawer, constructing it on first use."""
    cached = cache.get("workzone_drawer")
    if cached is not None:
        return cached
    drawer = DrawArucoZone("../robot_work_zone_estimation/aruco_config.json")
    cache.set("workzone_drawer", drawer)
    return drawer
def _get_NRIS_token():
    """Fetch (and cache for 12 h) an NRIS OAuth client-credentials token.

    Returns the cached access token when present; otherwise requests a
    fresh one from the configured NRIS token endpoint.

    Raises:
        TypeError: if NRIS_TOKEN_URL is not configured.
        requests.HTTPError: if the token endpoint returns an error status.
    """
    result = cache.get(NRIS_CACHE_PREFIX + 'token')
    if result is None:
        params = {
            'disableDeveloperFilter': 'true',
            'grant_type': 'client_credentials',
            'scope': 'NRISWS.*'
        }
        url = current_app.config['NRIS_TOKEN_URL']
        if url is None:
            raise TypeError('Could not load the NRIS URL.')
        resp = requests.get(
            url=url,
            params=params,
            auth=(current_app.config['NRIS_USER_NAME'],
                  current_app.config['NRIS_PASS']))
        # The original wrapped this in `try: ... except: raise`, a bare-except
        # no-op; let raise_for_status propagate errors directly.
        resp.raise_for_status()
        result = resp.json().get('access_token')
        cache.set(NRIS_CACHE_PREFIX + 'token', result, timeout=TIMEOUT_12_HOURS)
    return result
def put(self):
    """Exchange a valid one-time link (OTL) for a one-time password (OTP)."""
    timeout = AuthorizationResource.ONE_TIME_PASSWORD_TIMEOUT_SECONDS
    data = AuthorizationResource.parser.parse_args()
    otl_guid = data.get('otl_guid')
    app_guid = cache.get(otl_guid)
    current_app.logger.info(f'this is app_guid: {app_guid}')
    if not (otl_guid and app_guid):
        abort(401)
    # The link is single-use: invalidate it before issuing the OTP.
    cache.delete(otl_guid)
    current_app.logger.info(f"OTL_GUID_VALUE: {cache.get(otl_guid)}")
    otp_guid = uuid.uuid4()
    issued_time_utc = datetime.now(timezone.utc)
    cache.set(str(otp_guid), app_guid, timeout=timeout)
    return jsonify({
        "OTP": otp_guid,
        "issued_time_utc": issued_time_utc.strftime("%d %b %Y %H:%M:%S %z"),
        "timeout_seconds": AuthorizationResource.ONE_TIME_PASSWORD_TIMEOUT_SECONDS,
        "application_guid": app_guid
    })
def patch(self, document_guid=None):
    """Append a TUS upload chunk at the client-declared offset.

    Validates that the upload exists, that the declared Upload-Offset
    matches the server's recorded offset, and that the chunk fits within
    the declared file size; then writes the chunk and either finalizes
    the document or records the new offset.

    :param document_guid: GUID of the document being uploaded.
    :returns: empty 204 response with TUS headers, or an error payload.
    """
    if document_guid is None:
        return self.create_error_payload(400, 'Must specify document GUID in PATCH'), 400
    file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
    if file_path is None or not os.path.lexists(file_path):
        return self.create_error_payload(404, 'PATCH sent for a upload that does not exist'), 404
    request_offset = int(request.headers.get('Upload-Offset', 0))
    file_offset = cache.get(FILE_UPLOAD_OFFSET(document_guid))
    if request_offset != file_offset:
        # Fixed typo in the client-facing message ("offest" -> "offset").
        return self.create_error_payload(
            409, "Offset in request does not match uploaded file's offset"), 409
    chunk_size = request.headers.get('Content-Length')
    if chunk_size is None:
        return self.create_error_payload(400, 'No Content-Length header in request'), 400
    chunk_size = int(chunk_size)
    new_offset = file_offset + chunk_size
    file_size = cache.get(FILE_UPLOAD_SIZE(document_guid))
    if new_offset > file_size:
        return self.create_error_payload(
            413, 'The uploaded chunk would put the file above its declared file size.'), 413
    try:
        # Write the chunk in place at the recorded offset.
        with open(file_path, "r+b") as f:
            f.seek(file_offset)
            f.write(request.data)
    except IOError:
        return self.create_error_payload(500, 'Unable to write to file'), 500
    if new_offset == file_size:
        # File transfer complete: stamp completion and drop upload state.
        doc = DocumentManager.find_by_document_manager_guid(document_guid)
        doc.upload_completed_date = datetime.now()
        doc.save()
        cache.delete(FILE_UPLOAD_SIZE(document_guid))
        cache.delete(FILE_UPLOAD_OFFSET(document_guid))
        cache.delete(FILE_UPLOAD_PATH(document_guid))
    else:
        # Upload still in progress: remember how far we've written.
        cache.set(FILE_UPLOAD_OFFSET(document_guid), new_offset, TIMEOUT_24_HOURS)
    response = make_response("", 204)
    response.headers['Tus-Resumable'] = TUS_API_VERSION
    response.headers['Tus-Version'] = TUS_API_SUPPORTED_VERSIONS
    response.headers['Upload-Offset'] = new_offset
    response.headers['Access-Control-Expose-Headers'] = "Tus-Resumable,Tus-Version,Upload-Offset"
    return response
def get(self, mine_no):
    """Return NRIS compliance data for a mine, cached for 60 minutes."""
    result = cache.get(NRIS_COMPLIANCE_DATA(mine_no))
    if result is not None:
        return result
    if not Mine.find_by_mine_no_or_guid(mine_no):
        raise NotFound("No mine record in CORE.")
    try:
        raw_data = NRIS_API_service._get_NRIS_data_by_mine(
            request.headers.get('Authorization'), mine_no)
    except requests.exceptions.Timeout:
        current_app.logger.error(f'NRIS_API Connection Timeout <mine_no={mine_no}>')
        raise
    except requests.exceptions.HTTPError as e:
        current_app.logger.error(
            f'NRIS_API Connection HTTPError <mine_no={mine_no}>, {str(e)}')
        raise
    result = NRIS_API_service._process_NRIS_data(raw_data)
    # Only cache non-empty results, so an empty answer is retried next call.
    if len(result['orders']) > 0:
        cache.set(NRIS_COMPLIANCE_DATA(mine_no), result, timeout=TIMEOUT_60_MINUTES)
    return result
def get_currency_rates(config):
    """Return currency rates, serving from cache when available.

    Fix: the original called cache.get twice (once to test, once to
    read), doing redundant work and racing with cache expiry between
    the two calls. Read once and reuse the value.
    """
    rates = cache.get("currency_rates")
    if not rates:
        rates = fetch_currency_rates(config)
        cache.set("currency_rates", rates)
    return rates
def get_face_detection_drawer() -> DrawFaceDetection:
    """Return the cached face-detection drawer, building it on first use."""
    cached = cache.get("face_det_drawer")
    if cached is not None:
        return cached
    model_config = {"model_type": "cnn", "number_of_times_to_upsample": 0}
    drawer = DrawFaceDetection(FaceRecognitionLibWrapper(model_config))
    cache.set("face_det_drawer", drawer)
    return drawer
def get_zone_estimator():
    """Build (or fetch from cache) the ArUco work-zone estimator from config.

    Loads the ArUco configuration JSON (cached), assembles camera
    parameters and the target work zone, and returns a cached
    ArucoZoneEstimator instance.
    """
    aruco_params = cache.get("aruco_params")
    if aruco_params is None:
        with open("robot_work_zone_estimation/aruco_config.json") as conf_file:
            aruco_params = json.load(conf_file)
        cache.set("aruco_params", aruco_params)
    marker_id = aruco_params["marker_idx"]
    zone = Workzone(aruco_params["wz_cx"], aruco_params["wz_cy"],
                    aruco_params["wz_height"], aruco_params["wz_width"])
    marker_world_size = aruco_params["marker_world_size"]
    marker_size = aruco_params["marker_size"]
    cam = aruco_params["camera_params"]
    # np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # np.float64 is the equivalent dtype.
    camera_params = CameraParams(
        np.array(cam['camera_mtx'], dtype=np.float64),
        np.array(cam['distortion_vec'], dtype=np.float64),
        np.array(cam['rotation_vec'], dtype=np.float64),
        np.array(cam['translation_vec'], dtype=np.float64))
    estimator = cache.get("zone_estimator")
    if estimator is None:
        estimator = ArucoZoneEstimator(marker_world_size,
                                       ARUCO_MARKER_SIZE[marker_size],
                                       marker_id, camera_params, zone)
        cache.set("zone_estimator", estimator)
    return estimator
def _get_NRIS_token():
    """Fetch (and cache for 60 min) an OAuth token for the remote NRIS API.

    Raises:
        TypeError: if NRIS_REMOTE_TOKEN_URL is not configured.
        requests.HTTPError: if the token endpoint returns an error status.
    """
    result = cache.get(NRIS_REMOTE_TOKEN)
    if result is None:
        # Presumably adjusts TLS ciphers for the remote endpoint — see helper.
        _change_default_cipher()
        params = {
            'disableDeveloperFilter': 'true',
            'grant_type': 'client_credentials',
            'scope': 'NRISWS.*'
        }
        url = current_app.config['NRIS_REMOTE_TOKEN_URL']
        if url is None:
            raise TypeError('Could not load the NRIS URL.')
        resp = requests.get(
            url=url,
            params=params,
            auth=(current_app.config['NRIS_REMOTE_CLIENT_ID'],
                  current_app.config['NRIS_REMOTE_CLIENT_SECRET']))
        # The original wrapped this in `try: ... except: raise`, a bare-except
        # no-op; let errors propagate directly.
        resp.raise_for_status()
        result = resp.json().get('access_token')
        cache.set(NRIS_REMOTE_TOKEN, result, timeout=TIMEOUT_60_MINUTES)
    return result
def get(self):
    """List parties; the all-inspectors query is served from a 12 h cache."""
    is_all_inspectors = dict(request.args) == ALL_INSPECTORS_QUERY_PARAMS
    if is_all_inspectors:
        cached = cache.get(GET_ALL_INSPECTORS_KEY)
        if cached:
            current_app.logger.debug(f'CACHE HIT - {GET_ALL_INSPECTORS_KEY}')
            return cached
        current_app.logger.debug(f'CACHE MISS - {GET_ALL_INSPECTORS_KEY}')
    paginated_parties, pagination_details = self.apply_filter_and_search(request.args)
    if not paginated_parties:
        raise BadRequest('Unable to fetch parties')
    result = marshal(
        {
            'records': paginated_parties.all(),
            'current_page': pagination_details.page_number,
            'total_pages': pagination_details.num_pages,
            'items_per_page': pagination_details.page_size,
            'total': pagination_details.total_results,
        }, PAGINATED_PARTY_LIST)
    # Only cache the canonical all-inspectors query, and never an empty result.
    if is_all_inspectors and pagination_details.total_results > 0:
        current_app.logger.debug(f'SET CACHE - {GET_ALL_INSPECTORS_KEY}')
        cache.set(GET_ALL_INSPECTORS_KEY, result, timeout=TIMEOUT_12_HOURS)
    return result
def get_segmentation_drawer() -> DrawSegmentation:
    """Return the cached segmentation drawer, creating it on first use.

    Bug fix: the original built a *second* DrawSegmentation instance just
    for cache.set, so the cached object was never the one returned and
    the model was constructed twice per cold call. Cache the same
    instance we return.
    """
    drawer = cache.get("segmentation_drawer")
    if drawer is None:
        drawer = DrawSegmentation(TorchvisionSegmentationWrapper())
        cache.set("segmentation_drawer", drawer)
    return drawer
def post(self, document_type_code):
    """Issue a short-lived token for generating a NoW application document.

    Validates the document type and its template, overwrites any
    read-only template fields with their enforced context values, and
    caches the generation payload under a 5-minute token.
    """
    document_type = NOWApplicationDocumentType.query.get(document_type_code)
    if not document_type:
        raise NotFound('Document type not found')
    if not document_type.document_template:
        raise BadRequest(f'Cannot generate a {document_type.description}')
    data = self.parser.parse_args()
    template_data = data['template_data']
    # Enforce read-only context data: the client may not override these.
    enforced_data = [
        x for x in document_type.document_template._form_spec_with_context(
            data['now_application_guid']) if x.get('read-only', False)
    ]
    for enforced_item in enforced_data:
        if template_data.get(enforced_item['id']) != enforced_item['context-value']:
            current_app.logger.debug(
                f'OVERWRITING ENFORCED key={enforced_item["id"]}, value={template_data.get(enforced_item["id"])} -> {enforced_item["context-value"]}'
            )
            template_data[enforced_item['id']] = enforced_item['context-value']
    # Use the string form so the cache key and the JSON response agree
    # (consistent with the other token-issuing endpoints in this module).
    token = str(uuid.uuid4())
    cache.set(
        NOW_DOCUMENT_DOWNLOAD_TOKEN(token), {
            'document_type_code': document_type_code,
            'now_application_guid': data['now_application_guid'],
            'template_data': template_data
        }, TIMEOUT_5_MINUTES)
    return {'token': token}
def get(self, application_guid, id):
    """Issue a 5-minute download token for a document on an application."""
    application = Application.find_by_application_guid(application_guid)
    if not application:
        raise NotFound('Application not found')
    document = next((d for d in application.documents if d.id == id), None)
    if not document:
        raise NotFound('Document not found')
    # Fall back to inferring the originating system from the document URL
    # when it was never recorded on the application.
    originating_system = application.originating_system
    if not originating_system:
        if "j200.gov.bc.ca" in document.documenturl:
            originating_system = "VFCBC"
        if "api.nrs.gov.bc.ca" in document.documenturl:
            originating_system = "NROS"
    token_guid = uuid.uuid4()
    cache.set(
        DOWNLOAD_TOKEN(token_guid), {
            'originating_system': originating_system,
            'documenturl': document.documenturl,
            'filename': document.filename
        }, TIMEOUT_5_MINUTES)
    return {'token_guid': token_guid}
def download(file_url):
    """Stream a NROS file to the client as an attachment.

    Obtains (and caches for 60 min) a NROS bearer token, fetches the
    file metadata for its filename, then proxies the file content as a
    streamed response with a Content-Disposition header.
    """
    nros_token = cache.get(NROS_TOKEN)
    if nros_token is None:
        basic_auth = HTTPBasicAuth(current_app.config['NROS_CLIENT_ID'],
                                   current_app.config['NROS_CLIENT_SECRET'])
        token_resp = requests.get(current_app.config['NROS_TOKEN_URL'], auth=basic_auth)
        nros_token = json.loads(token_resp.text)["access_token"]
        cache.set(NROS_TOKEN, nros_token, timeout=TIMEOUT_60_MINUTES)
    auth_header = {"Authorization": f"Bearer {nros_token}"}
    file_info_req = requests.get(file_url, stream=True, headers=auth_header)
    file_info_body = json.loads(file_info_req.text)
    file_download_req = requests.get(f'{file_url}/content', stream=True, headers=auth_header)
    file_download_resp = Response(
        stream_with_context(file_download_req.iter_content(chunk_size=2048)))
    file_download_resp.headers['Content-Type'] = file_download_req.headers['Content-Type']
    file_download_resp.headers[
        'Content-Disposition'] = f'attachment; filename="{quote(file_info_body["filename"])}"'
    return file_download_resp
def post(self, document_guid):
    """Issue a 5-minute download token for the given document GUID."""
    if not document_guid:
        raise BadRequest('Must specify document GUID')
    download_token = str(uuid.uuid4())
    cache.set(DOWNLOAD_TOKEN(download_token), document_guid, TIMEOUT_5_MINUTES)
    return {'token': download_token}
def wrapper(*args, **kwargs):
    """Throttle: allow one call per user per 15 seconds, else 403."""
    key = 'evil_' + str(current_user._id)
    if cache.get(key):
        abort(403)
    # Marker value; only the key's presence matters (15-second TTL).
    cache.set(key, object(), timeout=15)
    return f(*args, **kwargs)
def post(self):
    """Register a user: validate the e-mail, persist, send activation mail.

    Returns a dict with 'status' 201 on success, 422 when the e-mail is
    malformed or the user already exists.
    """
    args = parser.parse_args()
    username = args.get('username')
    password = args.get('password')
    email = args.get('email')
    # Raw string fixes the invalid "\." escape (DeprecationWarning);
    # the pattern value is unchanged.
    mailre = r"[a-z0-9_]+@[a-z0-9]+\.[a-z]{2,4}"
    if len(re.findall(mailre, email, re.I)) != 0:
        u_token = uuid.uuid4()
        user = User(username=username, email=email, u_token=u_token)
        # Hash the password on the model rather than storing plain text.
        user.generate_password(password)
        try:
            db.session.add(user)
            db.session.commit()
            # Cache token -> username for the 2-minute activation window.
            cache.set(str(u_token), username, timeout=60 * 2)
            # Send the activation mail asynchronously via the task queue.
            send_mail_util.delay(
                subject='账号激活',
                recipients=[email],
                emailTmp='activationUserEmail',
                username=username,
                url='http://127.0.0.1:5000/activation?u_token=' + str(u_token))
        except Exception as e:
            logs().error(e)
            db.session.rollback()
            return {"msg": "用户已存在", 'status': 422}
    else:
        return {"msg": "邮箱格式错误", 'status': 422}
    return {"msg": "用户注册成功", 'status': 201}
def get(self, mine_no_or_guid=None):
    """Fetch one mine, the cached map dataset, or a paginated mine list.

    - With an identifier: return that mine's JSON, or a 404 payload.
    - With ?map=true: return the (12 h cached) map JSON as a conditional
      response keyed on its last-modified time.
    - Otherwise: return a filtered, paginated mine listing.
    """
    if mine_no_or_guid:
        mine = Mine.find_by_mine_no_or_guid(mine_no_or_guid)
        if mine:
            return mine.json()
        return self.create_error_payload(404, 'Mine not found'), 404

    map_param = request.args.get('map', None, type=str)
    if map_param and map_param.lower() == 'true':
        # The map payload is huge (~16 MB uncompressed at 50,000 points)
        # and takes seconds to build, so the serialized JSON is cached
        # together with its generation timestamp.
        map_result = cache.get(MINE_MAP_CACHE)
        last_modified = cache.get(MINE_MAP_CACHE + '_LAST_MODIFIED')
        if not map_result:
            # `!= None` is required: SQLAlchemy overloads ==/!= for SQL.
            records = MineMapViewLocation.query.filter(
                MineMapViewLocation.latitude != None)
            last_modified = datetime.now()
            map_result = json.dumps(
                {
                    'mines': list((map(lambda x: x.json_for_map(), records)))
                }, separators=(',', ':'))
            cache.set(MINE_MAP_CACHE, map_result, timeout=TIMEOUT_12_HOURS)
            cache.set(MINE_MAP_CACHE + '_LAST_MODIFIED', last_modified,
                      timeout=TIMEOUT_12_HOURS)
        # Return a raw flask response so flask_restplus doesn't re-jsonify
        # the already-serialized string; honour If-Modified-Since so the
        # client re-downloads only when the data actually changed.
        response = make_response(map_result)
        response.headers['content-type'] = 'application/json'
        response.last_modified = last_modified
        response.make_conditional(request)
        return response

    paginated_mine_query, pagination_details = self.apply_filter_and_search(request.args)
    mines = paginated_mine_query.all()
    return {
        'mines': list(map(lambda x: x.json_for_list(), mines)),
        'current_page': pagination_details.page_number,
        'total_pages': pagination_details.num_pages,
        'items_per_page': pagination_details.page_size,
        'total': pagination_details.total_results,
    }
def get(self, inspection_id, attachment_id):
    """Issue a 5-minute token for downloading an NRIS inspection attachment."""
    documenturl = f'https://api.nrs.gov.bc.ca/nrisws-api/v1/attachments/{inspection_id}/attachment/{attachment_id}'
    # A missing file_name query param raises a 400 via werkzeug's lookup.
    filename = request.args['file_name']
    token_guid = uuid.uuid4()
    cache.set(
        DOWNLOAD_TOKEN(token_guid), {
            'documenturl': documenturl,
            'filename': filename
        }, TIMEOUT_5_MINUTES)
    return {'token_guid': token_guid}
def get(self, application_guid, document_guid):
    """Issue a 5-minute download token for an application payment document."""
    if not PaymentDocument.find_by_guid(application_guid, document_guid):
        raise NotFound('Application payment document not found')
    token_guid = uuid.uuid4()
    cache.set(DOWNLOAD_TOKEN(token_guid), {'document_guid': document_guid},
              TIMEOUT_5_MINUTES)
    return {'token_guid': token_guid}
def rebuild_and_return_map_cache():
    """Regenerate the mine-map JSON blob and cache it (12 h) with a timestamp."""
    # `!= None` is required: SQLAlchemy overloads ==/!= to build SQL.
    located_mines = MineMapViewLocation.query.filter(
        MineMapViewLocation.latitude != None).all()
    last_modified = datetime.utcnow()
    # Serialize once, compactly, and cache the string itself.
    map_result = json.dumps(
        {'mines': [m.json() for m in located_mines]}, separators=(',', ':'))
    cache.set(MINE_MAP_CACHE, map_result, timeout=TIMEOUT_12_HOURS)
    cache.set(MINE_MAP_CACHE + '_LAST_MODIFIED', last_modified, timeout=TIMEOUT_12_HOURS)
    return map_result
def post(self):
    """E-mail an applicant a single-use link (OTL) to view their application.

    Looks up the application, generates an OTL GUID, e-mails a styled
    access button, and caches otl_guid -> application_guid for the
    configured one-time-password window.
    """
    data = AuthorizationResource.parser.parse_args()
    application_guid = data.get('application_guid')
    application = Application.find_by_guid(application_guid)
    otl_guid = uuid.uuid4()
    if application is None:
        raise NotFound(
            'No application was found matching the provided reference number')
    html_content = f"""
    <table width="100%" style="font-size:12.0pt; color:#595959 " >
        <tr>
            <td>
                You have requested access to the Dormant Sites Reclamation Program site to view information about an application (see Reference Number above).
                <br/>
                <br/>
                Use the button below to access your application information and submit payment requests.
                <br/>
                This button can only be used once and access expires after four hours. If you need to access the application again, request another link on the website.
            </td>
        </tr>
        <tr>
            <td>
                <br/>
                <table style="margin-left: auto; margin-right: auto;">
                    <tr>
                        <td style="border-radius: 2px;" bgcolor="#003366" >
                            <a href="{ONE_TIME_LINK_FRONTEND_URL(otl_guid)}" target="_blank" style="padding: 8px 12px; border: 1px solid #003366;border-radius: 2px;font-size: 14px; color: #ffffff;text-decoration: none;font-weight:bold;display: inline-block;">
                                View Application
                            </a>
                        </td>
                    </tr>
                </table>
                <br/>
            </td>
        </tr>
    </table>"""
    with EmailService() as es:
        # Subject has no placeholders, so the f-prefix was unnecessary.
        es.send_email_to_applicant(
            application, "Dormant Site Reclamation Program – Access Request",
            html_content)
    current_app.logger.debug(f"This is a OTL: {otl_guid}")
    cache.set(
        str(otl_guid),
        application_guid,
        timeout=AuthorizationResource.ONE_TIME_PASSWORD_TIMEOUT_SECONDS)
    return "OK", 200
def wrapper(*args, **kwargs):
    """Memoize f's result under `key` (optionally formatted with args[1:]).

    Fix: cache any non-None result. The old `if rv:` guard skipped
    caching falsy-but-valid results (0, '', []), so those were
    recomputed on every call even though the read path treats any
    non-None cached value as a hit.
    """
    cache_key = key % args[1:] if len(args) > 1 else key
    rv = cache.get(cache_key)
    if rv is not None:
        return rv
    rv = f(*args, **kwargs)
    if rv is not None:
        cache.set(cache_key, rv, timeout=timeout)
    return rv
def recent_terms():
    """Return the most recent submissions; query results cached for 30 s.

    Fix: the original called cache.get twice — once to test and once to
    read — so an entry expiring between the calls produced
    jsonify(None). Read once and reuse the value.
    """
    submissions_key = 'recent-submissions'
    data = cache.get(submissions_key)
    if not data:
        wiki_records = get_all_records()
        data = format_record(wiki_records['Items'])
        cache.set(submissions_key, data, timeout=30)
    return jsonify(data), 200
def patch(self, document_guid):
    """Handle a TUS PATCH: append an upload chunk at the declared offset.

    Validates that the upload exists, that the client's Upload-Offset
    matches the server-side recorded offset, and that the chunk stays
    within the declared file size; then writes the chunk in place.
    Completes the document (and clears upload state) when the file is
    full, or records the new offset otherwise.

    :param document_guid: GUID of the document being uploaded.
    :returns: empty 204 response carrying TUS protocol headers.
    :raises NotFound: if the upload path is unknown or missing on disk.
    :raises Conflict: if the client's offset disagrees with the server's.
    :raises BadRequest: if Content-Length is absent.
    :raises RequestEntityTooLarge: if the chunk exceeds the declared size.
    :raises InternalServerError: if the chunk cannot be written.
    """
    file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
    if file_path is None or not os.path.lexists(file_path):
        raise NotFound('PATCH sent for a upload that does not exist')
    request_offset = int(request.headers.get('Upload-Offset', 0))
    file_offset = cache.get(FILE_UPLOAD_OFFSET(document_guid))
    # The client must resume exactly where the server left off.
    if request_offset != file_offset:
        raise Conflict(
            "Offset in request does not match uploaded file's offset")
    chunk_size = request.headers.get('Content-Length')
    if chunk_size is None:
        raise BadRequest('No Content-Length header in request')
    chunk_size = int(chunk_size)
    new_offset = file_offset + chunk_size
    file_size = cache.get(FILE_UPLOAD_SIZE(document_guid))
    if new_offset > file_size:
        raise RequestEntityTooLarge(
            'The uploaded chunk would put the file above its declared file size.'
        )
    try:
        # Write the chunk in place at the recorded offset.
        with open(file_path, "r+b") as f:
            f.seek(file_offset)
            f.write(request.data)
    except IOError as e:
        raise InternalServerError('Unable to write to file')
    if new_offset == file_size:
        # File transfer complete: stamp completion and drop upload state.
        doc = Document.find_by_document_guid(document_guid)
        doc.upload_completed_date = datetime.utcnow()
        doc.save()
        cache.delete(FILE_UPLOAD_SIZE(document_guid))
        cache.delete(FILE_UPLOAD_OFFSET(document_guid))
        cache.delete(FILE_UPLOAD_PATH(document_guid))
    else:
        # File upload still in progress: remember how far we've written.
        cache.set(FILE_UPLOAD_OFFSET(document_guid), new_offset, TIMEOUT_24_HOURS)
    response = make_response('', 204)
    response.headers['Tus-Resumable'] = TUS_API_VERSION
    response.headers['Tus-Version'] = TUS_API_SUPPORTED_VERSIONS
    response.headers['Upload-Offset'] = new_offset
    response.headers[
        'Access-Control-Expose-Headers'] = "Tus-Resumable,Tus-Version,Upload-Offset"
    return response
def get_wrapper(target: str) -> Tuple[BaseWrapper, str]:
    """Return a cached, loaded model wrapper and its prediction key.

    Unknown targets fall back to the helmet detector, matching the
    original if/elif chain's final else branch.
    """
    registry = {
        "segmentation": ("segmentation_wrapper", TorchvisionSegmentationWrapper, "mask"),
        "detection": ("detection_wrapper", YOLOWrapper, "boxes"),
        "keypoints": ("keypoints_wrapper", TorchvisionKeypointsWrapper, "keypoints"),
    }
    cache_key, factory, target_name = registry.get(
        target, ("helmet_det", HelmetnetWrapper, "boxes"))
    wrapper = cache.get(cache_key)
    if wrapper is None:
        wrapper = factory()
        wrapper.load()
        cache.set(cache_key, wrapper)
    return wrapper, target_name
def nris_etl_job():
    """Nightly NRIS ETL entry point; a cache flag prevents concurrent runs."""
    if cache.get(NRIS_JOB_PREFIX + NRIS_ETL_JOB) is not None:
        print("Job is running")
        return
    try:
        # The 12 h timeout acts as a safety release if cleanup never runs.
        cache.set(NRIS_JOB_PREFIX + NRIS_ETL_JOB, 'True', timeout=TIMEOUT_12_HOURS)
        _run_nris_etl()
    finally:
        cache.delete(NRIS_JOB_PREFIX + NRIS_ETL_JOB)
def _run_ETL():
    """Run the nightly ETL stored procedures; a cache flag guards reentry."""
    with sched.app.app_context():
        if cache.get(ETL):
            return
        cache.set(ETL, 'True', timeout=TIMEOUT_24_HOURS)
        # Each transfer is committed separately, preserving the original
        # execution order and per-step commit behaviour.
        for statement in (
                'select transfer_mine_information();',
                'select transfer_mine_manager_information();',
                'select transfer_permit_permitee_information();',
                'select transfer_mine_status_information();',
        ):
            db.session.execute(statement)
            db.session.execute('commit;')
def get(self):
    """Serve the static-content dictionary as JSON, cached for 60 minutes."""
    content_json = cache.get(STATIC_CONTENT_KEY)
    if not content_json:
        current_app.logger.debug('CACHE MISS - core-static-content')
        content = generate_static_content_dict()
        # NOTE(review): assert is stripped under `python -O`; presumably a
        # sanity check only — confirm before relying on it for validation.
        assert content
        content_dict = marshal(content, STATIC_CONTENT_MODEL)
        content_json = json.dumps(content_dict, separators=(',', ':'))
        cache.set(STATIC_CONTENT_KEY, content_json, TIMEOUT_60_MINUTES)
    # Raw response so the framework doesn't re-serialize the JSON string.
    response = make_response(content_json)
    response.headers['content-type'] = 'application/json'
    return response
def post(self, document_guid):
    """Issue a 5-minute download token for a fully-uploaded document."""
    if not document_guid:
        raise BadRequest('Must specify document GUID')
    doc = Document.find_by_document_guid(document_guid)
    if not doc:
        raise NotFound('Could not find document')
    if not doc.upload_completed_date:
        raise BadRequest('File upload not complete')
    download_token = str(uuid.uuid4())
    cache.set(DOWNLOAD_TOKEN(download_token), document_guid, TIMEOUT_5_MINUTES)
    return {'token': download_token}
def stats(cls):
    """Return user counts as {'all', 'active', 'inactive'}, cached.

    Fix: test cached values with `is None` instead of truthiness so a
    legitimate count of 0 is honoured as a cache hit rather than
    triggering a fresh COUNT query on every call.
    """
    active_users = cache.get('active_users')
    if active_users is None:
        active_users = cls.query.filter_by(active=True).count()
        cache.set('active_users', active_users)
    inactive_users = cache.get('inactive_users')
    if inactive_users is None:
        inactive_users = cls.query.filter_by(active=False).count()
        cache.set('inactive_users', inactive_users)
    return {
        'all': active_users + inactive_users,
        'active': active_users,
        'inactive': inactive_users
    }
def _run_ETL():
    """Run the nightly ETL; a random sleep desynchronizes competing pods."""
    # Jittered start (10 ms – 10 s) so two pods don't race for the flag.
    sleep(0.01 * randint(1, 1000))
    with sched.app.app_context():
        if cache.get(ETL):
            return
        cache.set(ETL, 'True', timeout=TIMEOUT_24_HOURS)
        # Each transfer is committed separately, preserving the original
        # execution order and per-step commit behaviour.
        for statement in (
                'select transfer_mine_information();',
                'select transfer_mine_manager_information();',
                'select transfer_permit_permitee_information();',
                'select transfer_mine_status_information();',
        ):
            db.session.execute(statement)
            db.session.execute('commit;')