def get(self):
    token_guid = request.args.get('token', '')
    attachment = request.args.get('as_attachment', None)
    token_data = cache.get(DOWNLOAD_TOKEN(token_guid))
    cache.delete(DOWNLOAD_TOKEN(token_guid))
    current_app.logger.debug('redis_data: ' + str(token_data))

    if not token_data:
        raise BadRequest('Valid token required for download')

    document_guid = token_data['document_guid']
    payment_doc = PaymentDocument.query.filter_by(document_guid=document_guid).one_or_none()
    if not payment_doc:
        raise NotFound('No document was found with the corresponding download token')

    if attachment is not None:
        attach_style = attachment == 'true'
    else:
        attach_style = '.pdf' not in payment_doc.document_name.lower()

    return ObjectStoreStorageService().download_file(
        path=payment_doc.object_store_path,
        display_name=quote(payment_doc.document_name),
        as_attachment=attach_style)
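# --- Example: minting the one-time download token consumed by the handler above ---
# A minimal sketch, not the project's actual issuing code: it reuses the same
# `cache` client and DOWNLOAD_TOKEN key helper that appear above; the function
# name and the 5-minute TTL are illustrative assumptions.
import uuid

DOWNLOAD_TOKEN_TTL_SECONDS = 300  # assumed TTL; the real value is not shown here

def issue_download_token(document_guid):
    # One-time semantics come from the GET handler deleting the key on first use.
    token_guid = str(uuid.uuid4())
    cache.set(DOWNLOAD_TOKEN(token_guid),
              {'document_guid': document_guid},
              timeout=DOWNLOAD_TOKEN_TTL_SECONDS)
    return token_guid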
def put(self):
    otp_guid = None
    issued_time_utc = None
    timeout = AuthorizationResource.ONE_TIME_PASSWORD_TIMEOUT_SECONDS

    data = AuthorizationResource.parser.parse_args()
    otl_guid = data.get('otl_guid')
    app_guid = cache.get(otl_guid)
    current_app.logger.info(f'this is app_guid: {app_guid}')

    if otl_guid and app_guid:
        # The one-time link is single-use: delete it and log to confirm the deletion.
        cache.delete(otl_guid)
        current_app.logger.info(f'OTL_GUID_VALUE: {cache.get(otl_guid)}')
        otp_guid = uuid.uuid4()
        issued_time_utc = datetime.now(timezone.utc)
        cache.set(str(otp_guid), app_guid, timeout=timeout)
    else:
        abort(401)

    return jsonify({
        'OTP': otp_guid,
        'issued_time_utc': issued_time_utc.strftime('%d %b %Y %H:%M:%S %z'),
        'timeout_seconds': timeout,
        'application_guid': app_guid
    })
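# --- Example: exchanging a one-time link (OTL) for a one-time password (OTP) ---
# A client-side sketch of calling the PUT handler above. The field name
# ('otl_guid') and the response keys mirror the handler; the base URL and
# route are assumptions.
import requests

def exchange_otl_for_otp(base_url, otl_guid):
    resp = requests.put(f'{base_url}/authorization', json={'otl_guid': otl_guid})
    resp.raise_for_status()  # a 401 here means the OTL was missing or already used
    body = resp.json()
    return body['OTP'], body['timeout_seconds']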
def post(self):
    data = self.parser.parse_args()
    lat = data.get('latitude')
    lon = data.get('longitude')
    if (lat and not lon) or (not lat and lon):
        raise BadRequest('latitude and longitude must both be empty, or both provided')

    # Query the mine table and check if that mine name exists.
    _throw_error_if_mine_exists(data.get('mine_name'))

    mine = Mine(
        mine_no=generate_mine_no(),
        mine_name=data.get('mine_name'),
        mine_note=data.get('mine_note'),
        major_mine_ind=data.get('major_mine_ind'),
        mine_region=data.get('mine_region'),
        ohsc_ind=data.get('ohsc_ind'),
        union_ind=data.get('union_ind'))
    db.session.add(mine)

    if lat and lon:
        mine.mine_location = MineLocation(latitude=lat, longitude=lon)
        cache.delete(MINE_MAP_CACHE)

    _mine_status_processor(data.get('mine_status'), data.get('status_date'), mine)
    db.session.commit()
    return mine
def post(self):
    data = self.parser.parse_args()
    lat = data.get('latitude')
    lon = data.get('longitude')
    if (lat and not lon) or (not lat and lon):
        raise BadRequest('latitude and longitude must both be empty, or both provided')

    # Query the mine table and check if that mine name exists.
    _throw_error_if_mine_exists(data.get('mine_name'))

    mine = Mine(
        mine_no=generate_mine_no(),
        mine_name=data.get('mine_name'),
        mine_note=data.get('mine_note'),
        major_mine_ind=data.get('major_mine_ind'),
        mine_region=data.get('mine_region'),
        ohsc_ind=data.get('ohsc_ind'),
        union_ind=data.get('union_ind'),
        latitude=lat,
        longitude=lon)

    _mine_status_processor(data.get('mine_status'), data.get('status_date'), mine)
    mine.save()

    # Clear and rebuild the cache after committing changes to the db.
    if lat and lon:
        cache.delete(MINE_MAP_CACHE)
        MineMapResource.rebuild_map_cache_async()

    # Generate & set hybrid_properties to include in the response payload.
    mine.init_on_load()
    return mine
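# --- Example: one possible shape for MineMapResource.rebuild_map_cache_async ---
# The handlers here only call this method; this is a hedged sketch assuming it
# recomputes the map payload on a background thread so requests do not block.
# The query, serialized fields, and cache timeout are illustrative assumptions.
import json
import threading

def rebuild_map_cache_async():
    app = current_app._get_current_object()  # capture the real app for the worker thread

    def _rebuild():
        with app.app_context():
            mines = Mine.query.filter(Mine.latitude.isnot(None)).all()
            payload = json.dumps([{
                'mine_guid': str(m.mine_guid),
                'latitude': str(m.latitude),
                'longitude': str(m.longitude),
            } for m in mines])
            cache.set(MINE_MAP_CACHE, payload, timeout=TIMEOUT_24_HOURS)

    threading.Thread(target=_rebuild, daemon=True).start()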
def get(self):
    token_guid = request.args.get('token', '')
    as_attachment = request.args.get('as_attachment', None)
    document_guid = cache.get(DOWNLOAD_TOKEN(token_guid))
    cache.delete(DOWNLOAD_TOKEN(token_guid))

    if not document_guid:
        raise BadRequest('Valid token required for download')

    document = Document.query.filter_by(document_guid=document_guid).first()
    if not document:
        raise NotFound('Could not find the document corresponding to the token')

    if as_attachment is not None:
        as_attachment = as_attachment == 'true'
    else:
        as_attachment = '.pdf' not in document.file_display_name.lower()

    if document.object_store_path:
        return ObjectStoreStorageService().download_file(
            path=document.object_store_path,
            display_name=document.file_display_name,
            as_attachment=as_attachment)

    return send_file(
        filename_or_fp=document.full_storage_path,
        attachment_filename=document.file_display_name,
        as_attachment=as_attachment)
def put(self, mine_no_or_guid):
    mine = Mine.find_by_mine_no_or_guid(mine_no_or_guid)
    if not mine:
        raise NotFound("Mine not found.")

    data = self.parser.parse_args()
    tenure = data.get('tenure_number_id')
    lat = data.get('latitude')
    lon = data.get('longitude')
    if (lat and not lon) or (not lat and lon):
        raise BadRequest('latitude and longitude must both be empty, or both provided')

    # Mine detail
    if 'mine_name' in data and mine.mine_name != data['mine_name']:
        _throw_error_if_mine_exists(data['mine_name'])
        mine.mine_name = data['mine_name']
    if 'mine_note' in data:
        mine.mine_note = data['mine_note']
    if 'major_mine_ind' in data:
        mine.major_mine_ind = data['major_mine_ind']
    if 'mine_region' in data:
        mine.mine_region = data['mine_region']
    if 'ohsc_ind' in data:
        mine.ohsc_ind = data['ohsc_ind']
    if 'union_ind' in data:
        mine.union_ind = data['union_ind']
    mine.save()

    # Tenure validation
    if tenure:
        tenure_exists = MineralTenureXref.find_by_tenure(tenure)
        if tenure_exists and tenure_exists.mine_guid == mine.mine_guid:
            raise BadRequest('Error: Field tenure_id already exists for this mine.')
        tenure = MineralTenureXref(
            mineral_tenure_xref_guid=uuid.uuid4(),
            mine_guid=mine.mine_guid,
            tenure_number_id=tenure)
        tenure.save()

    # Location
    if mine.mine_location:
        # Update the existing record.
        if 'latitude' in data:
            mine.mine_location.latitude = data['latitude']
        if 'longitude' in data:
            mine.mine_location.longitude = data['longitude']
        mine.mine_location.save()
    elif lat and lon:
        mine.mine_location = MineLocation(latitude=lat, longitude=lon)
        mine.save()
    cache.delete(MINE_MAP_CACHE)

    # Status validation
    _mine_status_processor(data.get('mine_status'), data.get('status_date'), mine)
    return mine
def get(self, inspection_id, attachment_id):
    token_guid = request.args.get('token', '')
    document_info = cache.get(DOWNLOAD_TOKEN(token_guid))
    cache.delete(DOWNLOAD_TOKEN(token_guid))

    if not document_info:
        raise BadRequest('Valid token required for download')

    return NRISDownloadService.download(document_info['documenturl'],
                                        document_info['filename'])
def get(self):
    # Ensure that the token is valid.
    token = request.args.get('token', '')
    token_data = cache.get(NOW_DOCUMENT_DOWNLOAD_TOKEN(token))
    cache.delete(NOW_DOCUMENT_DOWNLOAD_TOKEN(token))
    if not token_data:
        raise BadRequest('Valid token required for download')

    # Get the template associated with the token.
    doc_type = NOWApplicationDocumentType.query.unbound_unsafe().get(
        token_data['document_type_code'])
    template_path = os.path.join(current_app.root_path,
                                 doc_type.document_template.template_file_path)

    # Generate the document using the template and template data.
    docgen_resp = DocumentGeneratorService.generate_document_and_stream_response(
        template_path, data=token_data['template_data'])

    # Push the document to the Document Manager.
    filename = docgen_resp.headers['X-Report-Name']
    now_application_guid = token_data['now_application_guid']
    now_application_identity = NOWApplicationIdentity.query.unbound_unsafe().get(
        now_application_guid)
    document_manager_guid = DocumentManagerService.pushFileToDocumentManager(
        file_content=docgen_resp.content,
        filename=filename,
        mine=now_application_identity.mine,
        document_category='noticeofwork',
        authorization_header=token_data['authorization_header'])

    # Add the document to the Notice of Work's documents.
    username = token_data['username']
    new_mine_doc = MineDocument(
        mine_guid=now_application_identity.now_application.mine_guid,
        document_manager_guid=document_manager_guid,
        document_name=filename,
        create_user=username,
        update_user=username)
    now_doc = NOWApplicationDocumentXref(
        mine_document=new_mine_doc,
        now_application_document_type=doc_type,
        now_application_id=now_application_identity.now_application_id,
        create_user=username,
        update_user=username)
    now_application_identity.now_application.documents.append(now_doc)
    now_application_identity.save()

    # Return the generated document.
    return Response(
        stream_with_context(docgen_resp.iter_content(chunk_size=2048)),
        headers=dict(docgen_resp.headers))
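# --- Example: the streaming pattern used above, in isolation ---
# The generation handlers wrap a streamed `requests` response in
# stream_with_context so generated files are proxied to the client
# chunk-by-chunk instead of being buffered in memory. A minimal
# self-contained sketch; the upstream URL is an assumption.
import requests
from flask import Response, stream_with_context

def proxy_generated_document(upstream_url):
    docgen_resp = requests.get(upstream_url, stream=True)
    return Response(
        stream_with_context(docgen_resp.iter_content(chunk_size=2048)),
        headers=dict(docgen_resp.headers))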
def get(self, application_guid, id):
    token_guid = request.args.get('token', '')
    document_info = cache.get(DOWNLOAD_TOKEN(token_guid))
    cache.delete(DOWNLOAD_TOKEN(token_guid))

    if not document_info:
        raise BadRequest('Valid token required for download')

    if document_info['originating_system'] == 'VFCBC':
        return VFCBCDownloadService.download(document_info['documenturl'],
                                             document_info['filename'])
    if document_info['originating_system'] == 'NROS':
        return NROSDownloadService.download(document_info['documenturl'])

    raise InternalServerError('Unknown application document server')
def post(self, mine_no_or_guid=None):
    if mine_no_or_guid:
        self.raise_error(400, 'Error: Unexpected mine number in Url.')

    data = self.parser.parse_args()
    lat = data['latitude']
    lon = data['longitude']
    note = data['note']
    location = None
    status = data['mine_status']
    major_mine_ind = data['major_mine_ind']
    mine_region = data['mine_region']

    try:
        # Query the mine table and check if that mine name exists.
        self.throw_error_if_mine_exists(data['name'])
        mine = Mine(
            mine_guid=uuid.uuid4(),
            mine_no=generate_mine_no(),
            mine_name=data['name'],
            mine_note=note if note else '',
            major_mine_ind=major_mine_ind,
            mine_region=mine_region,
            **self.get_create_update_dict())
    except AssertionError as e:
        self.raise_error(400, 'Error: {}'.format(e))
    mine.save()

    if lat and lon:
        location = MineLocation(
            mine_location_guid=uuid.uuid4(),
            mine_guid=mine.mine_guid,
            latitude=lat,
            longitude=lon,
            **self.get_create_update_dict())
        location.save()
        cache.delete(MINE_MAP_CACHE)

    mine_status = self.mine_status_processor(status, mine.mine_guid) if status else None

    return {
        'mine_guid': str(mine.mine_guid),
        'mine_no': mine.mine_no,
        'mine_name': mine.mine_name,
        'mine_note': mine.mine_note,
        'major_mine_ind': mine.major_mine_ind,
        'latitude': str(location.latitude) if location else None,
        'longitude': str(location.longitude) if location else None,
        'mine_status': mine_status.json() if mine_status else None,
        'mine_region': mine.mine_region if mine_region else None,
    }
def nris_etl_job():
    """This nightly job initiates the ETL from NRIS into our app domain."""
    job_running = cache.get(NRIS_JOB_PREFIX + NRIS_ETL_JOB)
    if job_running is None:
        try:
            cache.set(NRIS_JOB_PREFIX + NRIS_ETL_JOB, 'True', timeout=TIMEOUT_12_HOURS)
            _run_nris_etl()
        finally:
            cache.delete(NRIS_JOB_PREFIX + NRIS_ETL_JOB)
    else:
        print("Job is running")
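# --- Example: the cache-as-lock pattern generalized ---
# nris_etl_job uses a cache key as a crude mutex so overlapping scheduler runs
# skip the ETL. A hedged, reusable sketch of the same idea; note the get/set
# pair is not atomic, so (as in the original) two workers racing closely could
# both start the job.
from functools import wraps

def run_exclusively(lock_key, timeout):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if cache.get(lock_key) is not None:
                print("Job is running")
                return None
            try:
                cache.set(lock_key, 'True', timeout=timeout)
                return func(*args, **kwargs)
            finally:
                cache.delete(lock_key)
        return wrapper
    return decorator

# Usage (illustrative): @run_exclusively(NRIS_JOB_PREFIX + NRIS_ETL_JOB, TIMEOUT_12_HOURS)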
def cancel():
    form = CancelSubscriptionForm()

    if form.validate_on_submit():
        # Cancel the user's subscription.
        if current_user.subscription:
            subscription = Subscription()
            canceled = subscription.cancel(user=current_user)
        else:
            # If there is no subscription, then just delete the user.
            canceled = True

        if canceled:
            # Get the user's email.
            email = current_user.email

            # Clear the cache.
            mailbox_id = current_user.mailbox_id
            if cache.get(mailbox_id):
                cache.delete(mailbox_id)

            from app.blueprints.parse.models.mailbox import Mailbox

            # Delete the credentials from MG.
            for mailbox in Mailbox.query.filter_by(user_email=email).all():
                delete_inbox(mailbox.mailbox_id)

            # Delete all emails, rules and mailboxes belonging to the user.
            from app.blueprints.user.tasks import delete_all
            delete_all.delay(email, mailbox_id)

            # Delete the user.
            from app.blueprints.billing.tasks import delete_users
            ids = [current_user.id]
            delete_users(ids)

            # Send a cancellation email.
            from app.blueprints.user.tasks import send_cancel_email
            send_cancel_email.delay(email)

            flash('Sorry to see you go! Your subscription has been canceled.', 'success')
            return redirect(url_for('user.login'))

    return render_template('billing/cancel.html', form=form)
def put(self, mine_no_or_guid):
    mine = Mine.find_by_mine_no_or_guid(mine_no_or_guid)
    refresh_cache = False
    if not mine:
        raise NotFound("Mine not found.")

    data = self.parser.parse_args()
    lat = data.get('latitude')
    lon = data.get('longitude')
    if (lat and not lon) or (not lat and lon):
        raise BadRequest('latitude and longitude must both be empty, or both provided')

    # Mine detail
    if 'mine_name' in data and mine.mine_name != data['mine_name']:
        _throw_error_if_mine_exists(data['mine_name'])
        mine.mine_name = data['mine_name']
        refresh_cache = True
    if 'mine_note' in data:
        mine.mine_note = data['mine_note']
    if 'major_mine_ind' in data:
        mine.major_mine_ind = data['major_mine_ind']
    if 'mine_region' in data:
        mine.mine_region = data['mine_region']
    if 'ohsc_ind' in data:
        mine.ohsc_ind = data['ohsc_ind']
    if 'union_ind' in data:
        mine.union_ind = data['union_ind']
    if 'latitude' in data and 'longitude' in data:
        mine.latitude = data['latitude']
        mine.longitude = data['longitude']
        refresh_cache = True
    if 'exemption_fee_status_code' in data:
        mine.exemption_fee_status_code = data['exemption_fee_status_code']
    if 'exemption_fee_status_note' in data:
        mine.exemption_fee_status_note = data['exemption_fee_status_note']
    mine.save()

    _mine_status_processor(data.get('mine_status'), data.get('status_date'), mine)

    # refresh_cache will need to be set for all supported fields, should more be added in the future.
    if refresh_cache:
        cache.delete(MINE_MAP_CACHE)
        MineMapResource.rebuild_map_cache_async()

    return mine
def get(self):
    token_guid = request.args.get('token', '')
    doc_guid = cache.get(DOWNLOAD_TOKEN(token_guid))
    cache.delete(DOWNLOAD_TOKEN(token_guid))

    if not doc_guid:
        raise BadRequest('Valid token required for download')

    doc = Document.query.filter_by(document_guid=doc_guid).first()
    if not doc:
        raise NotFound('Could not find the document corresponding to the token')

    # Non-PDF files are forced to download rather than render inline.
    not_pdf = '.pdf' not in doc.file_display_name.lower()
    return send_file(filename_or_fp=doc.full_storage_path,
                     attachment_filename=doc.file_display_name,
                     as_attachment=not_pdf)
def get(self, token):
    if not token:
        raise BadRequest('Must specify token')

    doc_guid = cache.get(DOWNLOAD_TOKEN(token))
    cache.delete(DOWNLOAD_TOKEN(token))
    if not doc_guid:
        raise NotFound('Could not find token')

    doc = Document.find_by_document_guid(doc_guid)
    if not doc:
        raise NotFound('Could not find document')

    return send_file(filename_or_fp=doc.full_storage_path,
                     attachment_filename=doc.filename,
                     as_attachment=True)
def get(self):
    token_guid = request.args.get('token', '')
    attachment = request.args.get('as_attachment', None)
    token_data = cache.get(DOWNLOAD_TOKEN(token_guid))
    cache.delete(DOWNLOAD_TOKEN(token_guid))
    current_app.logger.debug('redis_data: ' + str(token_data))

    if not token_data:
        raise BadRequest('Valid token required for download')

    # Generation token
    if token_data.get('generation', False):
        application = Application.find_by_guid(token_data['application_guid'])
        template_path = token_data['template_path']
        docgen_resp = DocumentGeneratorService.generate_document_and_stream_response(
            template_path, application.shared_cost_agreement_template_json, 'pdf')
        headers = dict(docgen_resp.headers)
        headers['Content-Disposition'] = (
            f'attachment; filename=shared_cost_agreement_{application.company_name}.pdf')
        file_resp = Response(
            stream_with_context(docgen_resp.iter_content(chunk_size=2048)),
            headers=headers)
    # Download token
    else:
        document_guid = token_data['document_guid']
        app_doc = ApplicationDocument.query.filter_by(
            application_document_guid=document_guid).one_or_none()
        if not app_doc:
            raise NotFound('No document was found with the corresponding download token')

        if attachment is not None:
            attach_style = attachment == 'true'
        else:
            attach_style = '.pdf' not in app_doc.document_name.lower()

        file_resp = ObjectStoreStorageService().download_file(
            path=app_doc.object_store_path,
            display_name=quote(app_doc.document_name),
            as_attachment=attach_style)

    return file_resp
def get(self):
    token = request.args.get('token', '')
    token_data = cache.get(NOW_DOCUMENT_DOWNLOAD_TOKEN(token))
    cache.delete(NOW_DOCUMENT_DOWNLOAD_TOKEN(token))
    if not token_data:
        raise BadRequest('Valid token required for download')

    doc_type = NOWApplicationDocumentType.query.unbound_unsafe().get(
        token_data['document_type_code'])
    template_path = os.path.join(current_app.root_path,
                                 doc_type.document_template.template_file_path)
    return DocumentGeneratorService.generate_document_and_stream_response(
        template_path, data=token_data['template_data'])
def get(self):
    args = parser_token.parse_args()

    # Look up the username for this activation token.
    u_token = args.get('u_token')
    username = cache.get(u_token)
    print(u_token, username)

    # The token is single-use: delete it.
    cache.delete(u_token)

    user = User.query.filter_by(username=username).first()
    user.u_status = True
    db.session.add(user)
    return {'msg': "Account activated", 'status': 200}
def scrape_term():
    form = dict(request.form)
    if not form.get('search-term'):
        flash(*FlashMessages.bad_search_form)
        return redirect(url_for('web.index'))

    # Clear the cache on the recent-submissions table.
    cache.delete('recent-submissions')

    # Create/save the term to Postgres.
    term = Term.create(term=form.get('search-term'), links=[])
    term_payload = term_schema.dump(term)
    logger.info(f'This is term payload: {term_payload}')
    put(term_payload)

    # Insert the term into DynamoDB.
    # [6/30/2021] This is done by a lambda:
    # insert_record(item=term_payload)

    # Send the term off to SQS to be further processed.
    send_to_queue(item=term_payload)

    # Add to the Elasticsearch index.
    # [6/30/2021] This is done by a lambda:
    # add(os.getenv('ELASTICSEARCH_INDEX_NAME'), term)

    return redirect(url_for('web.index'))
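# --- Example: one possible shape for send_to_queue ---
# scrape_term only calls send_to_queue(item=...); this is a hedged boto3 sketch
# assuming the payload is JSON-serialized onto an SQS queue whose URL comes
# from an environment variable. The variable name is an assumption.
import json
import os
import boto3

def send_to_queue(item):
    sqs = boto3.client('sqs')
    sqs.send_message(
        QueueUrl=os.environ['SQS_QUEUE_URL'],  # assumed configuration
        MessageBody=json.dumps(item))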
def patch(self, document_guid=None):
    if document_guid is None:
        return self.create_error_payload(400, 'Must specify document GUID in PATCH'), 400

    file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
    if file_path is None or not os.path.lexists(file_path):
        return self.create_error_payload(
            404, 'PATCH sent for an upload that does not exist'), 404

    request_offset = int(request.headers.get('Upload-Offset', 0))
    file_offset = cache.get(FILE_UPLOAD_OFFSET(document_guid))
    if request_offset != file_offset:
        return self.create_error_payload(
            409, "Offset in request does not match uploaded file's offset"), 409

    chunk_size = request.headers.get('Content-Length')
    if chunk_size is None:
        return self.create_error_payload(400, 'No Content-Length header in request'), 400
    chunk_size = int(chunk_size)

    new_offset = file_offset + chunk_size
    file_size = cache.get(FILE_UPLOAD_SIZE(document_guid))
    if new_offset > file_size:
        return self.create_error_payload(
            413, 'The uploaded chunk would put the file above its declared file size.'), 413

    try:
        with open(file_path, "r+b") as f:
            f.seek(file_offset)
            f.write(request.data)
    except IOError:
        return self.create_error_payload(500, 'Unable to write to file'), 500

    if new_offset == file_size:
        # File transfer complete.
        doc = DocumentManager.find_by_document_manager_guid(document_guid)
        doc.upload_completed_date = datetime.now()
        doc.save()
        cache.delete(FILE_UPLOAD_SIZE(document_guid))
        cache.delete(FILE_UPLOAD_OFFSET(document_guid))
        cache.delete(FILE_UPLOAD_PATH(document_guid))
    else:
        # File upload still in progress.
        cache.set(FILE_UPLOAD_OFFSET(document_guid), new_offset, TIMEOUT_24_HOURS)

    response = make_response("", 204)
    response.headers['Tus-Resumable'] = TUS_API_VERSION
    response.headers['Tus-Version'] = TUS_API_SUPPORTED_VERSIONS
    response.headers['Upload-Offset'] = new_offset
    response.headers['Access-Control-Expose-Headers'] = "Tus-Resumable,Tus-Version,Upload-Offset"
    return response
def get(self):
    token_guid = request.args.get('token', '')
    attachment = request.args.get('as_attachment', None)
    doc_guid = cache.get(DOWNLOAD_TOKEN(token_guid))
    cache.delete(DOWNLOAD_TOKEN(token_guid))

    if not doc_guid:
        raise BadRequest('Valid token required for download')

    doc = Document.query.filter_by(document_guid=doc_guid).first()
    if not doc:
        raise NotFound('Could not find the document corresponding to the token')

    current_app.logger.debug(attachment)
    if attachment is not None:
        attach_style = attachment == 'true'
    else:
        attach_style = '.pdf' not in doc.file_display_name.lower()

    return send_file(filename_or_fp=doc.full_storage_path,
                     attachment_filename=doc.file_display_name,
                     as_attachment=attach_style)
def patch(self, document_guid):
    file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
    if file_path is None or not os.path.lexists(file_path):
        raise NotFound('PATCH sent for an upload that does not exist')

    request_offset = int(request.headers.get('Upload-Offset', 0))
    file_offset = cache.get(FILE_UPLOAD_OFFSET(document_guid))
    if request_offset != file_offset:
        raise Conflict("Offset in request does not match uploaded file's offset")

    chunk_size = request.headers.get('Content-Length')
    if chunk_size is None:
        raise BadRequest('No Content-Length header in request')
    chunk_size = int(chunk_size)

    new_offset = file_offset + chunk_size
    file_size = cache.get(FILE_UPLOAD_SIZE(document_guid))
    if new_offset > file_size:
        raise RequestEntityTooLarge(
            'The uploaded chunk would put the file above its declared file size.')

    try:
        with open(file_path, "r+b") as f:
            f.seek(file_offset)
            f.write(request.data)
    except IOError:
        raise InternalServerError('Unable to write to file')

    if new_offset == file_size:
        # File transfer complete.
        doc = Document.find_by_document_guid(document_guid)
        doc.upload_completed_date = datetime.utcnow()
        doc.save()
        cache.delete(FILE_UPLOAD_SIZE(document_guid))
        cache.delete(FILE_UPLOAD_OFFSET(document_guid))
        cache.delete(FILE_UPLOAD_PATH(document_guid))
    else:
        # File upload still in progress.
        cache.set(FILE_UPLOAD_OFFSET(document_guid), new_offset, TIMEOUT_24_HOURS)

    response = make_response('', 204)
    response.headers['Tus-Resumable'] = TUS_API_VERSION
    response.headers['Tus-Version'] = TUS_API_SUPPORTED_VERSIONS
    response.headers['Upload-Offset'] = new_offset
    response.headers['Access-Control-Expose-Headers'] = "Tus-Resumable,Tus-Version,Upload-Offset"
    return response
def delete(self):
    # Check the request size so a large body won't be processed.
    if request.content_length > 350:
        raise BadRequest('Request body too large')

    document_guid = request.get_data(as_text=True)
    if not document_guid:
        raise BadRequest('Must supply a document GUID')

    doc = Document.find_by_document_guid(document_guid)
    if not doc or doc.submitted:
        raise NotFound(f'Upload not found with GUID {document_guid}')

    try:
        os.remove(doc.full_storage_path)
    except IOError:
        raise InternalServerError('Unable to delete file')

    doc.delete()
    cache.delete(FILE_UPLOAD_SIZE(document_guid))
    cache.delete(FILE_UPLOAD_OFFSET(document_guid))
    cache.delete(FILE_UPLOAD_PATH(document_guid))
def put(self, mine_no_or_guid):
    data = self.parser.parse_args()
    tenure = data['tenure_number_id']
    lat = data['latitude']
    lon = data['longitude']
    mine_name = data['name'].strip() if data['name'] else None
    mine_note = data['note']
    status = data['mine_status']
    major_mine_ind = data['major_mine_ind']
    region = data['mine_region']

    if (not tenure and not (lat and lon) and not mine_name and not mine_note
            and not status and not region and major_mine_ind is None):
        self.raise_error(400, 'Error: No fields filled.')

    mine = Mine.find_by_mine_no_or_guid(mine_no_or_guid)
    if not mine:
        return self.create_error_payload(404, 'Mine not found'), 404

    # Mine detail
    if mine_name or mine_note or major_mine_ind is not None:
        try:
            mine.update_user = self.get_update_user()
            if mine_name:
                if mine.mine_name != mine_name:
                    self.throw_error_if_mine_exists(mine_name)
                mine.mine_name = mine_name
            mine.mine_note = mine_note
            if major_mine_ind is not None:
                mine.major_mine_ind = major_mine_ind
            if region:
                mine.mine_region = region
        except AssertionError as e:
            self.raise_error(400, 'Error: {}'.format(e))
        mine.save()

    # Tenure validation
    if tenure:
        tenure_exists = MineralTenureXref.find_by_tenure(tenure)
        if tenure_exists and tenure_exists.mine_guid == mine.mine_guid:
            self.raise_error(400, 'Error: Field tenure_id already exists for this mine.')
        try:
            tenure = MineralTenureXref(
                mineral_tenure_xref_guid=uuid.uuid4(),
                mine_guid=mine.mine_guid,
                tenure_number_id=tenure,
                **self.get_create_update_dict())
        except AssertionError as e:
            self.raise_error(400, 'Error: {}'.format(e))
        tenure.save()

    # Location
    if (lat and not lon) or (lon and not lat):
        self.raise_error(400, 'latitude and longitude must both be empty, or both provided')
    if mine.mine_location:
        # Update the existing record.
        if 'latitude' in data:
            mine.mine_location.latitude = lat
        if 'longitude' in data:
            mine.mine_location.longitude = lon
        mine.mine_location.save()
    if lat and lon and not mine.mine_location:
        location = MineLocation(
            mine_location_guid=uuid.uuid4(),
            mine_guid=mine.mine_guid,
            latitude=lat,
            longitude=lon,
            **self.get_create_update_dict())
        location.save()
        cache.delete(MINE_MAP_CACHE)

    # Status validation
    if status:
        self.mine_status_processor(status, mine.mine_guid)
    return mine.json()
def patch(self, document_guid):
    # Get and validate the file path (not required if the object store is enabled).
    file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
    if not Config.OBJECT_STORE_ENABLED and (file_path is None
                                            or not os.path.lexists(file_path)):
        raise NotFound('File does not exist')

    # Get and validate the upload offset.
    request_offset = int(request.headers.get('Upload-Offset', 0))
    file_offset = cache.get(FILE_UPLOAD_OFFSET(document_guid))
    if request_offset != file_offset:
        raise Conflict("Upload offset in request does not match the file's upload offset")

    # Get and validate the content length and the expected new upload offset.
    chunk_size = request.headers.get('Content-Length')
    if chunk_size is None:
        raise BadRequest('No Content-Length header in request')
    chunk_size = int(chunk_size)
    new_offset = file_offset + chunk_size
    file_size = cache.get(FILE_UPLOAD_SIZE(document_guid))
    if new_offset > file_size:
        raise RequestEntityTooLarge(
            'The uploaded chunk would put the file above its declared file size')

    # If the object store is enabled, send the patch request through TUSD to the object store.
    if Config.OBJECT_STORE_ENABLED:
        object_store_upload_resource = cache.get(OBJECT_STORE_UPLOAD_RESOURCE(document_guid))
        url = f'{Config.TUSD_URL}{object_store_upload_resource}'
        headers = {key: value for (key, value) in request.headers if key != 'Host'}
        resp = requests.patch(url=url, headers=headers, data=request.data)
        if resp.status_code not in [requests.codes.ok, requests.codes.no_content]:
            message = (f'Cannot upload file. Object store responded with '
                       f'{resp.status_code} ({resp.reason}): {resp._content}')
            current_app.logger.error(f'PATCH resp.request:\n{resp.request.__dict__}')
            current_app.logger.error(f'PATCH resp:\n{resp.__dict__}')
            current_app.logger.error(message)
            raise BadGateway(message)
    # Else, write the content to the file in the file system.
    else:
        try:
            with open(file_path, 'r+b') as f:
                f.seek(file_offset)
                f.write(request.data)
        except IOError as e:
            current_app.logger.error(e)
            raise InternalServerError('Unable to write to file')

    # If the file upload is complete, set the upload completion date and delete cached data.
    if new_offset == file_size:
        document = Document.find_by_document_guid(document_guid)
        document.upload_completed_date = datetime.utcnow()
        document.save()
        cache.delete(FILE_UPLOAD_SIZE(document_guid))
        cache.delete(FILE_UPLOAD_OFFSET(document_guid))
        cache.delete(FILE_UPLOAD_PATH(document_guid))
        cache.delete(OBJECT_STORE_PATH(document_guid))
        cache.delete(OBJECT_STORE_UPLOAD_RESOURCE(document_guid))
    # Else, the file upload is still in progress; update its upload offset in the cache.
    else:
        cache.set(FILE_UPLOAD_OFFSET(document_guid), new_offset, TIMEOUT_24_HOURS)

    response = make_response('', 204)
    response.headers['Tus-Resumable'] = TUS_API_VERSION
    response.headers['Tus-Version'] = TUS_API_SUPPORTED_VERSIONS
    response.headers['Upload-Offset'] = new_offset
    response.headers['Access-Control-Expose-Headers'] = 'Tus-Resumable,Tus-Version,Upload-Offset'
    return response
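# --- Example: a client driving the tus PATCH endpoints above ---
# A minimal sketch of the resumable-upload loop: send each chunk at the current
# Upload-Offset and continue from the Upload-Offset the server returns. The
# upload URL, tus version string, and chunk size are assumptions.
import requests

def upload_in_chunks(upload_url, data, chunk_size=1024 * 1024):
    offset = 0
    while offset < len(data):
        resp = requests.patch(
            upload_url,
            data=data[offset:offset + chunk_size],
            headers={
                'Tus-Resumable': '1.0.0',
                'Upload-Offset': str(offset),
                'Content-Type': 'application/offset+octet-stream',
            })
        resp.raise_for_status()  # a 409 would mean our offset is stale
        offset = int(resp.headers['Upload-Offset'])
    return offset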
def setup_info(test_client):
    date = get_date_one_month_ahead()
    expected_data = {
        'last_inspection': '2018-12-10 18:36',
        'last_inspector': 'TEST',
        'num_open_orders': 3,
        'num_overdue_orders': 1,
        'advisories': 3,
        'warnings': 2,
        'section_35_orders': 2,
        'open_orders': [
            {
                'order_no': '162409-1',
                'code_violation': '1.1.2',
                'report_no': 162409,
                'inspector': 'TEST',
                'due_date': date,
                'overdue': False,
            },
            {
                'order_no': '162409-2',
                'code_violation': '2.4.2',
                'report_no': 162409,
                'inspector': 'TEST',
                'due_date': date,
                'overdue': False,
            },
            {
                'order_no': '100018-1',
                'code_violation': 'C.8 (a) (i)',
                'report_no': 100018,
                'inspector': 'TEST',
                'due_date': '2018-12-10 13:52',
                'overdue': True,
            },
        ]
    }

    NRIS_Mock_data = [{
        'assessmentId': 100018,
        'assessmentDate': '2018-11-01 13:36',
        'assessmentType': 'INSPECTION',
        'assessor': 'IDIR\\TEST',
        'inspection': {
            'stops': [{
                'stopOrders': [{
                    'orderStatus': 'Open',
                    'orderCompletionDate': '2018-12-10 13:52',
                    'orderAuthoritySection': 'Section 15(4)',
                    'orderPermits': [{
                        'permitSectionNumber': 'C.8 (a) (i)',
                    }],
                }],
                'stopAdvisories': [{'advisoryDetail': 'test'},
                                   {'advisoryDetail': 'test'},
                                   {'advisoryDetail': 'test'}],
                'stopWarnings': [{'warningDetail': 'test'},
                                 {'warningDetail': 'test'}],
            }]
        }
    }, {
        'assessmentId': 162409,
        'assessmentDate': '2018-12-10 18:36',
        'assessmentType': 'INSPECTION',
        'assessor': 'IDIR\\TEST',
        'inspection': {
            'stops': [{
                'stopOrders': [{
                    'orderStatus': 'Open',
                    'orderCompletionDate': date,
                    'orderAuthoritySection': 'Section 15(4)',
                    'orderLegislations': [{
                        'section': '1.1.2',
                    }],
                }, {
                    'orderStatus': 'Open',
                    'orderCompletionDate': date,
                    'orderAuthoritySection': 'Section 35',
                    'orderLegislations': [{
                        'section': '2.4.2',
                    }],
                }],
                'stopAdvisories': [],
                'stopWarnings': [],
            }]
        }
    }, {
        'assessmentId': 90519,
        'assessmentDate': '2017-12-01 18:36',
        'assessmentType': 'INSPECTION',
        'assessor': 'IDIR\\TEST',
        'inspection': {
            'stops': [{
                'stopOrders': [{
                    'orderStatus': 'Closed',
                    'orderCompletionDate': '2018-01-15 13:52',
                    'orderAuthoritySection': 'Section 15(4)',
                }, {
                    'orderStatus': 'Closed',
                    'orderCompletionDate': '2018-01-14 13:52',
                    'orderAuthoritySection': 'Section 35',
                }],
                'stopAdvisories': [{'advisoryDetail': 'test'},
                                   {'advisoryDetail': 'test'},
                                   {'advisoryDetail': 'test'}],
                'stopWarnings': [{'warningDetail': 'test'},
                                 {'warningDetail': 'test'},
                                 {'warningDetail': 'test'}],
            }]
        }
    }]

    yield dict(NRIS_Mock_data=NRIS_Mock_data, expected_data=expected_data)

    cache.delete(NRIS_TOKEN)
def setup_info(test_client): date = get_date_one_month_ahead() expected_data = { 'last_inspection': "2018-09-17T14:00:00", 'last_inspector': "APOOLEY", 'num_open_orders': 0, 'num_overdue_orders': 0, #'section_35_orders': 0, no aggregate, FE filters will show where violation = 35 'all_time': { 'num_inspections': 0, 'num_advisories': 0, 'num_warnings': 0, 'num_requests': 0, }, 'last_12_months': { 'num_inspections': 0, 'num_advisories': 3, 'num_warnings': 0, 'num_requests': 0, }, 'current_fiscal': { 'num_inspections': 0, 'num_advisories': 0, 'num_warnings': 0, 'num_requests': 0, }, 'orders': [ { 'order_no': '162409-1', 'code_violation': '1.1.2', 'report_no': 162409, 'inspector': 'TEST', 'due_date': date, 'overdue': False, }, { 'order_no': '162409-2', 'code_violation': '2.4.2', 'report_no': 162409, 'inspector': 'TEST', 'due_date': date, 'overdue': False, }, { 'order_no': '100018-1', 'code_violation': 'C.8 (a) (i)', 'report_no': 100018, 'inspector': 'TEST', 'due_date': '2018-12-10 13:52', 'overdue': True, }, ] } NRIS_Mock_data = { "records": [{ "external_id": 102271, "inspection_date": "2018-09-17T14:00:00", "completed_date": "2018-10-19T10:29:00", "inspection_status_code": "Complete", "inspection_type_code": "Health and Safety", "inspection_report_sent_date": "2018-09-21T12:17:00", "business_area": "EMPR", "mine_no": "0900002", "inspector_idir": "IDIR\\APOOLEY", "inspection_introduction": "<p>Bullmoose Mine has been permanently closed since 2002. The purpose of the inspection was to accompany geotechnical inspectors Victor Marques and Jennifer Brash. Environmental sampling and upkeep activities are carried out by Teck Resources. No orders related to health and safety resulted from this inspection,</p>", "inspection_preamble": None, "inspection_closing": None, "officer_notes": None, "documents": [{ "external_id": 143278, "document_date": "2018-09-24T09:20:00", "document_type": "Report", "file_name": "EMPR_InspectionReport_102271.pdf", "comment": "INSPECTION Report" }, { "external_id": 143302, "document_date": "2018-09-24T09:59:00", "document_type": "Report", "file_name": "EMPR_InspectionReport_102271.docx", "comment": "INSPECTION Report" }, { "external_id": 147238, "document_date": "2018-10-19T10:28:00", "document_type": "Mine Manager Response", "file_name": "EMPR_InspectionReport_102271 - Response.pdf", "comment": "Manager response" }], "inspected_locations": [{ "inspected_location_type": "stop", "location": { "description": "Former tailings pond, settling ponds", "notes": None, "latitude": None, "longitude": None, "utm_easting": None, "utm_northing": None, "zone_number": None, "zone_letter": None }, "documents": [], "advisory_details": [], "request_details": [], "stop_details": [], "warning_details": [] }] }, { "external_id": 102585, "inspection_date": "2018-09-17T13:56:00", "completed_date": "2018-10-22T15:14:00", "inspection_status_code": "Complete", "inspection_type_code": "Geotechnical", "inspection_report_sent_date": "2018-09-25T00:00:00", "business_area": "EMPR", "mine_no": "0900002", "inspector_idir": "IDIR\\JBRASH", "inspection_introduction": None, "inspection_preamble": None, "inspection_closing": None, "officer_notes": None, "documents": [{ "external_id": 145110, "document_date": "2018-10-04T15:38:00", "document_type": "Final Report", "file_name": "2018 09 25_Bullmoose_EMPR Geotechnical Inspection.pdf", "comment": "Inspection Report" }, { "external_id": 147662, "document_date": "2018-10-22T15:05:00", "document_type": "Mine Manager Response", "file_name": "2018 10 10 Response to the 
2018 09 25 Geotechnical Inspection.pdf", "comment": "MM response" }], "inspected_locations": [{ "inspected_location_type": "stop", "location": { "description": "Sediment Pond 2", "notes": None, "latitude": None, "longitude": None, "utm_easting": None, "utm_northing": None, "zone_number": None, "zone_letter": None }, "documents": [], "advisory_details": [{ "detail": "Advisory 1: \nPlease submit the 2018 Sediment Ponds OMS manual, incorporating the updates recommended in the 2017 DSI, to the Chief Inspector upon completion in Q3 2018." }, { "detail": "Advisory 2: \nPlease submit the Sediment Pond 2 spillway design to EMPR upon completion." }], "request_details": [], "stop_details": [], "warning_details": [] }, { "inspected_location_type": "stop", "location": { "description": "Sediment Pond 3", "notes": None, "latitude": None, "longitude": None, "utm_easting": None, "utm_northing": None, "zone_number": None, "zone_letter": None }, "documents": [], "advisory_details": [{ "detail": "Advisory 3: \nThe crest of Sediment Pond 3 should be graded such that ponding of water is eliminated." }], "request_details": [{ "detail": "nformation Request 1: \nPlease inform EMPR of the results of the SP3 spillway flooding assessment (DSI recommendation SP-2017-03), upon completion in Q3 2018.", "response": "As part of looking at long term erosion protection issues related to Sediment Pond 3 spillway,\nKCB presented Teck with an options review. As part of this review, it was concluded that if a\n200 yr return period flood of West Bullmoose Creek were to occur, there is risk that the water\nlevel in the creek would be high enough to flow through the spillway and into Sediment Pond 3.\nHowever, neither the resulting increase in pond level, nor the decrease in storage capacity of\nthe facility would represent a risk to the dam structure. KCB believe the actual pond sediments\nare some distance from the inlet of the spillway and would be in a relatively ?still? area of the\npond (as it is dead ended) so the potential for erosion of the disturbed sediments back into the\ncreek are considered minimal.", "respond_date": None }], "stop_details": [{ "detail": "Order 1 (Inspection of Mines)\nIssued Pursuant To: Mines Act Section 15(4)\n\nObservation of Contravention:\nRiprap installed to protect the toe of Sediment Pond 3 dam from creek erosion appeared damaged at the time of inspection. The 2017 DSI recommends that the erosion be repaired to protect against future recurrence (SP-2012-01), with a target deadline of Q3 2018. \n\nRemedial Action/Results To Be Obtained:\nThe Mine shall complete repairs of the erosion protection at the toe of the SP3 dam, in accordance with the requirements of the Engineer of Record. Pursuant to Code clause 10.5.1, the Mine shall submit to the Chief Inspector as-built documentation for the Sediment Pond 3 erosion protection and shall ensure that the Engineer of Record certifies the construction.\n\nRectify By/Completion Date: 2018-12-31", "stop_type": "Inspection Mines", "response_status": "Accepted", "stop_status": "Closed", "observation": "Order 1 (Inspection of Mines)\nIssued Pursuant To: Mines Act Section 15(4)\n\nObservation of Contravention:\nRiprap installed to protect the toe of Sediment Pond 3 dam from creek erosion appeared damaged at the time of inspection. The 2017 DSI recommends that the erosion be repaired to protect against future recurrence (SP-2012-01), with a target deadline of Q3 2018. 
\n\nRemedial Action/Results To Be Obtained:\nThe Mine shall complete repairs of the erosion protection at the toe of the SP3 dam, in accordance with the requirements of the Engineer of Record. Pursuant to Code clause 10.5.1, the Mine shall submit to the Chief Inspector as-built documentation for the Sediment Pond 3 erosion protection and shall ensure that the Engineer of Record certifies the construction.\n\nRectify By/Completion Date: 2018-12-31", "response": "as-built docs received and filed. MM response:\n\"The observed contravention is incorrect as stated. The riprap observed is at East end of the\ndam not the riprap that was referred to in SP-2012-01 in the 2017 DSI.\nThe riprap observed that appeared damaged was riprap that was installed in 2015. The Certified\nas-built report for this riprap entitled ?Bullmoose SP3 Erosion Repair Construction and\nEnvironmental Monitoring Summary Report? dated December 2015 is being submitted to EMPR\nin a separate submission using our Secure File Transfer system due to the large size of the\nreport. Section 5.3 of the report describes modifications that were made to avoid disturbing the\ncreek bed during installation of the geotextile liner. The liner was anchored by folding the\ngeotextile material back on top of the riprap and then placing additional rock overtop as shown\nin drawing D-001, Detail 1 of the report. In the report, Photo 4 shows how this approach left\ngeotextile visibly exposed at the edge of the creek so it can be easily mistaken to look like some\nof the riprap has been eroded. The EoR inspected this area during the 2018 DSI and had no\nconcerns that any erosion had occurred to the riprap since installation.\nThere is an area west of the riprap discussed above where the creek is gradually eroding the\ncreek bank closer to the dam toe. This is the area referred to SP-2012-01 of the 2017 DSI. Teck\nhad planned on installing riprap in this area this past summer, but during the design process, it\nwas decided to first conduct an options review as part of long term dam closure planning to try\nensure this work was consistent with potential long term closure plans. This has caused a delay\nof one year (to 2019) to complete the design and schedule the riprap installation. The EoR\nrecognized the potential for a delay of installation as stated in the EoR?s recommendations in\nthe 2017 DSI (Recommendation SP-2016-05). The EoR recommended that if installation of the\nriprap was not complete by the end of 2018 that a monitoring program be put into place. Teck\nhas contracted WSP Engineers/Surveyors to install a monitoring system using stakes prior to\nthe end of October 2018 to comply with the 2017 DSI recommendation.\"", "response_received": "2018-10-10T00:00:00", "completion_date": "2018-10-22T15:05:00", "noncompliance_legislations": [], "noncompliance_permits": [], "authority_act": "Mines Act", "authority_act_section": "Section 15(4)", "documents": [] }], "warning_details": [] }] }] } yield dict(NRIS_Mock_data=NRIS_Mock_data, expected_data=expected_data) cache.delete(NRIS_TOKEN)