def show_form():
    """Render the question-categorisation form; on a valid submit, render the prediction."""
    form = app.forms.QuestionForm()
    if not form.validate_on_submit():
        # First GET or failed validation: show the question page again.
        print("Erreur de validation")
        return flask.render_template('predict.html',
                                     title='Question à catégoriser', form=form)
    # Build the model from the submitted form and run the prediction.
    model = app.model.Model(form)
    model.predict()
    return flask.render_template('predict.html', title='Résultat',
                                 form=form, model=model)
def go():
    """Flask view: read the user's query, run the model once and render go.html.

    Reads the ``query`` URL parameter, wraps it in a one-row DataFrame whose
    single column ``LotArea`` matches the training schema, and renders the
    predicted label.
    """
    # save user input in query
    query = request.args.get('query', '')
    query_df = pd.DataFrame({"LotArea": [query]})
    # Run the model once and reuse the result (the original predicted twice:
    # once for the debug print and once for the label).
    predictions = model.predict(query_df)
    print(predictions)
    # use model to predict classification for query
    classification_label = predictions[0]
    # This will render the go.html Please see that file.
    return render_template(
        'go.html',
        query=query,
        classification_result={"result": classification_label})
def test_prediction():
    """Smoke-test the saved model: a known feature vector must map to class 0."""
    features = [1, 0, 0, 0.835, 1, 40.92]
    assert model.predict([features])[0] == 0
def show_form():
    """Render the flight-details form; on a valid submit, render the prediction result."""
    # Flight-description form.
    form = app.forms.FlightDetailsForm()
    print(form.airline.choices)
    if not form.validate_on_submit():
        # First GET or failed validation: show the parameters page again.
        print("Erreur de validation")
        return flask.render_template('predict.html',
                                     title='Paramètres du vol', form=form)
    model = app.model.Model(form)
    model.predict()
    return flask.render_template('predict.html', title='Résultat',
                                 form=form, model=model)
def post(self):
    """Decode the JSON-encoded image from the request body, preprocess it,
    and return the first row of the model's prediction as a plain list."""
    parsed = parser.parse_args()
    raw = json.loads(parsed['image'])
    img = process_image(np.array(raw))
    # TF 1.x: predictions must run inside the graph captured at model-load time.
    with graph.as_default():
        result = model.predict(img)
    return result.tolist()[0]
def user_input(session_id):
    """Predict on the user's uploaded image and render the top-3 rounded scores."""
    img = process_new(model_params, True)
    first_row = model.predict(img)[0]
    # Keep only the first three scores, rounded for display.
    top_scores = [round(v, 2) for v in first_row[:3]]
    return render_template("pages/input.html",
                           image="../static/images/user_input_image",
                           preds=top_scores)
def leaf_predict(np_image):
    """Run the ResNet50-based leaf model on one image array and return the scalar score."""
    import keras.backend.tensorflow_backend as tb
    # Workaround for the Keras-under-Flask threading issue: request threads
    # don't get the symbolic scope set by default.
    tb._SYMBOLIC_SCOPE.value = True
    from keras.applications.resnet50 import preprocess_input
    # ResNet50 preprocessing, then a leading batch axis for a single image.
    batch = np.expand_dims(preprocess_input(np_image), axis=0)
    return model.predict(batch)[0][0]
def init_input():
    """Render the demo page with predictions for the bundled sample image."""
    img = process_new(model_params, False)
    # TF 1.x: run inference inside the graph the model was loaded under.
    with graph.as_default():
        scores = model.predict(img)[0]
    top_scores = [round(v, 2) for v in scores[:3]]
    return render_template("pages/input.html",
                           image="../static/images/demo2.jpg",
                           preds=top_scores)
def get_score():
    """POST endpoint: accept an uploaded image, segment the score box and OCR it.

    Renders index.html with the extracted text on success; falls through to a
    bare index.html render on GET, a missing file, or failed segmentation.
    """
    if request.method == "POST":
        if request.files:
            # Response scaffold; "success" flips to True once text is extracted.
            data = {"success": False}
            image = request.files["image"]
            image.save(
                os.path.join(app.config["IMAGE_UPLOADS"], image.filename))
            img_link = os.path.join(app.config["IMAGE_UPLOADS"],
                                    image.filename)
            # OpenCV loads BGR; convert to RGB for the model.
            img = cv2.cvtColor(cv2.imread(f'{img_link}'), cv2.COLOR_BGR2RGB)
            resp = img.shape
            print(f'{resp}')
            if resp:
                # Resize to the network input size and build a 1-image batch.
                image_r = cv2.resize(img, (224, 224))
                image_r = np.expand_dims(preprocess_image(
                    image_r, preprocess_input), axis=0)
                print(f'Input image: {image_r.shape}')
                # TF 1.x session/graph must be re-entered from request threads.
                with session.as_default():
                    with session.graph.as_default():
                        p = model.predict(image_r)
                        print(f'Predicitiong shape: {p.shape}')
                # Threshold the predicted mask and scale it back to the original
                # image size; channel 1 appears to hold the region of interest
                # (NOTE(review): confirm mask channel semantics against the model).
                mask = preprocess_mask(np.squeeze(p), threshold)
                mask = cv2.resize(mask, (img.shape[1], img.shape[0]))
                mask = mask[..., 1]
                points = approx_polygon(mask)
                segment = {'data': [], 'points': points, 'mask': mask}
                # A valid score box approximates to exactly 4 corner points.
                if (len(points) == 4):
                    solid_mask = np.zeros((img.shape[0], img.shape[1]))
                    solid_mask = cv2.fillPoly(solid_mask, np.int32([points]),
                                              color=255).astype(np.uint8)
                    segment['data'] = crop_segment(img, solid_mask)
                    print(f"Segment shape {segment['data'].shape}")
                    # Invert the grayscale crop so dark text becomes light for OCR.
                    segment_gray_inv = 255 - cv2.cvtColor(segment['data'],
                                                          cv2.COLOR_RGB2GRAY)
                    text = str(extract_score(segment_gray_inv,
                                             lang='fifa_score'))
                    data["extracted_text"] = text
                    data["success"] = True
                    return render_template("index.html", data=text,
                                           file_name=image.filename)
    return render_template("index.html")
def postInput():
    """JSON API: receive a 28x28 grayscale image under 'picture' and return the raw model output.

    Expects a JSON body like ``{"picture": [...]}``; the values are reshaped to
    the ``(1, 28, 28, 1)`` float32 batch the CNN expects.
    """
    insert_values = request.get_json()
    picture = insert_values['picture']
    print(picture)
    # `features` instead of the original `input`, which shadowed the builtin.
    features = np.array(picture).reshape(1, 28, 28, 1).astype('float32')
    result = model.predict(features)
    return jsonify({'return': str(result)})
def postInput():
    """JSON API: read the four iris measurements from the request body and return the prediction."""
    # Fetch the values sent from the front end.
    insert_values = request.get_json()
    sepal_length = insert_values['sepalLengthCm']
    sepal_width = insert_values['sepalWidthCm']
    petal_length = insert_values['petalLengthCm']
    petal_width = insert_values['petalWidthCm']
    # `features` instead of the original `input`, which shadowed the builtin.
    features = np.array([[sepal_length, sepal_width, petal_length, petal_width]])
    # Run the prediction.
    result = model.predict(features)
    return jsonify({'result': str(result)})
def scan_doc():
    """Run the model over every uploaded .txt file, delete each one after
    scanning, and render the per-document results."""
    results = []
    doc_names = []
    for path in glob.glob(f"{current_app.config['UPLOAD_FOLDER']}/*.txt"):
        with open(path, 'r') as handle:
            contents = handle.readlines()
        results.append(model.predict(contents, current_app))
        # Uploads are single-use: remove the file once scanned.
        os.remove(path)
        # Display name: dashes stood in for dots in the upload, and the
        # extension is dropped.
        cleaned = path.replace("-", ".").replace(".txt", "")
        doc_names.append(os.path.basename(cleaned))
    return render_template('scan.html', results=results, doc_names=doc_names)
def go():
    """Flask view: build a one-row DataFrame from four URL parameters and render the prediction.

    Raises ValueError (unhandled) if any of the parameters is missing or not an
    integer, matching the original behavior.
    """
    # save user input in query
    age = int(request.args.get('age', ''))
    ward = int(request.args.get('ward', ''))
    semi = int(request.args.get('semi', ''))
    intensive = int(request.args.get('intensive', ''))
    response_list = [age, ward, semi, intensive]
    # Column names must match the training schema exactly (including the
    # original 'addmited' spelling).
    cols = [
        'Patient age quantile',
        'Patient addmited to regular ward (1=yes, 0=no)',
        'Patient addmited to semi-intensive unit (1=yes, 0=no)',
        'Patient addmited to intensive care unit (1=yes, 0=no)'
    ]
    # Pair each response with its column name in a single comprehension.
    df_dict = {c: [l] for l, c in zip(response_list, cols)}
    query_df = pd.DataFrame(df_dict)
    # Run the model once and reuse the result (the original predicted twice:
    # once for the debug print and once for the label).
    predictions = model.predict(query_df)
    print(predictions)
    # use model to predict classification for query
    classification_label = predictions[0]
    # This will render the go.html Please see that file.
    return render_template(
        'go.html',
        query=
        f"Age: {age} , Admitted to ward: {ward} , Admitted to semi-intensive unit: {semi} , Admitted to intensive care unit: {intensive}",
        classification_result={"result": classification_label})
def download():
    """Download the image at the 'url' request argument, classify it with
    MobileNet and render the top ImageNet label.

    The image is persisted under the app's instance folder with a
    timestamp-based name.
    """
    url = request.args['url']
    r = requests.get(url)
    # Timestamped name avoids collisions between concurrent downloads.
    imgnamedate = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    with app.open_instance_resource(imgnamedate + '.jpg', 'wb') as f:
        f.write(r.content)
    # Re-open for reading; the original leaked this handle, so close it via `with`.
    with app.open_instance_resource(imgnamedate + '.jpg') as imagepath:
        img = image.load_img(imagepath, target_size=(224, 224))
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)
    x = mobilenet.preprocess_input(x)
    global graph
    with graph.as_default():
        # Predict once (the original ran the identical prediction twice).
        prediction = model.predict(x)
    results = imagenet_utils.decode_predictions(prediction)
    userimage = imgnamedate + '.jpg'
    # results[0][0] is (class_id, class_name, score); show the readable name.
    rst = results[0][0][1]
    return render_template('uploaded.html', title='Success',
                           predictions=rst, user_image=userimage)
def extract_text():
    """GET endpoint: locate the score box in `img_file` and return the OCR'd text as JSON.

    Runs only when the request carries no JSON body and an `img_file` query
    parameter is present; always returns the `data` dict as JSON.
    """
    # "success" flips to True only when text was extracted.
    data = {"success": False}
    params = request.json
    if (params == None):
        img_file = request.args.get('img_file')
        print(f'params: {img_file}')
        # if parameters are found, return a prediction
        if (img_file != None):
            # OpenCV loads BGR; convert to RGB for the model.
            img = cv2.cvtColor(cv2.imread(f'{images_path}/{img_file}'),
                               cv2.COLOR_BGR2RGB)
            # Resize to the network input size and build a 1-image batch.
            image_r = cv2.resize(img, (224, 224))
            image_r = np.expand_dims(preprocess_image(image_r,
                                                      preprocess_input),
                                     axis=0)
            print(f'Input image: {image_r.shape}')
            # TF 1.x session/graph must be re-entered from request threads.
            with session.as_default():
                with session.graph.as_default():
                    p = model.predict(image_r)
                    print(f'Predicitiong shape: {p.shape}')
            # Threshold the predicted mask and rescale to the original size;
            # channel 1 appears to carry the region of interest
            # (NOTE(review): confirm mask channel semantics against the model).
            mask = preprocess_mask(np.squeeze(p), threshold)
            mask = cv2.resize(mask, (img.shape[1], img.shape[0]))
            mask = mask[..., 1]
            points = approx_polygon(mask)
            segment = {'data': [], 'points': points, 'mask': mask}
            # A valid score box approximates to exactly 4 corner points.
            if (len(points) == 4):
                solid_mask = np.zeros((img.shape[0], img.shape[1]))
                solid_mask = cv2.fillPoly(solid_mask, np.int32([points]),
                                          color=255).astype(np.uint8)
                segment['data'] = crop_segment(img, solid_mask)
                print(f"Segment shape {segment['data'].shape}")
                # Invert the grayscale crop so dark text becomes light for OCR.
                segment_gray_inv = 255 - cv2.cvtColor(segment['data'],
                                                      cv2.COLOR_RGB2GRAY)
                text = extract_score(segment_gray_inv, lang='fifa_score')
                data["extracted_text"] = str(text)
                data["success"] = True
    # return a response in json format
    return jsonify(data)
def upload_file():
    """Save the POSTed image, classify it with MobileNet and render the top label."""
    if request.method == 'POST':
        upload = request.files['file']
        saved_path = os.path.join(app.config['UPLOAD_FOLDER'], upload.filename)
        upload.save(saved_path)
        # Load at MobileNet's expected input size and build a 1-image batch.
        pil_img = image.load_img(saved_path, target_size=(224, 224))
        batch = mobilenet.preprocess_input(
            np.expand_dims(image.img_to_array(pil_img), axis=0))
        global graph
        with graph.as_default():
            scores = model.predict(batch)
        decoded = imagenet_utils.decode_predictions(scores)
        # decoded[0][0] is (class_id, class_name, score); show the readable name.
        top_label = decoded[0][0][1]
        return render_template('uploaded.html', title='Success',
                               predictions=top_label,
                               user_image=upload.filename)
def home():
    """Render the evaluation form; on a valid submit, show the model's prediction."""
    form = MyForm()
    evaluation_result = ''
    if form.validate_on_submit():
        submitted = form.data
        # One-row frame whose column names/types match the training features.
        features = pd.DataFrame({
            'projects': [submitted['projects']],
            'avg_hours': [submitted['avg_hours']],
            'time': [submitted['time_spend']],
            'emp_identity': [int(submitted['emp_identity'])],
            'emp_role': [int(submitted['emp_role'])],
            'percent_remote': [submitted['percent_remote']],
        })
        evaluation_result = model.predict(features)[0]
    return render_template('home/home.html', form=form,
                           evaluation_result=evaluation_result)
def check_all_mailboxes() -> None:
    """Scheduled task: scan every active mailbox for new unseen mails and record phishing ones.

    For each active address: log in via IMAP, fetch unseen mails since the
    last check, classify each with the model, persist newly detected phishing
    mails, update the address's counters, and send a notification mail when
    anything was found.
    """
    logger.info("Routine task: checking all active mailboxes..")
    all_active_mailboxes = db.session.query(EmailAddress)\
        .filter(EmailAddress.active == True)\
        .all()
    if all_active_mailboxes:
        logger.info("Checking through all active mailboxes")
        for mailaddr in all_active_mailboxes:
            try:
                # Resolve the IMAP server from the address's domain and log in.
                imap_svr = get_imap_svr(mailaddr.get_email_address())
                logger.info("Email: %s -- IMAP: %s"\
                    , mailaddr.get_email_address(), imap_svr)
                mailbox = MailBox(imap_svr)
                mailbox.login(mailaddr.get_email_address()\
                    , mailaddr.get_decrypted_email_password())
                logger.info("Successfully logged in via IMAP")
            except ConnectionRefusedError:
                # Unreachable mailbox: skip it, keep checking the rest.
                logger.error("Unable to connect to mailbox for %s"\
                    , mailaddr.get_email_address())
                continue
            # New addresses have no last-updated timestamp; default to the
            # last 24 hours.
            last_updated = mailaddr.get_last_updated() \
                if mailaddr.get_last_updated() else datetime.today() - timedelta(days=1)
            mailbox.folder.set("INBOX")
            logger.info("Fetching mails..")
            # Only unseen mails newer than the last check; bulk fetch without
            # marking anything as read.
            check_criteria = AND(date_gte=last_updated.date(), seen=False)
            all_mails = mailbox.fetch(check_criteria, reverse=True\
                , mark_seen=False, bulk=True)
            logger.info("Mails fetched..")
            detection_count = 0
            mail_check_count = 0
            phishing_mails_detected = []
            for mail in all_mails:
                try:
                    sender = mail.from_
                except HeaderParseError:
                    # Malformed From header: keep scanning with a placeholder.
                    logger.error("HeaderParseError, unparseable msg.from_."\
                        " Setting sender as INVALID_SENDER")
                    sender = 'INVALID_SENDER'
                # Skip mails older than the cutoff or sent by the owner itself.
                if (check_valid_time(last_updated, mail.date))\
                        and check_valid_sender(sender, mailaddr.get_email_address()):
                    mail_check_count += 1
                    mail_item = EmailData(mail.subject, sender, mail.attachments\
                        , (mail.text + mail.html), mail.headers)
                    mail_item.generate_features()
                    # Classifier verdict: 1 == phishing.
                    result = model.predict(mail_item.repr_in_arr())
                    logger.info("Checking mail: %s -- Result: %s"\
                        , mail_item.get_subject(), result)
                    if result == 1:
                        logger.info("Phishing mail detected, subject: %s"\
                            , mail.subject)
                        # Avoid duplicate DB rows for an already-recorded mail.
                        mail_exist = check_p_mail_exist(mailaddr.get_email_id()\
                            , mail.subject, mail_item.get_content())
                        if not mail_exist:
                            phishing_mails_detected.append(Mail(sender, \
                                mail.date.astimezone(timezone('Asia/Singapore')), mail.subject))
                            detection_count += 1
                            detected_mail = PhishingEmail( \
                                sender_address = sender, \
                                subject = mail.subject, \
                                content = mail_item.get_content(), \
                                created_at = datetime.now(), \
                                receiver_id = mailaddr.get_email_id() )
                            db.session.add(detected_mail)
            logger.info("Updating mailbox last updated from %s to %s",\
                last_updated.strftime("%d-%m-%Y %H:%M:%S"), datetime.now())
            mailaddr.set_last_updated(datetime.now())
            mailaddr.set_phishing_mail_detected(detection_count)
            mailaddr.set_total_mails_checked(mail_check_count)
            logger.info("Finished checking mails.. logging out")
            mailbox.logout()
            db.session.commit()
            if phishing_mails_detected:
                logger.info(
                    "Phishing emails detected in automated scan, sending mail!"
                )
                send_phish_check_notice_context(mailaddr.get_email_address()\
                    , phishing_mails_detected, app)
            else:
                logger.info(
                    "No phishing emails detected in automated scan, moving to next.."
                )
def get(self):
    """Fetch the preprocessed image from the companion endpoint and return the
    model output as a plain list."""
    endpoint = request.url_root + "galaxy_api/process_image"
    payload = requests.get(endpoint).json()
    prediction = model.predict(np.array(payload))
    return prediction.tolist()
def check_phish(mid):
    """On-demand scan: check mailbox with address ID *mid* for new phishing
    mails and render the detection-results page.

    Rejects anonymous/unauthorized users and inactive addresses; otherwise
    logs in via IMAP, classifies unseen mails since the last check, persists
    detections, and notifies the owner.
    """
    phishing_mails = []
    # Retrieves the email address instance
    logger.info("Click-to-check entered..")
    owner_id = get_owner_id_from_email_id(mid)
    # Only the owner of the address may trigger a check.
    if current_user.is_anonymous or not owner_id == current_user.get_id():
        logger.warning("Anonymous or unauthorized user attempting"\
            " phish check of address ID {}!".format(mid))
        return redirect(url_for('index'))
    mailaddr = get_email_address_by_email_id(mid)
    # Redirects back to page if selected email is inactive
    if mailaddr.get_active_status() == False:
        logger.warning("Redirecting.. User selected inactive email address %s"\
            , mailaddr.get_email_address())
        flash("Email is inactive!", 'error')
        return redirect(url_for('dash_email'))
    logger.info("Mailbox selected is %s", mailaddr.get_email_address())
    try:
        # Logs in to mailbox by retrieving the corresponding IMAP server
        imap_svr = get_imap_svr(mailaddr.get_email_address())
        logger.info("Retrieving IMAP server: %s", imap_svr)
        mailbox = MailBox(imap_svr)
        logger.info("Attempting connection..")
        mailbox.login(mailaddr.get_email_address()\
            , mailaddr.get_decrypted_email_password())
        logger.info("Connected to mailbox %s", mailaddr.get_email_address())
    except ConnectionRefusedError:
        logger.error("Unable to connect to mailbox for %s",
                     mailaddr.get_email_address())
        flash("Unable to connect to mailbox, please update your password!",
              'error')
        return redirect(url_for('dash_email'))
    # Retrieves date last updated
    # if new email address is added column is empty
    # sets last_updated to today - 1 day so that mails in the last 24 hours
    # are checked
    last_updated = mailaddr.get_last_updated() \
        if mailaddr.get_last_updated() else datetime.today() - timedelta(days=1)
    # Selects mailbox to Inbox only
    mailbox.folder.set("INBOX")
    logger.info("Fetching mails..")
    # Sets a check criteria so that
    # only mails newer than last_updated and unread mails are checked
    check_criteria = AND(date_gte=last_updated.date(), seen=False)
    """ FOR DEMO USE """
    # Test code to intentionally retrieve backdated emails for demo purposes
    # last_updated = datetime(2020, 12,17, 0, 0, 0)
    # check_criteria = AND(date_gte=[date(2020, 12, 17)], seen=False)
    # Fetch mails from mailbox based on criteria, does not "read" the mail
    # and retrieves in bulk for faster performance at higher computing cost
    all_mails = mailbox.fetch(check_criteria, reverse=True,
                              mark_seen=False, bulk=True)
    logger.info("Mails fetched..")
    # Iterates through the mails that are not sent from the sender's address
    # Creates a EmailData instance for each mail to generate features based on
    # preprocessing logic, passes it into the model - if predict returns 1 it
    # is a detected phish
    # appends the detected mail into a list of Mail (phishing_mails)
    # The purpose of Mail class is for easier display - the values are pulled
    # from the imap_tool's Mail item instead of our EmailData.
    # Inserts all phishing mails to the database
    data = {
        'total_count': 0,
        'detection_count': 0,
        'check_time': datetime.now().strftime('%d-%m-%Y, %H:%M')
    }
    mail_check_count = 0
    for msg in all_mails:
        try:
            sender = msg.from_
        except HeaderParseError:
            # Exception happens when a msg.from_ is malformed resulting in
            # unparseable values. Automatically assume phishing email and add
            # to record. Denote Sender as 'INVALID_SENDER'
            logger.error("HeaderParseError, unparseable msg.from_. \
                Setting sender as INVALID_SENDER")
            sender = 'INVALID_SENDER'
        # Skip mails older than the cutoff or sent by the owner itself.
        if (check_valid_time(last_updated, msg.date)) \
                and check_valid_sender(sender, mailaddr.get_email_address()):
            data['total_count'] += 1
            mail_check_count += 1
            mail_item = EmailData(msg.subject, sender, msg.attachments\
                , (msg.text + msg.html), msg.headers)
            mail_item.generate_features()
            # Classifier verdict: 1 == phishing.
            result = model.predict(mail_item.repr_in_arr())
            logger.info("Checking mail: %s -- Result: %s"\
                , mail_item.get_subject(), result)
            if result == 1:
                logger.info("Phishing mail detected, subject: %s", msg.subject)
                # Avoid duplicate DB rows for an already-recorded mail.
                mail_exist = check_p_mail_exist(mailaddr.get_email_id()\
                    , msg.subject, mail_item.get_content())
                if not mail_exist:
                    phishing_mails.append(Mail(sender, \
                        msg.date.astimezone(timezone('Asia/Singapore')), msg.subject))
                    data['detection_count'] += 1
                    detected_mail = PhishingEmail( \
                        sender_address = sender, \
                        subject = msg.subject, \
                        content = mail_item.get_content(), \
                        created_at = datetime.now(), \
                        receiver_id = mailaddr.get_email_id() )
                    db.session.add(detected_mail)
    # Updates last updated to current time
    mailaddr.set_last_updated(datetime.now())
    logger.info("Updating mailbox last updated from %s to %s",\
        last_updated.strftime("%d-%m-%Y, %H:%M:%S"), datetime.now())
    mailaddr.set_phishing_mail_detected(data['detection_count'])
    mailaddr.set_total_mails_checked(mail_check_count)
    db.session.commit()
    logger.info("Finished checking mails.. logging out")
    mailbox.logout()
    send_phish_check_notice(mailaddr.get_email_address(), phishing_mails)
    mailaddr = get_email_address_by_email_id(mid)
    mail_address = mailaddr.get_email_address()
    # return redirect(url_for('dashboard'))
    return render_template('dashboard/detection_results.html', \
        phishing_mails = phishing_mails, data=data, mail_address = mail_address)
def post(self):
    """Parse the image payload, prepend a batch axis and return the prediction as a list."""
    parsed = im_parser.parse_args()
    arr = np.array(parsed['image'])
    # The model expects a leading batch dimension in front of the image shape.
    batch = arr.reshape((1, ) + arr.shape)
    return model.predict(batch).tolist()
def getResult():
    """Return the model's prediction for a fixed sample vector (demo endpoint)."""
    # `sample` instead of the original `input`, which shadowed the builtin.
    sample = np.array([[5.5, 2.4, 2.7, 1.]])
    result = model.predict(sample)
    return jsonify({'result': str(result)})
def get_prediction(model, lb, le, question):
    """Clean the question, run the classifier, and decode the raw outputs
    back to label names.

    `lb` is the label binarizer and `le` the label encoder fitted at train
    time; `model.predict` returns (predictions, raw_outputs) and only the raw
    outputs are used here.
    """
    cleaned = clean_question(question)
    _, raw_outputs = model.predict([cleaned])
    # Binarize at the 0.1 threshold, then map back to the original class names.
    binarized = lb.inverse_transform(raw_outputs, threshold=0.1)
    return le.inverse_transform(binarized)
def get_prediction(self):
    """Predict from this object's accumulated history."""
    return model.predict(self.get_history())