def create_app(model):
    """Build the Flask application that serves classification for *model*."""
    app = Flask(__name__)
    app.config.from_object(__name__)
    app.model = model

    @app.route('/')
    def index():
        # Expose the model's own JSON description at the root.
        return jsonify(app.model.as_json())

    @app.route('/classify', methods=['GET', 'POST'])
    def classify():
        # The image is staged into a temp file whether it arrives as an
        # upload (POST) or as a URL to fetch (GET), then classified.
        with tempfile.NamedTemporaryFile() as image_file:
            image_path = image_file.name
            if request.method == 'POST':
                upload = request.files.get('image')
                if upload is None:
                    return jsonify({'error': 'missing image file'})
                image_file.write(upload.read())
                image_file.flush()
            elif 'image' in request.args:
                stored.sync(request.args['image'], image_path)
            else:
                return jsonify({'error': 'missing image url'})
            return jsonify(app.model.classify_image(image_path))

    @app.route('/services/ping')
    def services_ping():
        # Liveness probe.
        return jsonify(ping='pong')

    return app
def create_app() -> Flask:
    """Application factory: configure the database, recommender blueprint
    and the recommender model itself."""
    app = Flask(__name__)
    app.config.from_mapping(
        SECRET_KEY=os.environ.get('SECRET_KEY') or 'dev_key')

    # Prefer a complete DATABASE_URL; otherwise assemble one from the
    # individual POSTGRES_* environment variables.
    db_url = os.environ.get('DATABASE_URL')
    if not db_url:
        env = os.environ.get
        db_url = 'postgresql+psycopg2://{}:{}@{}/{}'.format(
            env('POSTGRES_USER'), env('POSTGRES_PW'),
            env('POSTGRES_URL'), env('POSTGRES_DB'))
    app.config['SQLALCHEMY_DATABASE_URI'] = db_url
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db.init_app(app)

    from . import recommender_api
    app.register_blueprint(recommender_api.bp)

    # The model needs an application context while it loads its vectors.
    with app.app_context():
        app.model = RecommenderModel()
        app.model.load_product_vectors('product_vectors')
    return app
def build_app(data_provider: typing.Optional[GordoBaseDataProvider] = None):
    """
    Build app and any associated routes
    """
    app = Flask(__name__)
    app.config.from_object(Config())
    app.register_blueprint(views.base_blueprint)
    app.register_blueprint(views.anomaly_blueprint)
    app.wsgi_app = adapt_proxy_deployment(app.wsgi_app)  # type: ignore
    app.url_map.strict_slashes = False  # /path and /path/ are ok.

    @app.before_request
    def _reg_data_provider():
        # Make the injected provider reachable from any view via `g`.
        g.data_provider = data_provider

    @app.before_request
    def _start_timer():
        g.start_time = timeit.default_timer()

    @app.after_request
    def _log_time_taken(response):
        # Report wall time both in the debug log and via Server-Timing.
        elapsed = timeit.default_timer() - g.start_time
        logger.debug(f"Total runtime for request: {elapsed}s")
        response.headers["Server-Timing"] = f"request_walltime_s;dur={elapsed}"
        return response

    # Model loading requires an active application context.
    with app.app_context():
        app.model, app.metadata = load_model_and_metadata(
            app.config["MODEL_LOCATION_ENV_VAR"]
        )

    return app
def create_app():
    """Create webapp.

    Factory for webapp.

    Returns
    -------
    app : flask.app.Flask
        Flask app object.
    """
    app = Flask(__name__)
    app.model = Model.load()

    Bootstrap(app)
    CORS(app)

    # Serve assets locally for privacy reasons
    cdns = app.extensions['bootstrap']['cdns']
    for asset in ('jquery', 'bootstrap'):
        cdns[asset] = StaticCDN()

    from .views import main as main_blueprint
    app.register_blueprint(main_blueprint)
    return app
def run():
    """Start the dev server on port 5067 with request profiling enabled."""
    # Name the app after the top-level package (text before the first dot).
    package_name = __name__.split('.')[0]
    app = Flask(package_name)
    app.register_blueprint(api.blueprint)

    # Profile every request and report the 10 slowest entries.
    app.config['PROFILE'] = True
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[10])

    app.model = models.AlexNet()
    app.run(host='0.0.0.0', debug=True, port=5067,
            threaded=False, use_reloader=True)
def create_app(model_config):
    """Build the model-exploration API around *model_config*.

    Parameters
    ----------
    model_config : object
        Provides ``models()`` (listing) and ``load_model(name)`` (loading).

    Returns
    -------
    flask.Flask
        App whose routes load models on demand through ``get_model``.
    """
    app = Flask(__name__)
    app.model_config = model_config
    app.model = None
    CORS(app)

    def get_model(model_name):
        # Delegate model loading/caching to the config object.
        return model_config.load_model(model_name)

    @app.route('/models')
    def index():
        # List all available models.
        body = model_config.models()
        return jsonify(body)

    @app.route('/models/<string:model_name>/query/<string:word>')
    def query(model_name, word):
        # Top-N most similar words (default 30; overridable via ?count=).
        count = int(request.args.get('count', 30))
        similarWords = get_model(model_name).word_embedding.wv.similar_by_word(
            word, topn=count)
        body = [{"label": result[0], "value": result[1]}
                for result in similarWords]
        return jsonify(body)

    @app.route('/models/<string:model_name>/info')
    def info(model_name):
        return get_model(model_name).collectionInfo.toJson()

    @app.route('/models/<string:model_name>/reliability')
    def reliability(model_name):
        # Per-section accuracy counts from the model's evaluation.
        body = [{"section": elem['section'],
                 "nr_total": elem["nr_total"],
                 "nr_correct": elem["nr_correct"]}
                for elem in get_model(model_name).accuracy]
        return jsonify(body)

    @app.route('/models/<string:model_name>/keywordMapping', methods=['POST'])
    def keywordMapping(model_name):
        # Project posted keywords onto a left/right axis.
        data = request.get_json()
        mapping = get_model(model_name).keywordMapping(
            data['keywords'], data['left'], data['right'])
        body = {'mapping': list(mapping)}
        return jsonify(body)

    @app.route('/models/<string:model_name>/modelInfo')
    def modelInfo(model_name):
        return get_model(model_name).modelInfo.toJson()

    @app.route('/analogies', methods=['POST'])
    def generateAnalogies():
        # BUG FIX: the route has no <model_name> converter, so Flask calls
        # this view with no arguments; the original signature
        # `generateAnalogies(model_name)` raised TypeError on every request.
        # The parameter was unused (the view returns canned data), so it is
        # simply removed.
        data = [{'x': 'x', 'y': 'y', 'score': 0.75},
                {'x': 'x2', 'y': 'y2', 'score': 0.33}]
        return jsonify(data)

    return app
def create_app(config_name):
    """Application factory.

    Parameters
    ----------
    config_name : str
        Key into ``config_by_name`` selecting the configuration object.

    Returns
    -------
    flask.Flask
        App with the persisted price model attached as ``app.model``
        (plus its ``score`` and ``error`` metrics).
    """
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_object(config_by_name[config_name])

    # Attach the persisted model artefacts; missing keys default to None.
    obj = PriceModel.load_model()
    app.model = obj.get('model', None)
    app.score = obj.get('score', None)
    app.error = obj.get('error', None)

    # Bootstrap registers itself on the app; the instance was an unused
    # local in the original, so the binding is dropped.
    Bootstrap(app)

    # Register blueprints
    app.register_blueprint(api_blueprint)
    app.register_blueprint(app_blueprint)
    return app
def create_app():
    """Build the sentiment-analysis app: configure logging, load the
    trained network and register the model blueprint."""
    # Directory in which run_app.py resides.
    base_dir = Path(os.path.dirname(os.path.realpath(__file__)))

    # load and configure logging
    initialize_logging(base_dir / "logs/logging.yaml", dir_path=base_dir)

    app = Flask(__name__)
    app.logger.info("Initializing a Flask app...")

    # Determine path to the model file based on location of this file
    app.model = load_nn_model(
        base_dir / 'model_training/models/sentiment_dense_nn.keras')

    app.register_blueprint(ml_model_bp)
    return app
def create_app(app_name='SWEETTWEET'):
    """Application factory for the SweetTweet service.

    Parameters
    ----------
    app_name : str
        Name given to the Flask application.

    Returns
    -------
    flask.Flask
        App with the LSTM model attached as ``app.model`` and the default
        TensorFlow graph as ``app.graph``.
    """
    app = Flask(app_name)

    # load config: twilio attributes
    app.config.from_object('sweettweet.config.BaseConfig')

    # allow cross-origin for api routes.  CORS registers itself on the app;
    # the original bound the instance to an unused local, now dropped.
    CORS(app, resources={r"/api/*": {"origins": "*"}})

    # load api routes
    from sweettweet.api import api
    app.register_blueprint(api, url_prefix="/api")

    # load LSTM model; keep a handle to the TF graph so inference can run
    # against the graph the model was loaded into
    from sweettweet.services.lstm_model import LstmModel
    app.model = LstmModel()
    app.graph = tf.get_default_graph()
    return app
def build_app(data_provider: typing.Optional[GordoBaseDataProvider] = None):
    """
    Build app and any associated routes
    """
    app = Flask(__name__)
    app.config.from_object(Config())

    # REST resources: prediction, metadata/health check, model download.
    api.init_app(app)
    for resource in ((PredictionApiView, "/prediction"),
                     (MetaDataView, "/metadata", "/healthcheck"),
                     (DownloadModel, "/download-model")):
        view, *routes = resource
        api.add_resource(view, *routes)

    app.wsgi_app = adapt_proxy_deployment(app.wsgi_app)  # type: ignore
    app.url_map.strict_slashes = False  # /path and /path/ are ok.

    @app.before_request
    def _reg_data_provider():
        # Expose the injected data provider to request handlers via `g`.
        g.data_provider = data_provider

    # Model loading needs an active application context.
    with app.app_context():
        app.model, app.metadata = load_model_and_metadata(
            app.config["MODEL_LOCATION_ENV_VAR"])

    return app
def create_app(database_credentials):
    """Creates a flask application.

    :param database_credentials: the database credentials.
    :return: the app
    """
    from yetiserver.database import connect_to_database
    from yetiserver.authentication import auth_manager_from_redis_connection
    from yetiserver.model import model_manager_from_redis_conn
    from yetiserver.model_log import model_log_from_redis_conn
    from yetiserver.api.v1 import register_api_blueprints

    app = Flask(__name__)
    # A single Redis connection backs authentication, model storage
    # and the model log.
    app.db = connect_to_database(database_credentials)
    app.auth = auth_manager_from_redis_connection(app.db)
    app.model = model_manager_from_redis_conn(app.db)
    app.model_log = model_log_from_redis_conn(app.db)

    register_api_blueprints(app)
    return app
def create_app(config=None):
    """Create and configure an instance of the Flask application."""
    app = Flask(__name__, static_folder=CLIENT_ROOT)
    if config is not None:
        app.config.update(config)

    # load dataset
    dataset_loaders = {
        'diabetes': load_diabetes_dataset,
        'german-credit': load_german_credit_dataset,
    }
    dataset_name = app.config['DATASET']
    if dataset_name not in dataset_loaders:
        raise NotImplementedError
    app.dataset = dataset_loaders[dataset_name]()

    # load model
    app.model = PytorchModelManager(app.dataset, model_name=app.config['MODEL'])
    app.dir_manager = app.model.dir_manager
    try:
        app.model.load_model()
    except FileNotFoundError:
        # No saved weights yet: train from scratch and persist everything.
        app.model.train()
        app.model.save_model()
        app.model.save_reports()
        app.dir_manager.clean_subset_cache()

    # init engine
    app.cf_engine = CFEnginePytorch(app.dataset, app.model)

    app.register_blueprint(page)
    app.register_blueprint(api, url_prefix='/api')
    CORS(app, resources={r"/api/*": {"origins": "*"}})
    return app
import numpy as np
import os
import sys
import pickle
from flask import Flask, request, render_template
from sklearn.linear_model import LogisticRegression
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer, HashingVectorizer

vectorizer_filepath = './model/vec_bow.pickle'
model_filepath = './model/logit.pickle'

app = Flask(__name__)
# Bag-of-words vectorizer and logistic-regression classifier, restored
# from their pickled training artefacts at import time.
app.vectorizer = pickle.load(open(vectorizer_filepath, 'rb'))
app.model = pickle.load(open(model_filepath, 'rb'))
app.score = 0
app.text = ''


def preprocess_text_ignore_non_letters(text):
    """Lower-case *text*, drop @-mentions, and keep only Russian letters.

    Returns the cleaned words joined by single spaces.
    """
    russian_letters = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
    text = text.lower().strip()
    # BUG FIX: drop @-mentions BEFORE stripping non-letter characters.
    # In the original order the '@' had already been removed by the
    # character filter, so the `word[0] != '@'` test could never match and
    # Cyrillic mentions survived.
    text = ' '.join(word for word in text.split()
                    if word and not word.startswith('@'))
    text = ''.join(char for char in text
                   if char in russian_letters or char.isspace())
    # Normalize whitespace down to single spaces.
    return ' '.join(text.split())


def predict_score(text):
    # Probability of the positive class, scaled to an integer percentage.
    text_bow = app.vectorizer.transform([text])
    return int(100 * app.model.predict_proba(text_bow)[0, 1])
from geopy import geocoders
import dill
#dill.settings["recurse"] = True
import requests
from bokeh.plotting import figure
from bokeh.embed import components
import pandas as pd

app = Flask(__name__)
# Submitted form values are stashed on this module-level dict.
app.vars = {}

# Deserialize the pre-trained k-NN model once at import time.
with open("knn_model.pkl", "rb") as f:
    app.model = dill.load(f)


@app.route('/')
def main():
    # Land everyone on /index.
    return redirect('/index')


@app.route('/index', methods=['GET', 'POST'])
def index():
    # GET renders the form; POST stores the submitted address/feature fields.
    if request.method=='GET':
        return render_template('index.html')
    else:
        app.vars["street"] = request.form["street"]
        app.vars["city"] = request.form["city"]
        app.vars["state"] = request.form["state"]
        app.vars["numberofblightviolationtickets"] = request.form["numberofblightviolationtickets"]
        app.vars["numberofcrimes"] = request.form["numberofcrimes"]
        # NOTE(review): the POST branch continues beyond this excerpt; as
        # shown it returns None, which Flask rejects — confirm the full
        # source.
class SSHNet(object):
    """Wrapper that loads the SSH face-detection Caffe network on GPU 0."""

    def __init__(self):
        cfg_from_file('SSH/configs/wider.yml')
        cfg.GPU_ID = 0
        # Loading the network
        caffe.set_mode_gpu()
        caffe.set_device(0)
        self.net = caffe.Net('SSH/models/test_ssh.prototxt',
                             'SSH-FL-OHEM-ver2_iter_40000.caffemodel',
                             caffe.TEST)
        self.net.name = 'SSH'


def start_tornado(app, port=8880):
    # Serve the WSGI app through Tornado's HTTP server (blocking call).
    http_server = tornado.httpserver.HTTPServer(
        tornado.wsgi.WSGIContainer(app))
    http_server.listen(port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == '__main__':
    app.ssh = SSHNet()
    # Gender classifier: VGG16 'fc2' features feeding a 2-way softmax head.
    app.vgg = VGG16(weights='imagenet')
    app.x = app.vgg.get_layer('fc2').output
    app.prediction = Dense(2, activation='softmax', name='predictions')(app.x)
    # NOTE(review): Keras 2 spells this keyword `inputs=`; `input=` only
    # works on older releases — confirm the pinned Keras version.
    app.model = Model(input=app.vgg.input, outputs=app.prediction)
    app.model.load_weights("./gender/pretrained_weight.hdf5")
    start_tornado(app)
def create_app():
    """Factory for the GDP-per-capita prediction service."""
    app = Flask(__name__)
    app.logger.setLevel('INFO')
    # cache model for prediction
    app.model = None

    def load_prediction_model():
        # Deserialize the persisted sklearn model.
        file_name = 'filename5.joblib'
        return load(file_name)

    def prepare_prediction_data(jsonData):
        '''
        PCPI Inflation, average consumer prices
        PCPIE Inflation, end of period consumer prices
        LUR Unemployment rate
        LE Employment
        LP Population
        '''
        return [[jsonData['PCPI'],
                 jsonData['PCPIE'],
                 jsonData['LUR'],
                 jsonData['LE'],
                 jsonData['LP']]]

    @app.route('/')
    def hello_world():
        app.logger.info('asdfghjk')
        return 'Hello, World!'

    @app.route('/gdp-per-capita', methods=['POST'])
    def predict_gpd_per_capita():
        req = request.json
        app.logger.info(req)
        if not req:
            return make_response(
                ({'message': 'unable to parse request'}, 400,
                 {'Content-Type': 'application/json'})
            )
        # 2 mandatory params plus 5 discovered
        if req and len(req) < 7:
            rsp = make_response(jsonify({'message': 'params are missing'}), 409)
            rsp.headers['Content-Type'] = 'application/json'
            return rsp
        if app.model is None:
            # Lazy-load the model on the first prediction request.
            app.model = load_prediction_model()
        predicted = app.model.predict(prepare_prediction_data(req))
        return {'gdpPerCapita': predicted[0]}

    return app
# Startup: load precomputed game data and fit normalizers.
app.gameFactors = load('gameFactors.npy')
app.cats, app.mechs, app.gameNorm = category_and_mechanic_table(app.gameData)
app.transformer = StandardScaler().fit(app.gameNorm)
cNorm = []
# NOTE(review): `outData` is never used after this line.
outData = app.transformer.transform(app.gameNorm)
# Replace each feature row by its unit (L2-normalized) vector.
for i in app.gameNorm:
    j = 1. / sqrt(float(dot(i, i)))
    cNorm.append(list(map(lambda x: j * x, i)))
app.gameNorm = cNorm
del cNorm
# NOTE(review): `lode` looks like a typo for a load function (pickle/dill?);
# the module's imports lie outside this excerpt — confirm and fix there.
app.model = lode(open('gamescoremodel', 'rb'))


@app.route('/')
def main():
    return render_template('landing.html')


@app.route('/about')
def about():
    return render_template('about.html')


@app.route('/user', methods=['GET', 'POST'])
def user():
    '''
# Tail of a lookup helper whose `def` lies outside this excerpt: return the
# already-registered event equal to this one.
if event == same_event:
    return event


if __name__ == '__main__':
    if len(sys.argv) != 2 and len(sys.argv) != 3:
        print('Usage: server.py config.toml [--generate_events=false]')
        sys.exit(1)
    if len(sys.argv) == 2:
        # Without the optional flag, regenerate the event corpus first.
        print('Generating event data...')
        generate_events()
    cfg = sys.argv[1]
    # Build metapy inverted + forward indexes over the configured corpus.
    app.idx = metapy.index.make_inverted_index(cfg)
    app.fidx = metapy.index.make_forward_index(cfg)
    app.events = []
    app.mapping = {}
    # Deduplicate: every document id maps to its canonical Event instance.
    for d_id in range(0, app.idx.num_docs()):
        event = Event(app.idx, d_id)
        if event not in app.events:
            app.mapping[str(d_id)] = event
            app.events.append(event)
        else:
            app.mapping[str(d_id)] = get_event(event)
    app.sorted_events = sorted(list(app.events))
    app.searcher = Searcher(app.idx)
    app.model = TopicModeler(app.fidx, app.events)
    app.run(debug=True, use_reloader=False)
class Model(object):
    """Callable pairing a validator with a predictor.

    ``validator(args)`` must return truthy before ``predictor(args)`` is
    invoked; otherwise a ValueError is raised.
    """

    def __init__(self, validator, predictor):
        self.validator = validator
        self.predictor = predictor

    def __call__(self, args):
        if self.validator(args):
            return self.predictor(args)
        else:
            raise ValueError(
                "I don't know how to score the args (%r) you supplied" % args)


# Populated elsewhere with a Model instance before serving traffic.
app.model = None


@app.route('/')
def index():
    return "Make a prediction by POSTing to /predict"


@app.route('/predict', methods=['POST'])
def predict():
    import json
    # SECURITY NOTE(review): this unpickles (`cPloads` — presumably
    # cPickle.loads) base64 data taken straight from the request form;
    # unpickling untrusted input allows arbitrary code execution.
    args = cPloads(base64.b64decode(request.form['args']))
    try:
        return json.dumps(app.model(args))
    except ValueError as ve:
        # Validation failures are reported as plain text.
        return str(ve)
from flask import Flask, redirect, url_for, request, jsonify
from PIL import Image
from flasgger import Swagger
from nima.inference.inference_model import InferenceModel

app = Flask(__name__)
# Swagger UI for the API documented in the view docstrings below.
Swagger(app=app)
# NIMA aesthetic-scoring model, created once at startup.
app.model = InferenceModel.create_model()


@app.route('/')
def index():
    # Root redirects to the health check.
    return redirect(url_for('health_check'))


@app.route('/api/health_check')
def health_check():
    return "ok"


@app.route('/api/get_scores', methods=['POST'])
def get_scores():
    """
    NIMA Pytorch
    ---
    tags:
      - Get Scores
    consumes:
# Body of the CLI-argument builder; its `def` line lies outside this excerpt
# (it is invoked below as get_args()).
parser = argparse.ArgumentParser()
parser.add_argument('--model_path', type=str,
                    default=os.path.join('weights', 'base_rnn.ckpt'),
                    help="Path RNN model weights")
parser.add_argument('--normalizer_path', type=str,
                    default=os.path.join('weights', f'data_normalizer.pkl'),
                    help="Path to sklearn Normalizer")
parser.add_argument('--debug', type=boolean_string, default=True,
                    help="Use debug mode")
parser.add_argument('--port', type=int, default=5000,
                    help="The port of the webserver. Defaults to `5000`")
parser.add_argument('--use_gpu', type=boolean_string, default=True,
                    help="Use gpu for training and inference")
parser.add_argument('--hparams', type=str,
                    default=os.path.join('weights', 'rnn_hparams.json'),
                    help="Path to model hparams")
return parser.parse_args()


if __name__ == '__main__':
    args = get_args()
    # Only restore the model when both checkpoint and normalizer exist.
    if os.path.isfile(args.model_path) and os.path.isfile(args.normalizer_path):
        # load model if exist
        app.model = LstmModel.load_from_checkpoint(checkpoint_path=args.model_path)
        app.preprocessor = joblib.load(args.normalizer_path)
    else:
        app.model = None
        app.preprocessor = None
    try:
        app.hparams = json.load(open(args.hparams))
    except FileNotFoundError:
        print("To start app need hparams file")
    app.rnn_task_queue = create_queue()  # using queue to avoid predict/train conflicts and gpu out of memory error
    app.queue_thread = create_thread(app.rnn_task_queue)  # thread to process queue
    app.queue_thread.start()
    app.gpus = args.use_gpu
    app.run(debug=args.debug, port=args.port)
import os
from flask import Flask, render_template, request, redirect, url_for, send_from_directory
from werkzeug import secure_filename
import cPickle as pkl
from single_img_processing import PreprocessPredict

# NOTE: Python 2 source (print statement, cPickle).
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'static/uploads/'
app.config['ALLOWED_EXTENSIONS'] = set(
    ['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
# Filled in lazily by load_pickle().
app.model = None


def load_pickle():
    # Deserialize the trained classifier from disk into app.model.
    print 'Loading Model...'
    app.model = pkl.load(open('model.pkl', 'rb'))


def predict_watch_type(img):
    # NOTE(review): `img` is ignored and predict() is called with no
    # arguments here — confirm against the full source.
    return app.model.predict()


def allowed_file(filename):
    # Accept only filenames with a whitelisted extension.
    return '.' in filename and \
        filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']


@app.route('/')
def index():
def create_app(model_config):
    """Application factory for the word-embedding exploration API.

    All routes operate on a single globally selected model (``app.model``),
    chosen via POST /selectModel.
    """
    app = Flask(__name__)
    app.model_config = model_config
    # Currently selected embedding model; None until /selectModel is called.
    app.model = None
    CORS(app)
    if app.debug:
        # Interactive werkzeug debugger while running in debug mode.
        app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)

    @app.route('/selectModel', methods=['POST'])
    def set_model():
        # Switch the active model by name.
        data = request.get_json()
        app.model = model_config.load_model(data['model_name'])
        return '{}'

    @app.route('/models')
    def index():
        # List the available models.
        body = model_config.models()
        return jsonify(body)

    @app.route('/query/<string:word>')
    def query(word):
        # Top-N most similar words (default 50; overridable via ?count=).
        count = int(request.args.get('count', 50))
        similarWords = app.model.word_embedding.wv.similar_by_word(word, topn=count)
        body = [{"label": result[0], "value": result[1]} for result in similarWords]
        return jsonify(body)

    @app.route('/info')
    def info():
        return app.model.collectionInfo.toJson()

    @app.route('/reliability')
    def reliability():
        # Per-section accuracy, with a guarded percentage computation.
        body = []
        for elem in app.model.accuracy:
            try:
                percentage = (elem["nr_correct"]/elem["nr_total"]) * 100
            except ZeroDivisionError:
                percentage = 0
            body.append({"section": elem['section'],
                         "nr_total": elem["nr_total"],
                         "nr_correct": elem["nr_correct"],
                         "percentage": round(percentage, 2)})
        return jsonify(body)

    @app.route('/keywordMapping', methods=['POST'])
    def keywordMapping():
        # Project keywords onto a left/right axis; out-of-vocabulary words
        # are dropped and reported back under 'oov'.
        data = request.get_json()
        keywords, oov_kw = app.model.filterNonVocabWords(data['keywords'])
        left_axis, oov_left = app.model.filterNonVocabWords(data['left'])
        right_axis, oov_right = app.model.filterNonVocabWords(data['right'])
        oov = oov_kw + oov_left + oov_right
        if len(right_axis)==0 or len(left_axis)==0:
            mapping = []
        else:
            mapping = app.model.keywordMapping(keywords, left_axis, right_axis).tolist()
        body = {'keywords': keywords, 'mapping': mapping, 'oov': oov}
        return jsonify(body)

    @app.route('/modelInfo')
    def modelInfo():
        return app.model.modelInfo.toJson()

    @app.route('/analogies', methods=['POST'])
    def generateAnalogies():
        # Analogies for a word pair: keep score >= 0.15, top 25, rounded.
        data = request.get_json()['wordpair']
        analogies = app.model.generate_analogies(data['a'], data['b'], 3000)
        analogies = analogies[[data['a'], data['b'], 'score']]
        analogies = analogies[analogies['score']>=0.15][:25]
        analogies['score'] = analogies['score'].round(4)
        analogies.rename(columns={data['a']: 'x', data['b']: 'y'}, inplace=True)
        return jsonify(analogies.to_dict(orient='records'))

    return app
# Fragment: `config_dir`, `encoding`, `app` and `app.appjson` are defined
# outside this excerpt.  Python 2 source (print statement, httplib,
# `except Exception, e`).
labels_json = codecs.open(config_dir + '/labels.json', 'r', encoding).read()
app.labels = json.loads(labels_json)

# Cubes Model
print 'Loading Cubes Model...'
cubes_conn = httplib.HTTPConnection(
    app.appjson.get('cubes_hostname') or 'localhost',
    app.appjson.get('cubes_port') or 5000
)
try:
    cubes_conn.request('GET', "/model")
except Exception, e:
    raise Exception('Failed to access the Cubes server...(' + str(e) + ')')
app.model = json.loads(
    cubes_conn.getresponse().read().decode(encoding)
)

# Cubes Dimensions
app.dimensions = []
for name, elem in app.model.get('dimensions').items():
    levels = []
    # NOTE(review): `break` stops collecting the remaining levels as soon
    # as a 'default' level is seen — confirm `continue` was not intended.
    for level_name in elem.get('levels').keys():
        if level_name == 'default':
            break
        else:
            levels.append(str(level_name))
    app.dimensions.append(
        CubesDimension(
            name = str(name),
            label = app.labels.get('dimensions').get(name) or name,
            levels = levels
from __future__ import unicode_literals import json from flask import Flask, request from sklearn.externals import joblib from settings import MODEL_FILENAME app = Flask("Fraud Detection") # load model at startup time app.model = joblib.load(MODEL_FILENAME) @app.route(u"/predict", methods=[u"POST"]) def predict_fraud(): input_data = request.get_json() if u"features" not in input_data: return json.dumps({u"error": u"No features found in input"}), 400 if not input_data[u"features"] or not isinstance(input_data[u"features"], list): return json.dumps({u"error": u"No feature values available"}), 400 if isinstance(input_data[u"features"][0], list): results = app.model.predict_proba(input_data[u"features"]).tolist() else: results = app.model.predict_proba([input_data[u"features"]]).tolist() return json.dumps({u"scores": [result[1] for result in results]}), 200
'''Controller of the 'Insegnamenti' (course catalogue) web application.

Line-saving formatting for the slides!

@author: posenato'''
import logging
from flask import Flask, request
from flask.templating import render_template
from Model import Model

logging.basicConfig(level=logging.DEBUG)

app = Flask(__name__)  # the Flask application!
app.jinja_env.line_statement_prefix = '#'  # enable Jinja line statements
app.model = Model()
# Faculty record used by every view below.
app.facolta = app.model.getFacolta("Scienze Matematiche Fisiche e Naturali")


@app.route('/')
def homePage():
    '''Home page: must present the form for choosing the study programme
    and academic year among the programmes of the Science faculty.'''
    corsiStudi = app.model.getCorsiStudi(app.facolta['id'])
    aA = app.model.getAnniAccademici(app.facolta['id'])
    return render_template('homepage.html', facolta=app.facolta,
                           corsiStudi=corsiStudi, aa=aA,
                           prova="<b>prova</b>")


@app.route('/insegnamenti', methods=['POST', 'GET'])
def insegnamenti():
    '''List of the courses of a study programme in a given academic year.'''
    if request.method == 'POST':
        idCorsoStudi = request.form['idCorsoStudi']
from lib.hardcode import TOP_ITEMS
from lib.logger import configure_logger
from lib.product_store_features import ProductStoreStats
from lib.recommender import CatBoostRecommenderWithPopularFallback, cols
from lib.utils import read_products_file, pickle_load

logger = configure_logger(logger_name='server', log_dir='')
logger.info('starting to load all stuff')

ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
config = TrainConfig.from_json('configs/config.json')

app = Flask(__name__)
# Enriched product catalogue used by the recommender.
app.products_data = read_products_file(config.products_enriched_file)
# Ranking model (CatBoost) restored from its trained snapshot.
app.model = catboost.CatBoost()
app.model.load_model(config.catboost.model_file)
# Implicit-feedback item vectors + model for candidate generation.
app.item_vectors = pickle_load(config.implicit.vectors_file)
with open(config.implicit.model_file, 'rb') as f:
    app.implicit_model = pickle.load(f)
app.product_store_stats = ProductStoreStats()
# Facade combining all of the above, with a popular-items fallback.
app.recommender = CatBoostRecommenderWithPopularFallback(
    model=app.model,
    implicit_model=app.implicit_model,
    item_vectors=app.item_vectors,
    products_data=app.products_data,
    product_store_stats=app.product_store_stats,
    feature_names=cols,
)
from flask import Flask, current_app
from flask import jsonify, redirect, url_for, escape
from flask import request, session
from flask import g as Globals
from actions import update

app = Flask(__name__, static_url_path='')
# In-memory model state, replaced on every /api POST.
app.model = 0


@app.route('/<path:path>')
def static_proxy(path):
    """Serve any other path straight from the static folder."""
    return app.send_static_file(path)


@app.route('/')
def index():
    """Serve the single-page-app entry point."""
    return app.send_static_file('index.html')


@app.route('/api', methods=['POST'])
def api():
    """Fold the posted JSON blob into the model and echo the new state."""
    payload = request.get_json()
    app.model = update(payload, app.model)
    return jsonify({'model': app.model})
from gevent import monkey
from flask import Flask, request
from gevent import pywsgi
import data_set
import numpy as np
import time
from face_model_based_facenet_and_fcnet import ModelBasedFaceNetAndFcNet

# Patch the stdlib for cooperative (gevent) I/O before anything else runs.
monkey.patch_all()
# os.environ["CUDA_VISIBLE_DEVICES"] = ""  # do not use the GPU
app = Flask(__name__)
# Allow non-ASCII characters in JSON responses.
app.config['JSON_AS_ASCII'] = False
# Face-recognition model: FaceNet embeddings + fully-connected classifier,
# loaded from local absolute paths.
app.model = ModelBasedFaceNetAndFcNet(
    'E:/vscodeworkspace/FaceRecognition/FaceRecognitionCore/models/20170512-110547/20170512-110547.pb',
    "E:/vscodeworkspace/FaceRecognition/FaceRecognitionCore/models/facenet_based_face_model_fc/")


@app.route('/hello')
def response_request():
    # Liveness probe.
    return "hi"


@app.route('/restore', methods=['GET'])
def restore():
    # Reload the model weights from disk.
    app.model.load()
    return "success"


@app.route('/predict', methods=['POST'])
import time
from datetime import datetime
from keras.models import load_model
from keras.preprocessing import image
from keras.models import model_from_json
import numpy as np
from PIL import Image
import keras

app = Flask(__name__)
# Rebuild the network architecture from its JSON description.
json_file = open('model-covers.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
app.model = model_from_json(loaded_model_json)
# RTSP camera stream to inspect; configured via the environment.
app.rtsp_url = os.getenv('RTSP_URL')
# load weights into new model
app.model.load_weights("model_covers_saved.h5")
logging.info("Loaded model")


@app.route('/covers-status', methods=['GET'])
def cover_status():
    # Classify the current camera frame (continues beyond this excerpt).
    ret_val = 'unknown'
    rtsp_url = app.rtsp_url
# Setup mysql app.config['MYSQL_USER'] = '******' app.config['MYSQL_PASSWORD'] = '******' app.config['MYSQL_DB'] = 'insights' app.config[ 'MYSQL_HOST'] = 'jobsensedb.chhnlo3xihgd.ap-southeast-1.rds.amazonaws.com' app.config['MYSQL_CURSORCLASS'] = 'DictCursor' app.mysql = MySQL(app) print("MySQL connection established.") # Initialize heavy loading content, esp. the machine learning model # load model app.pre_model = spremodel_load(common.BASE_MODEL_PATH + common.PRE_MODEL_FILE_MAIN) app.model = smodel_load(common.BASE_MODEL_PATH + common.MODEL_FILE) print("All model loading done. Your API service are now ready~") # Create restful api flask app, add all resources require api = Api(app) api.prefix = '/predictions' api.add_resource(JobAdPerformance, '/job-ad-performance') @app.route('/healthcheck') def healthcheck(): """Health check endpoint""" return jsonify({'status': 'healthy'})
import json
import pickle
from flask import Flask
import joblib
import shap
from geopy.geocoders import Nominatim

# Column order / categorical-feature metadata saved at training time.
with open("data/options.json", "r") as f:
    options = json.load(f)

app = Flask(__name__)
app.model_columns = options["column order"]
app.categorical_features = options["categorical features"]
# sklearn Pipeline: step 0 = feature transformer, step 1 = tree model.
app.model = joblib.load("data/model.joblib")
app.feature_names = app.model.steps[0][1].get_feature_names()
# SHAP explainer is built on the tree step only (post-transform features).
app.explainer = shap.TreeExplainer(app.model.steps[1][1])
app.nominatim = Nominatim(user_agent="nyc_rent_estimator")
# Canned geopy response, presumably a fallback/mock — verify usage in views.
app.all_hands_on_deck = pickle.load(open("data/geopy_mock_response.pkl", "rb"))
app.ti_enabled = False

# Importing views registers the routes on `app` (circular-import pattern).
from app import views
import pandas as pd
from flask import Flask, jsonify, request
from estimationService import GetResources
from collections import OrderedDict

app = Flask(__name__)
# Estimation model loaded once at startup.
app.model = GetResources.getModel()


@app.route('/predict', methods=['GET'])
def predict():
    """Predict from query-string parameters.

    Each query arg becomes a one-column DataFrame column; string columns
    have thousands separators stripped, then everything is cast to float64
    before being fed to the model.
    """
    # Store the query-string argument in the arg_list variable and then
    # convert its format to pandas dataframe
    arg_list = request.args.to_dict(flat=False)
    query_df = pd.DataFrame.from_dict(OrderedDict(arg_list))
    print(arg_list)
    for feat in query_df.columns:
        # BUG FIX: the original tested `isinstance(query_df[feat], object)`,
        # which is always True (every Python value is an `object`), making
        # the numeric branch unreachable.  Test the column dtype instead.
        if query_df[feat].dtype == object:
            query_df[feat] = query_df[feat].str.replace(",", "").astype("float64")
        else:
            query_df[feat] = query_df[feat].astype("float64")
    # For each model, generated the estimated value.
    try:
        print(query_df)
        predict_val = app.model.predict(query_df).astype('float64')[0]
        return jsonify(predict=str(predict_val))
    except Exception as ex:
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        print(message)
        return "Some input parameters are missing " + message
from flask import Flask, request, Response
from model import load_model, create_csv

app = Flask(__name__)
# BUG FIX: load the model once at import time and attach it to the app.
# The original loaded it twice (module level into an unused `model` global,
# and again under __main__), and `app.model` was only set when run as a
# script — under a WSGI server /predict raised AttributeError.
model = load_model()
app.model = model


@app.route('/predict', methods=['GET', 'POST'])
def predict():
    """Score the posted dialogs and return the predictions as CSV.

    The JSON body must be a list of dicts, each with a required "dialogId"
    field; ``y_pred`` is an (n, 2) list whose first column corresponds to
    Alice's scores.
    """
    # data has to be a list of dict with a required field "dialogId"
    data = request.get_json(silent=True)
    # y_pred has to be a (n, 2) list (1st column corresponds to Alice's scores)
    y_pred = app.model.predict(data)
    csv = create_csv(data, y_pred)
    return Response(csv, mimetype='text/csv', headers={
        'Content-disposition': 'attachment; filename=predictions.csv'
    })


if __name__ == '__main__':
    app.run(host='0.0.0.0')
import os
from flask import Flask, render_template, request, redirect, url_for, send_from_directory
from werkzeug import secure_filename
import cPickle as pkl
from single_img_processing import PreprocessPredict

# NOTE: Python 2 source (print statement, cPickle).
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'static/uploads/'
app.config['ALLOWED_EXTENSIONS'] = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
# Filled in lazily by load_pickle().
app.model = None


def load_pickle():
    # Deserialize the trained classifier from disk into app.model.
    print 'Loading Model...'
    app.model = pkl.load(open('model.pkl', 'rb'))


def predict_watch_type(img):
    # NOTE(review): `img` is ignored and predict() is called with no
    # arguments here — confirm against the full source.
    return app.model.predict()


def allowed_file(filename):
    # Accept only filenames with a whitelisted extension.
    return '.' in filename and \
        filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']


@app.route('/')
def index():
    return render_template('index.html',
                           prediction='This is an example of a casual watch',
                           img_path='static/uploads/audemar.png')


@app.route('/upload', methods=['POST'])