Code Example #1
 def run(self, ngrok=False) -> None:
     if ngrok:
         run_with_ngrok(self._app)
     self._app.run()
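
Note: run_with_ngrok monkey-patches app.run so that, when the app starts, a background thread opens an ngrok tunnel to the local port and prints its public URL; the snippet above simply makes that opt-in via the ngrok flag. For reference, the minimal standalone pattern (roughly the flask_ngrok README usage; the route below is just a placeholder) is:

from flask import Flask
from flask_ngrok import run_with_ngrok

app = Flask(__name__)
run_with_ngrok(app)  # opens an ngrok tunnel when app.run() is called

@app.route("/")
def index():
    return "Hello from ngrok"

if __name__ == "__main__":
    app.run()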
Code Example #2
File: server.py  Project: arbaz52/darts-python-server
    def startWebServer(self):
        self.log("INFO", "Starting web server")
        app = Flask("Python server")
        run_with_ngrok(app)
        self.app = app

        @app.route("/logs")
        def logs():
            try:
                s = ""
                with open("log.txt", 'r') as fp:
                    for line in fp.readlines():
                        s += line + "<br>"
                return make_response(s, 200)
            except OSError:
                return make_response("No logs", 200)

        @app.route("/start")
        def start():
            self.startProcessingFrames()
            self.log("START", "Starting server")
            return make_response("Starting server", 200)

        @app.route("/stop")
        def stop():
            self.stopProcessingFrames()
            self.log("STOP", "Stopping server")
            return make_response("Stopping server", 200)

        @app.route("/updatep")
        def updatePreprocessingValues():
            self.log("UPDATE", "Updating preprocessing values")
            self.loadPreprocessingValuesFromWeb()
            return make_response("Updating preprocessing values", 200)

        # server commands to start/stop the frame-processing thread
        @app.route("/startfp")
        def startfp():
            self.startProcessingFrames()
            return make_response("Done!", 200)

        @app.route("/stopfp")
        def stopfp():
            self.stopProcessingFrames()
            return make_response("Done!", 200)

        @app.route("/camera/<cameraId>")
        def sendProcessedFrame(cameraId):
            with self.lock:
                if self.xo == 1:
                    self.log("HAP", "Value changed")
                    self.xo = 0

            if cameraId not in list(self.cameras.keys()):
                self.log("WARN", "Camera doesn't exist")
                return make_response("camera doesn't exist", 404)

            #self.log("INFO", "sending processing frame")
            return Response(
                self.gen(cameraId),
                mimetype='multipart/x-mixed-replace; boundary=frame')

        # self.app.run(host='0.0.0.0', port=self.SERVER_PORT, debug=True,
        #              threaded=True, use_reloader=False)
        self.app.run()
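
Note: the /camera/<cameraId> route streams MJPEG, so self.gen(cameraId) must be a generator yielding JPEG frames wrapped in the multipart boundary declared in the mimetype. The project's gen() is not shown in this excerpt; a typical implementation looks roughly like this (the frame accessor is hypothetical):

    def gen(self, cameraId):
        while True:
            frame = self.cameras[cameraId].get_jpeg()  # hypothetical: latest processed frame as JPEG bytes
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')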
Code Example #3
import json
import pickle

from flask import Flask, render_template, request
from flask_ngrok import run_with_ngrok
import nltk
from keras.models import load_model
from nltk.stem import WordNetLemmatizer

lemmatizer = WordNetLemmatizer()

# chat initialization
model = load_model("static/models/chatbot_model.h5")
intents = json.loads(open("intents.json").read())
words = pickle.load(open("words.pkl", "rb"))
classes = pickle.load(open("classes.pkl", "rb"))

chatbot_app = Flask(__name__)
run_with_ngrok(chatbot_app)

# @chatbot_app.route("/")
# def home():
#     return render_template("ChatBot.html")


@chatbot_app.route("/get", methods=["POST"])
def chatbot_response():
    msg = request.form["msg"]
    if msg.startswith('my name is'):
        name = msg[11:]
        ints = predict_class(msg, model)
        res1 = getResponse(ints, intents)
        res = res1.replace("{n}", name)
    elif msg.startswith('hi my name is'):
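
Note: predict_class and getResponse are not shown in this excerpt; in this common Keras chatbot setup they presumably tokenize and lemmatize msg into a bag-of-words vector over words, run the model, map the top class through classes, and pick a reply from the matching intent in intents.json.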
Code Example #4
def main():
    print('Web panel started!')
    db_session.global_init()
    run_with_ngrok(app)  # returns None; the public ngrok URL is printed once app.run() starts
    app.run()
Code Example #5
from flask import Flask, jsonify, request, send_file
from flask_ngrok import run_with_ngrok
from PIL import Image
import time
server = Flask(__name__)
run_with_ngrok(server)
from sightings import Sightings
import predictor
import uuid
import json
import socket
print(socket.gethostbyname(socket.getfqdn(socket.gethostname())))

sightings = Sightings()


@server.route("/still-data", methods=["POST"])
def load_still():
    rtime = time.time()
    data = request.files.get('still')
    img = Image.open(data.stream)
    sid = str(uuid.uuid1().int)
    img.save("stills/" + sid + ".png")
    jsondata = json.loads(request.form['json'])
    num_geese, bbox = predictor.infer(sid)
    if num_geese > 0:
        sightings.add(sid, rtime, jsondata["location"], jsondata["telemetry"],
                      num_geese, bbox)
    return "Success", 200

Code Example #6
#predictions
predictions = model.predict(X_test)

#metrics
from sklearn.metrics import accuracy_score
print(accuracy_score(y_test, predictions)*100)

predictions = model.predict([[22.0, 1, 0, 7.2500, 1, 0, 1, 0, 1]])
predictions

!pip install flask-ngrok

from flask_ngrok import run_with_ngrok
from flask import Flask, jsonify
app = Flask(__name__)
run_with_ngrok(app) #starts ngrok when app is running
@app.route("/<int:Age>/<int:SibSp>/<int:Parch>/<float:Fare>/<Gender>/<int:PClass>/<Place>")
def home(Age, SibSp, Parch, Fare, Gender, PClass, Place):
  p = []
  p += [Age, SibSp, Parch, Fare]
  if Gender.casefold() == "m":
    p += [1]
  else:
    p += [0]
  if PClass == 2:
    p += [1, 0]
  elif PClass == 3:
    p += [0, 1]
  else:
    p += [0, 0]
  if Place.casefold() == "queenstown":
Code Example #7
File: web.py  Project: vsemecky/ganimator-flask
# System info
@app.route("/sysinfo")
def sysinfo():
    sysinfo_data = {
        'cpu_cores': 2,
        'cpu_threads': 4,
        'ram': 24,
        'gpu_ram': 16,
        'gpu_name': "V100",
        'hdd_space': "100",
        'hdd_space_free': "30",
        'Running in Colab': IN_COLAB,
    }
    return flask.render_template('sysinfo.html', sysinfo=sysinfo_data)


@app.route("/api/add-image/<seed>")
def add_image(seed):
    project.add_image(seed)
    return redirect(url_for('images'))


if __name__ == "__main__":
    if IN_COLAB:
        print("Colab: YES")
        run_with_ngrok(app)  # In Google Colab run with Ngrok
    else:
        print("Colab: No")

    app.run()
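
Note: IN_COLAB is not defined in this excerpt; it is commonly set with a guarded import like the following (an assumption about this project, not shown in the source):

try:
    import google.colab  # noqa: F401  # importable only inside Colab
    IN_COLAB = True
except ImportError:
    IN_COLAB = False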
Code Example #8
File: app.py  Project: daniele22/food-recognition
app.config['UPLOAD_EXTENSIONS'] = ['.jpg', '.png']
app.config['UPLOAD_FOLDER'] = './food-recognition/static/uploads'  # The same path in prediction.html !!!
#app.config['MODEL_PATH'] = 'model/mask_rcnn_food-challenge_0026.h5'
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
app.secret_key = "thisisasupersecretkey"
app.config['SECRET_KEY'] = "thisisasupersecretkey"

app.config.update(
    DROPZONE_REDIRECT_VIEW='prediction',  # set redirect view
    DROPZONE_MAX_FILES=20,
)

# creation of a dropzone
dropzone = Dropzone(app)
run_with_ngrok(app)  # comment if not using colab

# instantiation of detector
detector = Detector()


#========================= UTILS FUN ======================================#
def validate_image(stream):
    """
    check if is a valid image
    """
    header = stream.read(512)
    stream.seek(0)
    format = imghdr.what(None, header)
    if not format:
        return None
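
Note: the truncated remainder of validate_image presumably returns the detected extension (something like '.' + format, with 'jpeg' normalized to '.jpg') so uploads can be checked against app.config['UPLOAD_EXTENSIONS'].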
Code Example #9
import hashlib  # secure hashes and message digests
from time import ctime
import json
import requests
import urllib.parse
from flask import Flask, redirect, url_for, session, request, jsonify, render_template
from flask_oauthlib.client import OAuth
from flask_ngrok import run_with_ngrok
import ntplib
import ssl
ssl._create_default_https_context = ssl._create_unverified_context

flaskApp = Flask(__name__)
run_with_ngrok(flaskApp)
database = "database.db"
flaskApp.config['GOOGLE_ID'] = "937632115184-49gtf8rtirdg5000g0eiie325k25mpvg.apps.googleusercontent.com"
flaskApp.config['GOOGLE_SECRET'] = "4f_qgKmV4vTwALuWlaLdV6n5"
flaskApp.debug = True
flaskApp.secret_key = 'development'
oauth = OAuth(flaskApp)

google = oauth.remote_app(
    'google',
    consumer_key=flaskApp.config.get('GOOGLE_ID'),
    consumer_secret=flaskApp.config.get('GOOGLE_SECRET'),
    request_token_params={
        'scope': 'email'
    },
    base_url='https://www.googleapis.com/oauth2/v1/',
Code Example #10
File: main.py  Project: Shagrat912/tasks
from flask import Flask, request
import flask_ngrok
import logging

# library we will need for working with JSON
import json

# create the application
# we pass in __name__, which holds the name of the module we are in.
# In this case it contains '__main__', because we access the variable
# from the module that was launched directly.
# If the same access happened inside the logging module, for example,
# we would get 'logging' instead.
app = Flask(__name__)
flask_ngrok.run_with_ngrok(app)

# Set the logging level
logging.basicConfig(level=logging.INFO)

# Create a dictionary so that, for each session of conversation with
# the skill, we store the suggestions the user has already seen.
# This helps us vary the reply suggestions a little
# (the buttons in the JSON response).
# When a new user writes to our skill, we save an entry of the form
# sessionStorage[user_id] = {'suggests': ["Не хочу.", "Не буду.", "Отстань!"]}
# Such an entry says that we have shown the user these three suggestions.
# When the user refuses to buy the elephant, we remove one suggestion,
# so it looks as if something changes :)
sessionStorage = {}
Code Example #11
from flask import Flask, render_template, redirect, request
import os
import sqlite3
import config as con
from work_with_db import db_session
from forms.user import RegisterForm, LoginForm
from work_with_db.Users import User
from flask_login import LoginManager, login_user
from locations import location_forest, location_caves, location_fields, attack
from flask_ngrok import run_with_ngrok


con.app = Flask(__name__)
run_with_ngrok(con.app)
con.app.config['SECRET_KEY'] = 'yandexlyceum_secret_key'
login_manager = LoginManager()
login_manager.init_app(con.app)
item = ''


@login_manager.user_loader
def load_user(user_id):
    db_sess = db_session.create_session()
    return db_sess.query(User).get(user_id)


@con.app.route('/', methods=['GET', 'POST'])
@con.app.route('/log_in', methods=['GET', 'POST'])
def login():
    con.hero.name = None
    form = LoginForm()
Code Example #12
File: main.py  Project: manas-verma/stock-trading
def run_app():
    if get_arg() in ('colab', 'google-colab', 'google', 'notebook'):
        flask_ngrok.run_with_ngrok(app.app)  # patch the Flask instance (app.app), not the app module
        app.app.run()
    else:
        app.app.run(threaded=True, host='0.0.0.0', port=5000)
Code Example #13
import json
import os
import time

import tensorflow as tf  # TF 1.x-style tf.flags API is used below

from flask import Flask, jsonify, request
from flask_restful import Resource, Api
from flask_restful import reqparse
from flask_ngrok import run_with_ngrok

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

flags = tf.flags
FLAGS = flags.FLAGS

serverapp = Flask(__name__)
api = Api(serverapp)
run_with_ngrok(serverapp)  # Start ngrok when app is run

tokenizer = ""
estimator = ""
basic_tokenizer = ""
global eval_examples, eval_features
eval_features = list()
eval_examples = list()

## Required parameters
flags.DEFINE_string(
    "bert_config_file", "HanBert-54kN-MRC/bert_config.json",
    "The config json file corresponding to the pre-trained BERT model. "
    "This specifies the model architecture.")

flags.DEFINE_string("vocab_file", "HanBert-54kN-MRC/vocab_54k.txt",
Code Example #14
import os

from flask_ngrok import run_with_ngrok
from flask import Flask, render_template, request, send_from_directory, jsonify
from werkzeug.utils import secure_filename

from fastai.vision import *

app = Flask(__name__)
run_with_ngrok(app)  # Start ngrok when app is run
UPLOAD_DIR = '../uploads'
RESULT_DIR = 'results'
MAPPING = {
    0: '၀',
    1: '၁',
    2: '၂',
    3: '၃',
    4: '၄',
    5: '၅',
    6: '၆',
    7: '၇',
    8: '၈',
    9: '၉'
}


def create_folders():
    if not os.path.isdir(UPLOAD_DIR):
        os.makedirs(UPLOAD_DIR)
    if not os.path.isdir(RESULT_DIR):
        os.makedirs(RESULT_DIR)
Code Example #15
File: flask_app.py  Project: PolkovnikovPavel/all_web
from flask import Flask, request
from flask_ngrok import run_with_ngrok
import logging

import json

app = Flask(__name__)
run_with_ngrok(app)  # so that the skill can be reached over https,
# because the Webhook URL must use the https scheme; plain http does not work

logging.basicConfig(level=logging.INFO)

sessionStorage = {}


@app.route('/post', methods=['POST'])
def main():
    logging.info(f'Request: {request.json!r}')

    response = {
        'session': request.json['session'],
        'version': request.json['version'],
        'response': {
            'end_session': False
        }
    }

    handle_dialog(request.json, response)
    logging.info(f'Response:  {response!r}')
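
Note: the excerpt is cut off here; the handler presumably finishes by serializing the reply, e.g. return json.dumps(response).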
Code Example #16
        parser.add_argument('nX')


        args = parser.parse_args()  # creates dict
        print(args.values())
        X_new = np.fromiter(args.values(), dtype=float)  # convert input to array
        out = {'Prediction': BIO_MODEL.predict([X_new])[0]}
        print(out)
        return out, 200

if __name__ == '__main__':
    # serialize model
    # joblib.dump(clf, 'biodeg.mdl')

    app = Flask(__name__)
    run_with_ngrok(app)  # starts ngrok when the app is run, only for Google Colab
    API = Api(app)
    BIO_MODEL = joblib.load('./biodeg.mdl')

    @app.route("/")
    def home():
        return "<!DOCTYPE html>\
        <html>\
        <body>\
        <h2>HTML Forms</h2>\
        <form method='post' action='/predict'>\
        <label for='SpMax_L'>SpMax_L:</label><br>\
        <input type='text' id='SpMax_L' name='SpMax_L' value=''><br>\
        <label for='J_Dz(e)'>J_Dz(e):</label><br>\
        <input type='text' id='J_Dz(e)' name='J_Dz(e)' value=''><br>\
        <label for='nHM'>nHM:</label><br>\
        <input type='text' id='nHM' name='nHM' value=''><br>\
Code Example #17
def main():
    run_with_ngrok(app)
    app.run()
Code Example #18
import torch
from flask_ngrok import run_with_ngrok
import flask
from flask import Flask, request, render_template
import json
from transformers import BartTokenizer, BartForConditionalGeneration, BartConfig
from transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config

BART_PATH = 'bart-large'
T5_PATH = 't5-base'
# BART_PATH = 'model/bart'
# T5_PATH = 'model/t5'

app = Flask(__name__)
# ngrok for this to work in Google Colab - https://medium.com/@kshitijvijay271199/flask-on-google-colab-f6525986797b
run_with_ngrok(app)  #starts ngrok when the app is run
bart_model = BartForConditionalGeneration.from_pretrained(BART_PATH,
                                                          output_past=True)
bart_tokenizer = BartTokenizer.from_pretrained(BART_PATH, output_past=True)

t5_model = T5ForConditionalGeneration.from_pretrained(T5_PATH)
t5_tokenizer = T5Tokenizer.from_pretrained(T5_PATH)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


def bart_summarize(input_text, num_beams=4, num_words=50):
    input_text = str(input_text)
    input_text = ' '.join(input_text.split())
    input_tokenized = bart_tokenizer.encode(input_text,
                                            return_tensors='pt').to(device)
    summary_ids = bart_model.generate(input_tokenized,
Code Example #19
File: just_ship_it.py  Project: gstaff/just-ship-it
def ship_it():
    app = Flask(__name__)
    run_with_ngrok(app)
    app.config.SWAGGER_UI_DOC_EXPANSION = 'list'
    app.config.SWAGGER_UI_REQUEST_DURATION = True

    hostfile = sys.argv[0]
    is_running_in_notebook = 'ipykernel_launcher.py' in hostfile

    with open(hostfile, 'rb') as f:
        hash = hashlib.md5(f.read()).hexdigest()
    filename = os.path.basename(hostfile)
    api = Api(app,
              version=hash,
              title=f'{filename} API',
              description=f'API for {filename} as of {datetime.now()}',
              ordered=True)

    # ns = api.namespace('functions', description='Call these')
    # utility namespace? for dumping files etc?

    def call_with_request_data(f):
        @wraps(f)
        def wrapper(self):  # Discards the self passed in
            data = request.get_json(force=True)
            return f(*[v for k, v in data.items()])

        return wrapper

    def make_class(name, f):
        function_with_input = call_with_request_data(f)
        new_class = type(name, (Resource, ), {
            "post": function_with_input,
        })
        return new_class

    def add_resource(name, f):
        resource = make_class(name, f)
        api.route(f'/{name}', methods=['POST'])(resource)
        parameter_names = f.__code__.co_varnames
        # TODO: default values, example values, args, kwargs
        # TODO: See how fire gets parameter types etc https://flask-restplus.readthedocs.io/en/stable/swagger.html#documenting-the-fields
        # TODO: use api.param? or keep all in json? query params have size limits but are bookmarkable, but I guess
        # TODO: autogen a requests script to query server from python?
        # TODO: Validation
        # TODO: Better error messages, see fire trace @api.response(404, 'User not found.') api.abort(404)
        # TODO: working with file uploads?
        # TODO: Hot reloading???
        # TODO: Use type hints to guess field type if available
        # TODO: Use return doc to specify output shape @api.marshal_with(model) for output shape

        sig = signature(f)
        model = api.model(
            f'{name}_parameters', {
                n: fields.String(required=True, example=f"<{n}>")
                for n in parameter_names
            })
        api.doc(description=str(sig), body=model)(resource)

    if is_running_in_notebook:
        calling_module = sys.modules['__main__']
    else:
        calling_module = importlib.import_module(
            os.path.basename(hostfile)[:-3])
    functions = inspect.getmembers(calling_module, inspect.isfunction)
    for fn_name, fn in functions:
        if os.path.basename(fn.__code__.co_filename) == os.path.basename(hostfile)\
                or (is_running_in_notebook and '<ipython-input' in str(fn.__code__.co_filename)):
            add_resource(fn_name, fn)
    app.run()
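
Note: call_with_request_data unpacks the JSON body's values positionally, so every top-level function in the shipped script becomes a POST endpoint at /<function_name>. A hypothetical client call for a function add(a, b) defined in the host script:

import requests

requests.post('https://<your-ngrok-url>/add', json={'a': 1, 'b': 2})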
Code Example #20
File: app.py  Project: rkruser/ai4all-umd-2020
            return jsonify(results=results)

        except Exception:
            app.logger.debug("Error: %s", traceback.format_exc())
            return jsonify("invalid image url")

    elif request.method == 'POST':
        try:
            file = request.files['file']
            app.logger.debug('file uploaded - %s', file)
            url = request.form.get("url", None)
            app.logger.debug('url provided - %s', url)

            input_tensor = transform_image(read_file(upload=file, url=url))
            values, indices = get_topk(input_tensor)
            results = render_prediction(values, indices)
            return jsonify(results=results)

        except Exception:
            app.logger.debug("Error: %s", traceback.format_exc())
            return jsonify("invalid image")

    else:
        app.logger.debug("Error: unsupported request method %s", request.method)
        return jsonify('invalid request')


if __name__ == '__main__':
    run_with_ngrok(app)
    app.run()
Code Example #21
File: app.py  Project: johnnywong30/PubMix
#
#   Johnny Wong
#   Flask App for PubMix
#
#########################
import os

from flask import Flask, render_template, redirect, url_for, session, request, flash, get_flashed_messages
from flask_ngrok import run_with_ngrok

from util import db

# instantiate Flask Object
app = Flask(__name__)
app.secret_key = os.urandom(32)
run_with_ngrok(app) # start ngrok when the app is run

DB_FILE = "data/pubmix.db"
db.create_db()

@app.route("/")
def login():
    '''Login Page'''
    return render_template('index.html')

@app.route("/register")
def register():
    '''Register Page'''
    return render_template('register.html')

@app.route("/authenticate", methods=['POST'])