def test_default_done_callback(app):
    """A callback registered via add_default_done_callback runs on every future."""
    executor = Executor(app)

    def mark_future(future):
        # Tag the future so completion of the default callback is observable.
        setattr(future, 'test', 'test')

    executor.add_default_done_callback(mark_future)
    with app.test_request_context('/'):
        future = executor.submit(fib, 5)
        concurrent.futures.wait([future])
        assert hasattr(future, 'test')
def test_thread_decorator_map(default_app):
    """Functions decorated with executor.job support map() on a thread executor."""
    inputs = list(range(5))
    default_app.config['EXECUTOR_TYPE'] = 'thread'
    executor = Executor(default_app)

    @executor.job
    def decorated(n):
        return fib(n)

    with default_app.test_request_context(''):
        results = decorated.map(inputs)
        for n, result in zip(inputs, results):
            assert fib(n) == result
def test_thread_decorator_submit(default_app):
    """Functions decorated with executor.job support submit() on a thread executor."""
    default_app.config['EXECUTOR_TYPE'] = 'thread'
    executor = Executor(default_app)

    @executor.job
    def decorated(n):
        return fib(n)

    with default_app.test_request_context(''):
        future = decorated.submit(5)
        assert future.result() == fib(5)
def test_thread_decorator(app):
    """Decorating yields an ExecutorJob whose submit() returns a plain Future."""
    app.config['EXECUTOR_TYPE'] = 'thread'
    executor = Executor(app)

    @executor.job
    def decorated(n):
        return fib(n)

    # Exact-type checks are intentional here: the test pins the concrete classes.
    assert type(decorated) == ExecutorJob
    with app.app_context():
        future = decorated.submit(5)
        assert type(future) == concurrent.futures.Future
        assert future.result() == fib(5)
def test_pre_init_executor(default_app):
    """An Executor created without an app works once init_app() is called."""
    executor = Executor()

    @executor.job
    def decorated(n):
        return fib(n)

    assert executor
    executor.init_app(default_app)
    with default_app.test_request_context(''):
        future = decorated.submit(5)
        assert future.result() == fib(5)
def test_future_proxy(default_app):
    """submit() returns a FutureProxy that still honours the Future API."""
    executor = Executor(default_app)
    with default_app.test_request_context(''):
        future = executor.submit(pow, 2, 4)
        # The proxy must remain a Future subclass so stdlib helpers accept it.
        assert isinstance(future, concurrent.futures.Future)
        assert isinstance(future, FutureProxy)
        concurrent.futures.wait([future])
        # Standard Future methods and attributes work through the proxy.
        assert future._state == concurrent.futures._base.FINISHED
        assert future.done()
        assert future.exception(timeout=0) is None
def test_teardown_appcontext_is_not_called(default_app):
    """With EXECUTOR_PUSH_APP_CONTEXT disabled, teardown hooks never run, so
    thread-local state leaks into the next task and raises."""
    default_app.config['EXECUTOR_MAX_WORKERS'] = 1
    default_app.config['EXECUTOR_PUSH_APP_CONTEXT'] = False
    default_app.teardown_appcontext(clear_thread_local)
    executor = Executor(default_app)
    with pytest.raises(ValueError):
        with default_app.test_request_context():
            # Single worker: the second submission sees the stale thread-local.
            for _ in range(2):
                future = executor.submit(set_thread_local)
                concurrent.futures.wait([future])
                propagate_exceptions_callback(future)
def test_process_decorator(default_app):
    """Using decorators should fail with a TypeError when using the
    ProcessPoolExecutor.

    Rewritten from a manual try/except/else + ``assert 0`` to the idiomatic
    ``pytest.raises`` context manager, which reports a clearer failure.
    """
    default_app.config['EXECUTOR_TYPE'] = 'process'
    executor = Executor(default_app)
    with pytest.raises(TypeError):
        # Applying the decorator itself must raise for process executors.
        @executor.job
        def decorated(n):
            return fib(n)
def test_thread_decorator_submit_stored(default_app):
    """submit_stored() tracks decorated jobs in executor.futures until popped."""
    default_app.config['EXECUTOR_TYPE'] = 'thread'
    executor = Executor(default_app)

    @executor.job
    def decorated(n):
        return fib(n)

    with default_app.test_request_context():
        future = decorated.submit_stored('fibonacci', 35)
        # Long-running job: it must still be pending and tracked by key.
        assert executor.futures.done('fibonacci') is False
        assert future in executor.futures
        executor.futures.pop('fibonacci')
        assert future not in executor.futures
def buildpublish_start():
    """Start a build-publish background job unless one is already running.

    Returns a JSON payload: ``okay=True`` when the job was started, otherwise
    ``okay=False`` with an ``error`` message.
    """
    global executor
    # Lazily create the shared executor on first use.
    if executor is None:  # PEP 8: compare to None with `is`, not `==`
        executor = Executor(current_app)
    # if build already in progress, dont start another one
    if executor.futures._state('buildpublish') == 'RUNNING':
        return jsonify(okay=False, error='Build already in progress')
    # get server to publish to
    server = request.form.get('server') or None
    if server is None:
        return jsonify(okay=False, error='No server specified to publish to')
    # start a build-publish, keyed so its state can be polled later
    executor.submit_stored('buildpublish', buildpublish, server)
    return jsonify(okay=True)
def buildpublish_result():
    """Report the status of the stored build-publish job.

    Returns JSON: ``okay=False`` when no job exists, ``done=False`` with the
    current state while running, and ``done=True`` (popping the future) once
    the build has finished.
    """
    global executor
    # Lazily create the shared executor on first use.
    if executor is None:  # PEP 8: compare to None with `is`, not `==`
        executor = Executor(current_app)
    # if no build occurring, report that
    if executor.futures._state('buildpublish') is None:
        return jsonify(okay=False, error='No build currently in progress')
    # if build already in progress, dont start another one
    if not executor.futures.done('buildpublish'):
        return jsonify(okay=True, done=False,
                       status=executor.futures._state('buildpublish'))
    # if build finished, pop it off executor, and report back
    executor.futures.pop('buildpublish')
    return jsonify(okay=True, done=True)
def create_app():
    """Application factory: blueprint, background executor and ES/Graph config."""
    app = Flask(__name__)
    app.register_blueprint(bp)
    app.config['EXECUTOR'] = Executor(app)
    app.config['EXECUTOR_PROPAGATE_EXCEPTIONS'] = True
    # Messenger Send API endpoint, parameterised by the page access token.
    graph_endpoint = (
        "https://graph.facebook.com/v6.0/me/messages?access_token=%s"
        % os.getenv('PAGE_ACCESS_TOKEN', '')
    )
    app.config['GRAPH_MESSAGES_ENDPOINT'] = graph_endpoint
    app.config['ES_HEADER'] = [{
        "host": os.getenv('ELASTIC_HOST', ''),
        "port": os.getenv('ELASTIC_PORT', ''),
    }]
    print(app.config['ES_HEADER'])
    ping_es(app)
    return app
def create_app(): """ This method will create and return the flask REST API application. """ # Instantiate the application app = Flask(__name__, static_url_path="") # Set application configuration app_settings = os.getenv("APP_SETTINGS") app.config["APP_CONFIG"] = app_settings # Register blueprints from .apis import blueprint app.register_blueprint(blueprint) # Shell context for flask cli app.shell_context_processor({"app": app}) # Setup executor executor = Executor(app) return app
def create_app() -> Flask:
    """Build the Flask app: DI container, executor, routes and Bootstrap."""
    app = Flask(__name__)
    container = Container()
    container.config.from_yaml('config.yml')
    container.config.github.auth_token.from_env('GITHUB_TOKEN')
    # NOTE(review): this assigns the executor on the Container *class*, not on
    # the `container` instance created above — presumably so wired modules can
    # resolve it, but confirm it shouldn't be `container.executor`.
    Container.executor = Executor(app)
    container.wire(modules=[views])
    app.container = container
    app.add_url_rule('/analyse/json/<id>', 'analyse', views.analyse_json)
    app.add_url_rule('/upload', 'upload', views.file_upload, methods=['POST'])
    bootstrap = Bootstrap()
    bootstrap.init_app(app)
    # preload neural_net
    app.container.neural_net()
    return app
def init_flask():
    """Create the Flask app with its executor and HTTP basic-auth helper.

    Returns:
        Tuple of (app, executor, auth).
    """
    app = Flask(__name__)
    app.config['SECRET_KEY'] = str(uuid.uuid4())
    executor = Executor(app)
    auth = HTTPBasicAuth()
    # Templates resolve from the app's own loader first, then the service dirs.
    app.jinja_loader = jinja2.ChoiceLoader([
        app.jinja_loader,
        jinja2.FileSystemLoader(
            ['services/training/templates', 'services/prediction/templates']),
    ])

    @auth.get_password
    def get_password(username):
        # Look the user up in the configured USERS mapping; None denies access.
        if username in app.config['USERS']:
            return app.config['USERS'].get(username)
        return None

    return app, executor, auth
def create_app():
    """Application factory: config, DB, security, filters and blueprints."""
    user_datastore = SQLAlchemyUserDatastore(sqlalchemy_db, User, Role)
    app = Flask(__name__)

    # Choose the settings module: SEMI_PROD overrides FLASK_ENV.
    if os.getenv('SEMI_PROD', False):
        settings_path = os.path.join('settings', 'semi_production')
    else:
        settings_path = os.path.join(
            'settings', os.getenv('FLASK_ENV', 'development'))
    app.config.from_pyfile('{}.py'.format(settings_path))

    if 'REVERSE_PROXY_PATH' in app.config:
        ReverseProxyPrefixFix(app)

    app.logger.setLevel(logging.DEBUG)
    app.logger.addHandler(create_logger(app.config['LOG_PATH']))

    sqlalchemy_db.init_app(app)
    Security(app, user_datastore, login_form=ExtendedLoginForm)

    # register filters
    app.jinja_env.filters['datetime'] = format_date

    # Propagate background task exceptions
    app.config['EXECUTOR_PROPAGATE_EXCEPTIONS'] = True

    # register blueprints (registration order preserved)
    for blueprint in (main, collection, verification, token, recording,
                      session, user, application, configuration, shop, mos):
        app.register_blueprint(blueprint)

    app.executor = Executor(app)
    app.user_datastore = user_datastore
    return app
def test_sqlalchemy(default_app, caplog):
    """Each submitted task checks a connection out of the pool and returns it,
    which the pool's debug logging makes observable."""
    default_app.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
        'echo_pool': 'debug', 'echo': 'debug'}
    default_app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
    default_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    default_app.config['EXECUTOR_PUSH_APP_CONTEXT'] = True
    default_app.config['EXECUTOR_MAX_WORKERS'] = 1
    db = flask_sqlalchemy.SQLAlchemy(default_app)

    def run_query():
        return list(db.session.execute('select 1'))

    executor = Executor(default_app)
    with default_app.test_request_context():
        for _ in range(2):
            with caplog.at_level('DEBUG'):
                caplog.clear()
                future = executor.submit(run_query)
                concurrent.futures.wait([future])
                future.result()
                assert 'checked out from pool' in caplog.text
                assert 'being returned to pool' in caplog.text
def test_add_done_callback(default_app):
    """Exceptions thrown in callbacks can't be easily caught and make it hard
    to test for callback failure. To combat this, a global variable is used to
    store the value of an exception and test for its existence.
    """
    executor = Executor(default_app)
    global exception
    exception = None
    with default_app.test_request_context(''):
        future = executor.submit(time.sleep, 0.5)

        def record_failure(done_future):
            # Submitting from inside a callback must not raise RuntimeError.
            global exception
            try:
                executor.submit(time.sleep, 0)
            except RuntimeError as e:
                exception = e

        future.add_done_callback(record_failure)
        concurrent.futures.wait([future])
        assert exception is None
def test_default_executor():
    """With no EXECUTOR_TYPE configured, a ThreadPoolExecutor backs the extension."""
    app = Flask(__name__)
    executor = Executor(app)
    with app.app_context():
        executor.submit(fib, 5)
        # Exact-type check is intentional: pin the concrete default backend.
        assert type(executor._executor) == concurrent.futures.ThreadPoolExecutor
def test_init():
    """Constructing Executor(app) registers the extension on the app."""
    flask_app = Flask(__name__)
    ext = Executor(flask_app)
    assert 'executor' in flask_app.extensions
def test_factory_init():
    """init_app() on a bare Executor() also registers the extension."""
    flask_app = Flask(__name__)
    ext = Executor()
    ext.init_app(flask_app)
    assert 'executor' in flask_app.extensions
import os # 添加自己编写的算法 from Import_data import mainf from read_data import read_xml from DWPB_denoising import DWPB # Web应用程序设置 from flask_executor import Executor import pandas as pd from threading import Thread from sqlalchemy import create_engine from flask import Flask application = Flask(__name__) executor = Executor(application) # 获取mysql环境变量 env = os.environ # MYSQL_URI mysql+pymysql://test:[email protected]:3306/test mysql_uri = env.get('MYSQL_URI') sqlEngine = create_engine(mysql_uri, pool_recycle=3600) print ('=== mysql uri: ' + mysql_uri) # rest api(应用执行端口) @application.route('/') def hello(): executor.submit(threaded_task,'data') return b'mainf '
def test_submit(app):
    """submit() schedules the callable and its future yields the result."""
    result = Executor(app).submit(fib, 5).result()
    assert result == fib(5)
def test_max_workers(app):
    """EXECUTOR_MAX_WORKERS is passed through to the underlying pool."""
    expected_workers = 10
    app.config['EXECUTOR_MAX_WORKERS'] = expected_workers
    executor = Executor(app)
    assert executor._executor._max_workers == expected_workers
import logging
from typing import List

from flask import current_app as app
from flask import abort
from flask_executor import Executor

from .git import GitFile, GitPushEventHandler, DiffNotFoundError
from wbsync.synchronization import GraphDiffSyncAlgorithm, OntologySynchronizer
from wbsync.triplestore import WikibaseAdapter

from .webhook import WebHook
from .uris_factory import HerculesURIsFactory

# Shared background executor bound to the current application.
EXECUTOR = Executor(app)
LOGGER = logging.getLogger(__name__)
# GitHub webhook listening on /postreceive, validated with the configured secret.
WEBHOOK = WebHook(app, endpoint='/postreceive',
                  key=app.config['WEBHOOK_SECRET'])


@WEBHOOK.hook()
def on_push(data):
    # Handle a GitHub push event: collect the ontology files touched by the push.
    # (The function may continue beyond this chunk — only the diff-collection
    # part is visible here.)
    try:
        LOGGER.info("Got push with: %s", data)
        git_handler = GitPushEventHandler(data, app.config['GITHUB_OAUTH'])
        # NOTE(review): only 'ttl' files are extracted here — confirm intended.
        ontology_files = _extract_ontology_files(git_handler, 'ttl', None)
        LOGGER.info("Modified files: %s", ontology_files)
    except DiffNotFoundError:
        LOGGER.info("There was no diff to synchronize.")
        return 200, 'No diff'
self._fields = [] # self._include = [] self._exclude = [] self._set_fields() self.__prune_fields() def _set_fields(self): pass def __prune_fields(self): columns = inspect(self.__class__).columns if not self._fields: all_columns = set(columns.keys()) self._fields = list(all_columns - set(self._exclude)) def hide(self, *args): for key in args: self._fields.remove(key) return self def keys(self): return self._fields def __getitem__(self, key): return getattr(self, key) asynchronous_executor = Executor(name='asynchronous') transfor_executor = Executor(name='transfer')
    # --- continuation of a request handler (its def line is outside this
    # chunk) --- submit the detection job and answer 202 with a polling URI.
    future = executor.submit(vector_detect_process, action, g.session,
                             g.src_file, g.test_file, g.form.uuids.data,
                             **kwargs)
    future.add_done_callback(async_callback)
    ticket = g.session['ticket']
    return make_response(
        {
            'type': 'deferred',
            'ticket': ticket,
            'statusUri': "/jobs/status?ticket={ticket}".format(ticket=ticket)
        }, 202)


# FLASK ROUTES
executor = Executor()
bp = Blueprint('vector', __name__, url_prefix='/vector')
bp.before_request(_before_requests)
bp.teardown_request(clean_working_path)


@bp.route('/embed/fictitious', methods=['POST'])
def embed_fictitious():
    """**Flask POST rule**. Embed fictitious entries in the dataset.
    ---
    post:
      summary: Embed fictitious entries.
      description: Embed fictitious entries in the dataset, according to
        the given unique key.
      tags:
def test_process_executor_init(default_app):
    """EXECUTOR_TYPE = 'process' selects a ProcessPoolExecutor backend."""
    default_app.config['EXECUTOR_TYPE'] = 'process'
    executor = Executor(default_app)
    assert isinstance(executor._executor,
                      concurrent.futures.ProcessPoolExecutor)
from flask import Flask, redirect, url_for
from flask_executor import Executor
from flask import render_template
from send_email import send_email, send_email_2
from scraper import main_scraper
from flask import send_file
from flask import request
import json
import time
import csv
import os

#################################
app = Flask(__name__)
executor = Executor(app)

# ### Global Code ###############
chk = []  # shared state collected across requests
# MONITOR = True
caution = None  # global variable for sending response notification


def start_monitor():
    # NOTE(review): `monitor` is not defined anywhere in this chunk —
    # presumably an executor created elsewhere in the file; confirm, otherwise
    # calling this raises NameError.
    monitor.submit(monitor_stocks, 5)


count = 0


def sensor():
def test_default_executor_init(default_app):
    """By default the extension is backed by a ThreadPoolExecutor."""
    executor = Executor(default_app)
    assert isinstance(executor._executor,
                      concurrent.futures.ThreadPoolExecutor)