def run(host: Host, port: Port) -> None:  # pragma: nocover
    """Serve the current app with Werkzeug's development server.

    The reloader and debugger are enabled; `app.py` is watched for
    changes in addition to the default module set. Ctrl-C exits quietly.
    """
    from apistar.cli import get_current_app

    app = get_current_app()
    try:
        # Announce startup only once: the reloader child re-executes this
        # function, and we don't want the banner printed twice.
        if not is_running_from_reloader():
            click.echo('Starting up...')
        run_simple(
            host,
            port,
            app.wsgi,
            use_reloader=True,
            use_debugger=True,
            extra_files=['app.py'],
        )
    except KeyboardInterrupt:
        # Swallow Ctrl-C so the CLI exits without a traceback.
        pass
def exception_handler(environ: wsgi.WSGIEnviron, exc: Exception) -> http.Response:
    """Translate an exception raised by a handler into an HTTP response.

    Redirects (`exceptions.Found`) become an empty response carrying a
    `Location` header; API exceptions are rendered as their detail payload.
    Anything else is re-raised under the reloader (or when
    `APISTAR_RAISE_500_EXC` is set in the environ) so the debugger can catch
    it, and otherwise rendered as a plain-text 500 with the traceback.
    """
    if isinstance(exc, exceptions.Found):
        return http.Response('', exc.status_code, {'Location': exc.location})

    if isinstance(exc, exceptions.APIException):
        detail = exc.detail
        # A bare string detail is wrapped so clients always get a JSON object.
        payload = {'message': detail} if isinstance(detail, str) else detail
        return http.Response(payload, exc.status_code)

    if is_running_from_reloader() or environ.get('APISTAR_RAISE_500_EXC'):
        raise

    return http.Response(
        traceback.format_exc(),
        500,
        {'Content-Type': 'text/plain; charset=utf-8'},
    )
def start_http_server(self, port, host='0.0.0.0', endpoint='/metrics'):
    """
    Start an HTTP server for exposing the metrics.
    This will be an individual Flask application,
    not the one registered with this class.

    :param port: the HTTP port to expose the metrics endpoint on
    :param host: the HTTP host to listen on (default: `0.0.0.0`)
    :param endpoint: the URL path to expose the endpoint on
        (default: `/metrics`)
    """
    # The Werkzeug reloader runs the application twice (parent + child);
    # only start the metrics server in one of them.
    if is_running_from_reloader():
        return

    app = Flask('prometheus-flask-exporter-%d' % port)
    self.register_endpoint(endpoint, app)

    def run_app():
        app.run(host=host, port=port)

    # Fix: pass daemon=True at construction instead of calling
    # Thread.setDaemon(), which has been deprecated since Python 3.10.
    # A daemon thread lets the interpreter exit without joining the server.
    thread = threading.Thread(target=run_app, daemon=True)
    thread.start()
# --- Logging setup -------------------------------------------------------
log_level = logging.DEBUG  # Keep stream handler for Werkzeug

logging.getLogger().setLevel(log_level)

# Init logging for UDP / TCP server
file_handler = add_file_handler('DestruckServer.log', parent='DestruckServer',
                                level=log_level, filemode='a')
stream_handler = add_stream_handler(parent='DestruckServer', level=log_level)

# Only stream my logs but store https server logs (werkzeug)
logging.getLogger('werkzeug').addHandler(file_handler)

# --- Application ---------------------------------------------------------
# Python anywhere
from wsgi import create_app

app, socketio, udpServer = create_app('dev')

# Also add handler to Flask's logger for cases where Werkzeug isn't used as
# the underlying WSGI server.
# app.logger.addHandler(file_handler)

port = int(os.environ.get('PORT', 5000))

# Wrap with socketIO — this call blocks until the server shuts down.
socketio.run(app, host='0.0.0.0', port=port, debug=True)

# Closing gracefully UDP server (skip in the reloader child so the parent's
# server isn't stopped twice)
if not is_running_from_reloader():
    udpServer.stop()
def return_app_and_start_background():
    """Start the background worker (once, outside the reloader child) and return the app."""
    # Guard clause: the Werkzeug reloader re-imports the app in a child
    # process; only the non-reloader process should start the worker.
    if is_running_from_reloader():
        return app
    background.start()
    logger.info('STARTING BACKGROUND PROCESS')
    return app
"""the entry point for the app""" import threading from werkzeug.serving import is_running_from_reloader from app.models import Stats from app import app, db import temp @app.shell_context_processor def make_shell_context(): """remember to add new db tables!""" return {'db': db, 'Stats': Stats} if not is_running_from_reloader(): threading.Thread(target=temp.temp_record_deamon).start() threading.Thread(target=temp.temp_cleanup_deamon).start()
def create_app():
    """Build the Flask app exposing the multi-slot filtergraph REST API.

    Serves CRUD endpoints for nodes/connections per project slot, project
    and configuration management, and starts the LED processing loop on a
    repeating ``threading.Timer``. Relies on module-level globals
    (``proj``, ``serverconfig``, ``device``, ``errors``, ``dataLock``,
    ``POOL_TIME``, ``default_values``, ``ledThread`` …) shared with the
    rest of this module.

    Fixes applied in this revision:
    - ``inspect.getargspec`` (removed in Python 3.11) → ``getfullargspec``.
    - local ``json = request.json`` no longer shadows the ``json`` module.
    - allowlist check in ``getModuleAndClassName`` uses set membership.
    - ``is_running_from_reloader() is False`` → ``not is_running_from_reloader()``.
    """
    app = Flask(__name__, static_url_path='/')

    def store_configuration():
        # Persist config in a separate process, then refresh hashes locally.
        global serverconfig
        p = multiprocessing.Process(target=multiprocessing_func, args=(serverconfig, ))
        p.start()
        p.join()
        # Update MD5 hashes from file, since data was written in separate process
        serverconfig.updateMd5HashFromFiles()
        serverconfig.postStore()

    sched = BackgroundScheduler(daemon=True)
    sched.add_job(store_configuration, 'interval', seconds=5)
    sched.start()

    def interrupt():
        # Cancel the pending LED timer so the process can exit cleanly.
        print('cancelling LED thread')
        global ledThread
        ledThread.cancel()
        ledThread.join()
        print('LED thread cancelled')

    @app.after_request
    def add_header(response):
        # Disable client-side caching for all responses.
        response.cache_control.max_age = 0
        return response

    @app.route('/')
    def home():
        return redirect("./index.html", code=302)

    @app.route('/<path:path>')
    def send_js(path):
        return send_from_directory('resources', path)

    @app.route('/slot/<int:slotId>/nodes', methods=['GET'])
    def slot_slotId_nodes_get(slotId):
        global proj
        fg = proj.getSlot(slotId)
        nodes = [node for node in fg._filterNodes]
        return jsonpickle.encode(nodes)

    @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['GET'])
    def slot_slotId_node_uid_get(slotId, nodeUid):
        global proj
        fg = proj.getSlot(slotId)
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            return jsonpickle.encode(node)
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['DELETE'])
    def slot_slotId_node_uid_delete(slotId, nodeUid):
        global proj
        fg = proj.getSlot(slotId)
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            fg.removeEffectNode(node.effect)
            return "OK"
        except StopIteration:
            abort(404, "Node not found")

    # NOTE(review): 'UPDATE' is a non-standard HTTP verb — PUT/PATCH would be
    # conventional, but the client presumably sends UPDATE; confirm before changing.
    @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['UPDATE'])
    def slot_slotId_node_uid_update(slotId, nodeUid):
        global proj
        fg = proj.getSlot(slotId)
        if not request.json:
            abort(400)
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            # data = json.loads(request.json)
            print(request.json)
            node.effect.updateParameter(request.json)
            return jsonpickle.encode(node)
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>/parameter', methods=['GET'])
    def slot_slotId_node_uid_parameter_get(slotId, nodeUid):
        global proj
        fg = proj.getSlot(slotId)
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            return json.dumps(node.effect.getParameter())
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>/effect', methods=['GET'])
    def node_uid_effectname_get(slotId, nodeUid):
        global proj
        fg = proj.getSlot(slotId)
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            return json.dumps(getFullClassName(node.effect))
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node', methods=['POST'])
    def slot_slotId_node_post(slotId):
        # Body: [full_class_name, {constructor kwargs}].
        global proj
        fg = proj.getSlot(slotId)
        if not request.json:
            abort(400)
        full_class_name = request.json[0]
        parameters = request.json[1]
        print(parameters)
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        instance = class_(**parameters)
        node = fg.addEffectNode(instance)
        return jsonpickle.encode(node)

    @app.route('/slot/<int:slotId>/connections', methods=['GET'])
    def slot_slotId_connections_get(slotId):
        global proj
        fg = proj.getSlot(slotId)
        connections = [con for con in fg._filterConnections]
        return jsonpickle.encode(connections)

    @app.route('/slot/<int:slotId>/connection', methods=['POST'])
    def slot_slotId_connection_post(slotId):
        global proj
        fg = proj.getSlot(slotId)
        if not request.json:
            abort(400)
        # Renamed from `json` to avoid shadowing the json module.
        payload = request.json
        connection = fg.addNodeConnection(payload['from_node_uid'],
                                          int(payload['from_node_channel']),
                                          payload['to_node_uid'],
                                          int(payload['to_node_channel']))
        return jsonpickle.encode(connection)

    @app.route('/slot/<int:slotId>/connection/<connectionUid>', methods=['DELETE'])
    def slot_slotId_connection_uid_delete(slotId, connectionUid):
        global proj
        fg = proj.getSlot(slotId)
        try:
            connection = next(connection for connection in fg._filterConnections
                              if connection.uid == connectionUid)
            fg.removeConnection(connection.fromNode.effect, connection.fromChannel,
                                connection.toNode.effect, connection.toChannel)
            return "OK"
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/configuration', methods=['GET'])
    def slot_slotId_configuration_get(slotId):
        global proj
        fg = proj.getSlot(slotId)
        config = jsonpickle.encode(fg)
        return config

    @app.route('/slot/<int:slotId>/configuration', methods=['POST'])
    def slot_slotId_configuration_post(slotId):
        # SECURITY NOTE(review): jsonpickle.decode instantiates arbitrary
        # classes from the request body — only safe on a trusted network.
        global proj
        if not request.json:
            abort(400)
        newGraph = jsonpickle.decode(request.json)
        if not isinstance(newGraph, filtergraph.FilterGraph):
            raise RuntimeError("Not a FilterGraph")
        proj.setFiltergraphForSlot(slotId, newGraph)
        return "OK"

    @app.route('/effects', methods=['GET'])
    def effects_get():
        childclasses = inheritors(effects.Effect)
        return jsonpickle.encode([child for child in childclasses])

    @app.route('/effect/<full_class_name>/description', methods=['GET'])
    def effect_effectname_description_get(full_class_name):
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        return class_.getEffectDescription()

    @app.route('/effect/<full_class_name>/args', methods=['GET'])
    def effect_effectname_args_get(full_class_name):
        # Report the effect constructor's arguments with their defaults
        # (None for args without a default; overridden by default_values).
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        # Fix: inspect.getargspec was removed in Python 3.11.
        argspec = inspect.getfullargspec(class_.__init__)
        if argspec.defaults is not None:
            argsWithDefaults = dict(
                zip(argspec.args[-len(argspec.defaults):], argspec.defaults))
        else:
            argsWithDefaults = dict()
        result = argsWithDefaults.copy()
        if argspec.defaults is not None:
            result.update({
                key: None
                for key in argspec.args[1:len(argspec.args) - len(argspec.defaults)]
            })  # 1 removes self
        result.update({
            key: default_values[key]
            for key in default_values if key in result
        })
        print(result)
        return jsonify(result)

    @app.route('/effect/<full_class_name>/parameter', methods=['GET'])
    def effect_effectname_parameters_get(full_class_name):
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        return json.dumps(class_.getParameterDefinition())

    @app.route('/effect/<full_class_name>/parameterHelp', methods=['GET'])
    def effect_effectname_parameterhelp_get(full_class_name):
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        return json.dumps(class_.getParameterHelp())

    # Allowlist of modules that effect classes may be loaded from; anything
    # else is rejected to keep import_module from loading arbitrary code.
    _ALLOWED_MODULES = {
        "audioled.audio", "audioled.effects", "audioled.devices",
        "audioled.colors", "audioled.audioreactive", "audioled.generative",
        "audioled.input", "audioled.panelize",
    }

    def getModuleAndClassName(full_class_name):
        """Split 'pkg.mod.Class' and validate the module against the allowlist."""
        module_name, class_name = full_class_name.rsplit(".", 1)
        if module_name not in _ALLOWED_MODULES:
            raise RuntimeError("Not allowed")
        return module_name, class_name

    def getFullClassName(o):
        """Return the dotted module.Class name of an object (bare name for builtins)."""
        module = o.__class__.__module__
        if module is None or module == str.__class__.__module__:
            return o.__class__.__name__
        else:
            return module + '.' + o.__class__.__name__

    def inheritors(klass):
        """Collect all (transitive) subclasses of klass via breadth-first walk."""
        subclasses = set()
        work = [klass]
        while work:
            parent = work.pop()
            for child in parent.__subclasses__():
                if child not in subclasses:
                    subclasses.add(child)
                    work.append(child)
        return subclasses

    @app.route('/errors', methods=['GET'])
    def errors_get():
        result = {}
        for error in errors:
            result[error.node.uid] = error.message
        return json.dumps(result)

    @app.route('/project/activeSlot', methods=['POST'])
    def project_activeSlot_post():
        global proj
        if not request.json:
            abort(400)
        value = request.json['slot']
        # print("Activating slot {}".format(value))
        proj.activateSlot(value)
        return "OK"

    @app.route('/project/activeSlot', methods=['GET'])
    def project_activeSlot_get():
        global proj
        return jsonify({'slot': proj.activeSlotId})

    @app.route('/project/assets/<path:path>', methods=['GET'])
    def project_assets_get(path):
        global serverconfig
        global proj
        asset = serverconfig.getProjectAsset(proj.id, path)
        # NOTE(review): attachment_filename was removed in Flask 2.0 in
        # favour of download_name — confirm the pinned Flask version.
        return send_file(asset[0], attachment_filename=asset[1], mimetype=asset[2])

    @app.route('/project/assets', methods=['POST'])
    def project_assets_post():
        # Accepts only .gif uploads as project assets.
        global serverconfig
        global proj
        if 'file' not in request.files:
            print("No file in request")
            abort(400)
        file = request.files['file']
        if file.filename == '':
            print("File has no filename")
            abort(400)
        if file and '.' in file.filename and file.filename.rsplit(
                '.', 1)[1].lower() in ['gif']:
            print("Adding asset to proj {}".format(proj.id))
            filename = serverconfig.addProjectAsset(proj.id, file)
            return jsonify({'filename': filename})
        print("Unknown content for asset: {}".format(file.filename))
        abort(400)

    @app.route('/projects', methods=['GET'])
    def projects_get():
        global serverconfig
        return jsonify(serverconfig.getProjectsMetadata())

    @app.route('/projects', methods=['POST'])
    def projects_post():
        global serverconfig
        if not request.json:
            abort(400)
        title = request.json.get('title', '')
        description = request.json.get('description', '')
        metadata = serverconfig.createEmptyProject(title, description)
        return jsonify(metadata)

    @app.route('/projects/import', methods=['POST'])
    def projects_import_post():
        global serverconfig
        if not request.json:
            abort(400)
        metadata = serverconfig.importProject(request.json)
        return jsonify(metadata)

    @app.route('/projects/<uid>/export', methods=['GET'])
    def projects_project_export(uid):
        global serverconfig
        proj = serverconfig.getProject(uid)
        if proj is not None:
            print("Exporting project {}".format(uid))
            return jsonpickle.encode(proj)
        abort(404)

    @app.route('/projects/<uid>', methods=['DELETE'])
    def projects_project_delete(uid):
        global serverconfig
        serverconfig.deleteProject(uid)
        return "OK"

    @app.route('/projects/activeProject', methods=['POST'])
    def projects_activeProject_post():
        global serverconfig
        global proj
        if not request.json:
            abort(400)
        uid = request.json['project']
        print("Activating project {}".format(uid))
        try:
            proj = serverconfig.activateProject(uid)
        except Exception as e:
            print("Error opening project: {}".format(e))
            # Fall back to the default project if nothing else is active.
            if serverconfig._activeProject is None:
                serverconfig.initDefaultProject()
                abort(
                    500,
                    "Could not active project. No other project found. Initializing default."
                )
            else:
                abort(500, "Project could not be activated. Reason: {}".format(e))
        return "OK"

    @app.route('/configuration', methods=['GET'])
    def configuration_get():
        global serverconfig
        return jsonify({
            'parameters': serverconfig.getConfigurationParameters(),
            'values': serverconfig.getFullConfiguration()
        })

    @app.route('/configuration', methods=['UPDATE'])
    def configuration_put():
        global serverconfig
        if not request.json:
            abort(400)
        for key, value in request.json.items():
            serverconfig.setConfiguration(key, value)
        return jsonify(serverconfig.getFullConfiguration())

    @app.route('/remote/brightness', methods=['POST'])
    def remote_brightness_post():
        # ?value= is a percentage 0..100; device expects 0.0..1.0.
        global device
        value = int(request.args.get('value'))
        floatVal = float(value / 100)
        print("Setting brightness: {}".format(floatVal))
        device.setBrightness(floatVal)
        return "OK"

    @app.route('/remote/favorites/<id>', methods=['POST'])
    def remote_favorites_id_post(id):
        filename = "favorites/{}.json".format(id)
        global proj
        if os.path.isfile(filename):
            with open(filename, "r") as f:
                fg = jsonpickle.decode(f.read())
                proj.setFiltergraphForSlot(proj.activeSlotId, fg)
            return "OK"
        else:
            print("Favorite not found: {}".format(filename))
            abort(404)

    def processLED():
        """One LED processing tick; re-schedules itself via threading.Timer."""
        global proj
        global ledThread
        global event_loop
        global last_time
        global current_time
        global errors
        global count
        global record_timings
        dt = 0
        try:
            with dataLock:
                last_time = current_time
                current_time = timer()
                dt = current_time - last_time
                count = count + 1
                # Lazily create the asyncio loop on this timer thread.
                if event_loop is None:
                    event_loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(event_loop)
                proj.update(dt, event_loop)
                proj.process()
                # clear errors (if any have occured in the current run, we wouldn't reach this)
                errors.clear()
        except filtergraph.NodeException as ne:
            print("NodeError in {}: {}".format(ne.node.effect, ne))
            errors.clear()
            errors.append(ne)
        except Exception as e:
            print("Unknown error: {}".format(e))
            traceback.print_tb(e.__traceback__)
        finally:
            # Set the next thread to happen
            real_process_time = timer() - current_time
            timeToWait = max(POOL_TIME, 0.01 - real_process_time)
            # Every 100 ticks, optionally dump timing diagnostics.
            if count == 100:
                if record_timings:
                    proj.getSlot(proj.activeSlotId).printProcessTimings()
                    proj.getSlot(proj.activeSlotId).printUpdateTimings()
                    print("Process time: {}".format(real_process_time))
                    print("Waiting {}".format(timeToWait))
                count = 0
            ledThread = threading.Timer(timeToWait, processLED, ())
            ledThread.start()

    def startLEDThread():
        # Do initialisation stuff here
        global ledThread
        global last_time
        global current_time
        # Create your thread
        current_time = timer()
        ledThread = threading.Timer(POOL_TIME, processLED, ())
        print('starting LED thread')
        ledThread.start()

    # Initiate — skip in the reloader child so only one LED loop runs.
    if not is_running_from_reloader():
        startLEDThread()

    # When you kill Flask (SIGTERM), clear the trigger for the next thread
    atexit.register(interrupt)
    return app
def create_app():
    """Build the Flask app exposing the single-filtergraph REST API.

    CRUD endpoints operate on one module-global filtergraph ``fg``; the LED
    processing loop is driven by a self-rescheduling ``threading.Timer``.
    Relies on module-level globals (``fg``, ``device``, ``errors``,
    ``dataLock``, ``POOL_TIME``, ``default_values``, ``ledThread`` …).

    Fixes applied in this revision:
    - ``inspect.getargspec`` (removed in Python 3.11) → ``getfullargspec``.
    - crash fix: the /args handler no longer raises ``TypeError`` when the
      effect's ``__init__`` has no defaults (``len(None)``) — now guarded
      the same way as the multi-slot server's handler.
    - local ``json = request.json`` no longer shadows the ``json`` module.
    - ``is_running_from_reloader() == False`` → ``not is_running_from_reloader()``.
    """
    app = Flask(__name__, static_url_path='/')

    def interrupt():
        # Cancel the pending LED timer so the process can exit cleanly.
        print('cancelling LED thread')
        global ledThread
        ledThread.cancel()
        ledThread.join()
        print('LED thread cancelled')

    @app.after_request
    def add_header(response):
        # Disable client-side caching for all responses.
        response.cache_control.max_age = 0
        return response

    @app.route('/<path:path>')
    def send_js(path):
        return send_from_directory('resources', path)

    @app.route('/nodes', methods=['GET'])
    def nodes_get():
        global fg
        nodes = [node for node in fg._filterNodes]
        return jsonpickle.encode(nodes)

    @app.route('/node/<nodeUid>', methods=['GET'])
    def node_uid_get(nodeUid):
        global fg
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            return jsonpickle.encode(node)
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/node/<nodeUid>', methods=['DELETE'])
    def node_uid_delete(nodeUid):
        global fg
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            fg.removeEffectNode(node.effect)
            return "OK"
        except StopIteration:
            abort(404, "Node not found")

    # NOTE(review): 'UPDATE' is a non-standard HTTP verb — PUT/PATCH would be
    # conventional, but the client presumably sends UPDATE; confirm before changing.
    @app.route('/node/<nodeUid>', methods=['UPDATE'])
    def node_uid_update(nodeUid):
        global fg
        if not request.json:
            abort(400)
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            # data = json.loads(request.json)
            print(request.json)
            node.effect.updateParameter(request.json)
            return jsonpickle.encode(node)
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/node/<nodeUid>/parameter', methods=['GET'])
    def node_uid_parameter_get(nodeUid):
        global fg
        try:
            node = next(node for node in fg._filterNodes if node.uid == nodeUid)
            return json.dumps(node.effect.getParameter())
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/node', methods=['POST'])
    def node_post():
        # Body: [full_class_name, {constructor kwargs}].
        global fg
        if not request.json:
            abort(400)
        full_class_name = request.json[0]
        parameters = request.json[1]
        print(parameters)
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        instance = class_(**parameters)
        node = fg.addEffectNode(instance)
        return jsonpickle.encode(node)

    @app.route('/connections', methods=['GET'])
    def connections_get():
        global fg
        connections = [con for con in fg._filterConnections]
        return jsonpickle.encode(connections)

    @app.route('/connection', methods=['POST'])
    def connection_post():
        global fg
        if not request.json:
            abort(400)
        # Renamed from `json` to avoid shadowing the json module.
        payload = request.json
        connection = fg.addNodeConnection(payload['from_node_uid'],
                                          int(payload['from_node_channel']),
                                          payload['to_node_uid'],
                                          int(payload['to_node_channel']))
        return jsonpickle.encode(connection)

    @app.route('/connection/<connectionUid>', methods=['DELETE'])
    def connection_uid_delete(connectionUid):
        global fg
        try:
            connection = next(connection for connection in fg._filterConnections
                              if connection.uid == connectionUid)
            fg.removeConnection(connection.fromNode.effect, connection.fromChannel,
                                connection.toNode.effect, connection.toChannel)
            return "OK"
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/effects', methods=['GET'])
    def effects_get():
        childclasses = inheritors(effects.Effect)
        return jsonpickle.encode([child for child in childclasses])

    @app.route('/effect/<full_class_name>/args', methods=['GET'])
    def effect_effectname_args_get(full_class_name):
        # Report the effect constructor's arguments with their defaults
        # (None for args without a default; overridden by default_values).
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        # Fix: inspect.getargspec was removed in Python 3.11.
        argspec = inspect.getfullargspec(class_.__init__)
        # Fix: guard against __init__ without defaults (len(None) raised
        # TypeError here before).
        if argspec.defaults is not None:
            argsWithDefaults = dict(
                zip(argspec.args[-len(argspec.defaults):], argspec.defaults))
        else:
            argsWithDefaults = dict()
        result = argsWithDefaults.copy()
        if argspec.defaults is not None:
            result.update({
                key: None
                for key in argspec.args[1:len(argspec.args) - len(argspec.defaults)]
            })  # 1 removes self
        result.update({
            key: default_values[key]
            for key in default_values if key in result
        })
        print(result)
        return jsonify(result)

    @app.route('/effect/<full_class_name>/parameter', methods=['GET'])
    def effect_effectname_parameters_get(full_class_name):
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        return json.dumps(class_.getParameterDefinition())

    # Allowlist of modules that effect classes may be loaded from; anything
    # else is rejected to keep import_module from loading arbitrary code.
    _ALLOWED_MODULES = {
        "audioled.audio", "audioled.effects", "audioled.devices",
        "audioled.colors", "audioled.audioreactive", "audioled.generative",
        "audioled.input",
    }

    def getModuleAndClassName(full_class_name):
        """Split 'pkg.mod.Class' and validate the module against the allowlist."""
        module_name, class_name = full_class_name.rsplit(".", 1)
        if module_name not in _ALLOWED_MODULES:
            raise RuntimeError("Not allowed")
        return module_name, class_name

    def inheritors(klass):
        """Collect all (transitive) subclasses of klass via breadth-first walk."""
        subclasses = set()
        work = [klass]
        while work:
            parent = work.pop()
            for child in parent.__subclasses__():
                if child not in subclasses:
                    subclasses.add(child)
                    work.append(child)
        return subclasses

    @app.route('/errors', methods=['GET'])
    def errors_get():
        result = {}
        for error in errors:
            result[error.node.uid] = error.message
        return json.dumps(result)

    @app.route('/configuration', methods=['GET'])
    def configuration_get():
        config = jsonpickle.encode(fg)
        return config

    @app.route('/configuration', methods=['POST'])
    def configuration_post():
        # SECURITY NOTE(review): jsonpickle.decode instantiates arbitrary
        # classes from the request body — only safe on a trusted network.
        global fg
        if not request.json:
            abort(400)
        fg = jsonpickle.decode(request.json)
        return "OK"

    @app.route('/remote/brightness', methods=['POST'])
    def remote_brightness_post():
        # ?value= is a percentage 0..100; device expects 0.0..1.0.
        global device
        value = int(request.args.get('value'))
        floatVal = float(value / 100)
        print("Setting brightness: {}".format(floatVal))
        device.setBrightness(floatVal)
        return "OK"

    @app.route('/remote/favorites/<id>', methods=['POST'])
    def remote_favorites_id_post(id):
        filename = "favorites/{}.json".format(id)
        global fg
        if os.path.isfile(filename):
            with open(filename, "r") as f:
                fg = jsonpickle.decode(f.read())
            return "OK"
        else:
            print("Favorite not found: {}".format(filename))
            abort(404)

    def processLED():
        """One LED processing tick; re-schedules itself via threading.Timer."""
        global fg
        global ledThread
        global event_loop
        global last_time
        global current_time
        global errors
        dt = 0
        try:
            with dataLock:
                last_time = current_time
                current_time = timer()
                dt = current_time - last_time
                # Lazily create the asyncio loop on this timer thread.
                if event_loop is None:
                    event_loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(event_loop)
                fg.update(dt, event_loop)
                fg.process()
                # clear errors (if any have occured in the current run, we wouldn't reach this)
                errors.clear()
        except filtergraph.NodeException as ne:
            print("NodeError: {}".format(ne))
            errors.clear()
            errors.append(ne)
        except Exception as e:
            print("Unknown error: {}".format(e))
        finally:
            # fg.printProcessTimings()
            # Set the next thread to happen
            real_process_time = timer() - current_time
            timeToWait = max(POOL_TIME, 0.01 - real_process_time)
            ledThread = threading.Timer(timeToWait, processLED, ())
            ledThread.start()

    def startLEDThread():
        # Do initialisation stuff here
        global ledThread
        global last_time
        global current_time
        # Create your thread
        current_time = timer()
        ledThread = threading.Timer(POOL_TIME, processLED, ())
        print('starting LED thread')
        ledThread.start()

    def loadConfig(json):
        # Parameter name kept for backward compatibility with keyword callers,
        # even though it shadows the json module inside this function.
        global fg
        fg = jsonpickle.decode(json)

    # Initiate — skip in the reloader child so only one LED loop runs.
    if not is_running_from_reloader():
        startLEDThread()

    # When you kill Flask (SIGTERM), clear the trigger for the next thread
    atexit.register(interrupt)
    return app
def create_app(): logger.info("Creating app") app = Flask(__name__) logger.debug("App created") def store_configuration(): if stop_signal: return try: global serverconfig p = multiprocessing.Process(target=multiprocessing_func, args=(serverconfig, )) p.start() p.join(30) if p.is_alive(): app.logger.warning("Storing configuration took too long") # Update MD5 hashes from file, since data was written in separate process serverconfig.updateMd5HashFromFiles() serverconfig.postStore() except Exception as e: app.logger.error("ERROR on storing configuration: {}".format(e)) sched = BackgroundScheduler(daemon=True) trigger = interval.IntervalTrigger(seconds=5) sched.add_job(store_configuration, trigger=trigger, id='store_config_job', replace_existing=True) # sched.add_job(check_midi, 'interval', seconds=1) sched.start() def interrupt(): try: global stop_lock global stop_signal stop_signal = True app.logger.debug("Waiting for stop lock") app.logger.warning("Signal received. Stopping...") stop_lock.acquire() app.logger.debug("Interrupt") global ledThread global midiThread global proj global midiBluetooth global midiCtrlPortOut global server try: parent = psutil.Process(os.getpid()) children = parent.children(recursive=True) app.logger.warning("Handling signal in {}".format(parent)) for child in children: app.logger.warning( "Child process active: {}".format(child)) except Exception: pass try: if server is not None: app.logger.warning("Shutting down GRPC server") server.stop(2) app.logger.warning("Shutdown GRPC server complete") except Exception as e: app.logger.error( "Error shutting down GRPC server: {}".format(e)) if midiCtrlPortOut is not None: app.logger.warning("Shutting down MIDI control ports") for channel in range(16): midiCtrlPortOut.send( mido.Message('control_change', channel=channel, control=121)) midiCtrlPortOut.close() app.logger.warning("Shutdown MIDI control ports complete") midiCtrlPortOut = None try: if midiBluetooth is not None: app.logger.warning("Shutting 
down MIDI bluetooth") midiBluetooth.shutdown() app.logger.warning("Shutdown MIDI bluetooth complete") except Exception as e: app.logger.error( "Error shutting down MIDI bluetooth: {}".format(e)) # stop_signal = True try: app.logger.warning("Shutting down MIDI thread") midiThread.join(2) if midiThread.is_alive(): logger.warning("Midi thread not joined. Terminating") midiThread.terminate() app.logger.warning("Shutdown MIDI thread complete") except Exception as e: app.logger.error( "Error shutting down MIDI thread: {}".format(e)) try: app.logger.warning("Shutting down LED Thread") ledThread.join(2) if ledThread.is_alive(): logger.warning("LED thread not joined. Terminating") ledThread.terminate() app.logger.warning("Shutdown LED Thread complete") except Exception as e: app.logger.error( "Error shutting down LED thread: {}".format(e)) try: app.logger.warning("Stopping processing of current project") proj.stopProcessing() app.logger.warning("Processing current project stopped") except Exception as e: app.logger.error( "Error shutting down current project: {}".format(e)) try: app.logger.warning("Shutting down Background Scheduler") # TODO: This contains a thread join and blocks sched._thread.join(2) sched.shutdown(wait=False) app.logger.warning('Shutdown Background scheduler complete') except Exception as e: app.logger.error( "Error shutting down Background scheduler: {}".format(e)) parent = psutil.Process(os.getpid()) children = parent.children(recursive=True) for child in children: app.logger.warning( "Child process still active: {}".format(child)) except Exception as e: app.logger.error("Unhandled exception in signal: {}".format(e)) finally: stop_lock.release() app.logger.warning("End of interrupt") def sigStop(sig, frame): interrupt() os.kill(os.getpid(), signal.SIGTERM) sys.exit(1) @app.after_request def add_header(response): response.cache_control.max_age = 0 return response @app.route('/') def home(): return redirect("./index.html", code=302) 
@app.route('/<path:path>') def send_js(path): return send_from_directory('resources', path) @app.route('/slot/<int:slotId>/nodes', methods=['GET']) # @lock_preview def slot_slotId_nodes_get(slotId): global proj fg = proj.previewSlot(slotId) # type: filtergraph.FilterGraph nodes = [node for node in fg.getNodes()] return jsonpickle.encode(nodes) @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['GET']) # @lock_preview def slot_slotId_node_uid_get(slotId, nodeUid): global proj fg = proj.previewSlot(slotId) # type: filtergraph.FilterGraph try: node = next(node for node in fg.getNodes() if node.uid == nodeUid) return jsonpickle.encode(node) except StopIteration: abort(404, "Node not found") @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['DELETE']) @lock_preview def slot_slotId_node_uid_delete(slotId, nodeUid): global proj fg = proj.previewSlot(slotId) # type: filtergraph.FilterGraph try: node = next(node for node in fg.getNodes() if node.uid == nodeUid) fg.removeEffectNode(node.uid) return "OK" except StopIteration: abort(404, "Node not found") @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['PUT']) @lock_preview def slot_slotId_node_uid_update(slotId, nodeUid): global proj fg = proj.previewSlot(slotId) # type: filtergraph.FilterGraph if not request.json: abort(400) try: app.logger.debug(request.json) node = fg.updateNodeParameter(nodeUid, request.json) return jsonpickle.encode(node) except StopIteration: abort(404, "Node not found") @app.route('/slot/<int:slotId>/node/<nodeUid>/parameterDefinition', methods=['GET']) # @lock_preview def slot_slotId_node_uid_parameter_get(slotId, nodeUid): global proj fg = proj.previewSlot(slotId) # type: filtergraph.FilterGraph try: node = next(node for node in fg.getNodes() if node.uid == nodeUid) return json.dumps(node.effect.getParameterDefinition()) except StopIteration: abort(404, "Node not found") @app.route('/slot/<int:slotId>/node/<nodeUid>/modulateableParameters', methods=['GET']) # @lock_preview def 
# NOTE(review): this chunk is the interior of an app-factory function (it ends with
# `return app`), so in the real file every statement below is indented inside that
# factory. `app`, `proj`, `serverconfig`, `errors`, `dataLock`, `POOL_TIME`,
# `stop_signal`, `count`, `record_timings`, `default_values`, `lock_preview`,
# `sigStop` and the `audioled` modules are defined elsewhere in that scope/file.
# The first handler below is cut at the top: its decorator and `def` line are
# outside the visible chunk.
slot_slotId_node_uid_parameterModulations_get(slotId, nodeUid):
    # GET: modulateable parameters of one node in the previewed slot's filtergraph.
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    try:
        node = next(node for node in fg.getNodes() if node.uid == nodeUid)
        return json.dumps(node.effect.getModulateableParameters())
    except StopIteration:
        # next() found no node with this uid
        abort(404, "Node not found")


@app.route('/slot/<int:slotId>/node/<nodeUid>/effect', methods=['GET'])
# @lock_preview
def node_uid_effectname_get(slotId, nodeUid):
    """Return the fully qualified effect class name of one node as JSON."""
    global proj
    print("Getting slot {}".format(slotId))
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    try:
        node = next(node for node in fg.getNodes() if node.uid == nodeUid)
        return json.dumps(getFullClassName(node.effect))
    except StopIteration:
        abort(404, "Node not found")


@app.route('/slot/<int:slotId>/node', methods=['POST'])
@lock_preview
def slot_slotId_node_post(slotId):
    """Create a node from a JSON body of [full_class_name, parameters_dict].

    Modulation-source classes are added via addModulationSource, everything
    else via addEffectNode. Returns the new node jsonpickle-encoded.
    """
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    if not request.json:
        abort(400)
    full_class_name = request.json[0]
    parameters = request.json[1]
    app.logger.debug(parameters)
    module_name, class_name = None, None
    try:
        # Validates the module against the audioled whitelist; 403 if not allowed.
        module_name, class_name = getModuleAndClassName(full_class_name)
    except RuntimeError:
        abort(403)
    class_ = getattr(importlib.import_module(module_name), class_name)
    instance = class_(**parameters)
    node = None
    if module_name == 'audioled.modulation':
        app.logger.info("Adding modulation source")
        node = fg.addModulationSource(instance)
    else:
        app.logger.info("Adding effect node")
        node = fg.addEffectNode(instance)
    return jsonpickle.encode(node)


@app.route('/slot/<int:slotId>/connections', methods=['GET'])
# @lock_preview
def slot_slotId_connections_get(slotId):
    """Return all connections of the slot's filtergraph, jsonpickle-encoded."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    connections = [con for con in fg.getConnections()]
    return jsonpickle.encode(connections)


@app.route('/slot/<int:slotId>/connection', methods=['POST'])
@lock_preview
def slot_slotId_connection_post(slotId):
    """Create a node connection from JSON keys from/to node uid and channel."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    if not request.json:
        abort(400)
    # NOTE(review): this local deliberately shadows the `json` module inside
    # this handler body.
    json = request.json
    connection = fg.addNodeConnection(
        json['from_node_uid'],
        int(json['from_node_channel']),
        json['to_node_uid'],
        int(json['to_node_channel']),
    )
    return jsonpickle.encode(connection)


@app.route('/slot/<int:slotId>/connection/<connectionUid>', methods=['DELETE'])
@lock_preview
def slot_slotId_connection_uid_delete(slotId, connectionUid):
    """Delete the connection with the given uid; 404 if it does not exist."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    try:
        connection = next(connection for connection in fg.getConnections()
                          if connection.uid == connectionUid)
        fg.removeConnection(connection.uid)
        return "OK"
    except StopIteration:
        abort(404, "Node not found")


@app.route('/slot/<int:slotId>/modulationSources', methods=['GET'])
# @lock_preview
def slot_slotId_modulationSources_get(slotId):
    """Return all modulation sources of the slot, jsonpickle-encoded."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    mods = [mod for mod in fg.getModulationSources()]
    return jsonpickle.encode(mods)


@app.route('/slot/<int:slotId>/modulationSource/<modulationSourceUid>', methods=['DELETE'])
@lock_preview
def slot_slotId_modulationSourceUid_delete(slotId, modulationSourceUid):
    """Delete one modulation source by uid; 404 if not found."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    try:
        mod = next(mod for mod in fg.getModulationSources()
                   if mod.uid == modulationSourceUid)
        fg.removeModulationSource(mod.uid)
        return "OK"
    except StopIteration:
        abort(404, "Modulation Source not found")


@app.route('/slot/<int:slotId>/modulationSource/<modulationUid>', methods=['PUT'])
@lock_preview
def slot_slotId_modulationSourceUid_update(slotId, modulationUid):
    """Update parameters of one modulation source from the JSON body."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    if not request.json:
        abort(400)
    try:
        app.logger.debug(request.json)
        mod = fg.updateModulationSourceParameter(modulationUid, request.json)
        return jsonpickle.encode(mod)
    except StopIteration:
        abort(404, "Modulation not found")


@app.route('/slot/<int:slotId>/modulationSource/<modulationSourceUid>', methods=['GET'])
# @lock_preview
def slot_slotId_modulationSourceUid_get(slotId, modulationSourceUid):
    """Return one modulation source by uid, jsonpickle-encoded; 404 if absent."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    try:
        mod = next(mod for mod in fg.getModulationSources()
                   if mod.uid == modulationSourceUid)
        return jsonpickle.encode(mod)
    except StopIteration:
        abort(404, "Modulation Source not found")


@app.route('/slot/<int:slotId>/modulations', methods=['GET'])
# @lock_preview
def slot_slotId_modulations_get(slotId):
    """Return modulations, optionally filtered by source and/or destination uid.

    Query args: modulationSourceUid, modulationDestinationUid (both optional).
    """
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    modSourceId = request.args.get('modulationSourceUid', None)
    modDestinationId = request.args.get('modulationDestinationUid', None)
    mods = [mod for mod in fg.getModulations()]
    if modSourceId is not None:
        # keep only modulations driven by the given modulation source
        mods = [mod for mod in mods if mod.modulationSource.uid == modSourceId]
    if modDestinationId is not None:
        # keep only modulations targeting the given destination node
        mods = [mod for mod in mods if mod.targetNode.uid == modDestinationId]
    encVal = jsonpickle.encode(mods)
    return encVal


@app.route('/slot/<int:slotId>/modulation', methods=['POST'])
@lock_preview
def slot_slotId_modulation_post(slotId):
    """Create a modulation linking a modulation source to a target node."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    if not request.json:
        abort(400)
    # shadows the `json` module locally, same pattern as the connection handler
    json = request.json
    newMod = fg.addModulation(json['modulationsource_uid'], json['target_uid'])
    return jsonpickle.encode(newMod)


@app.route('/slot/<int:slotId>/modulation/<modulationUid>', methods=['GET'])
# @lock_preview
def slot_slotId_modulationUid_get(slotId, modulationUid):
    """Return one modulation by uid, jsonpickle-encoded; 404 if absent."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    try:
        mod = next(mod for mod in fg.getModulations() if mod.uid == modulationUid)
        return jsonpickle.encode(mod)
    except StopIteration:
        abort(404, "Modulation not found")


@app.route('/slot/<int:slotId>/modulation/<modulationUid>', methods=['PUT'])
@lock_preview
def slot_slotId_modulationUid_update(slotId, modulationUid):
    """Update parameters of one modulation from the JSON body."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    if not request.json:
        abort(400)
    try:
        app.logger.debug(request.json)
        mod = fg.updateModulationParameter(modulationUid, request.json)
        return jsonpickle.encode(mod)
    except StopIteration:
        abort(404, "Modulation not found")


@app.route('/slot/<int:slotId>/modulation/<modulationUid>', methods=['DELETE'])
@lock_preview
def slot_slotId_modulationUid_delete(slotId, modulationUid):
    """Delete one modulation by uid; 404 if not found."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    try:
        mod = next(mod for mod in fg.getModulations() if mod.uid == modulationUid)
        if mod is not None:
            fg.removeModulation(modulationUid)
            return "OK"
        else:
            # NOTE(review): unreachable — next() either yields a mod or raises
            # StopIteration, so `mod` can never be None here.
            abort(404, "Modulation not found")
    except StopIteration:
        abort(404, "Modulation not found")


@app.route('/slot/<int:slotId>/configuration', methods=['GET'])
# @lock_preview
def slot_slotId_configuration_get(slotId):
    """Return the slot's whole filtergraph, jsonpickle-encoded."""
    global proj
    fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
    config = jsonpickle.encode(fg)
    return config


@app.route('/slot/<int:slotId>/configuration', methods=['POST'])
def slot_slotId_configuration_post(slotId):
    """Replace the slot's filtergraph with a jsonpickle-encoded one from the body.

    NOTE(review): jsonpickle.decode on request data can instantiate arbitrary
    classes — this endpoint trusts its clients.
    """
    global proj
    if not request.json:
        abort(400)
    newGraph = jsonpickle.decode(request.json)
    if not isinstance(newGraph, filtergraph.FilterGraph):
        raise RuntimeError("Not a FilterGraph")
    proj.setFiltergraphForSlot(slotId, newGraph)
    return "OK"


@app.route('/effects', methods=['GET'])
def effects_get():
    """Return all known effect and modulation-source classes, jsonpickle-encoded."""
    childclasses = []
    childclasses.extend(inheritors(effects.Effect))
    childclasses.extend(inheritors(modulation.ModulationSource))
    return jsonpickle.encode([child for child in childclasses])


@app.route('/effect/<full_class_name>/description', methods=['GET'])
def effect_effectname_description_get(full_class_name):
    """Return the effect description for a whitelisted effect class."""
    module_name, class_name = None, None
    try:
        module_name, class_name = getModuleAndClassName(full_class_name)
    except RuntimeError:
        abort(403)
    class_ = getattr(importlib.import_module(module_name), class_name)
    return class_.getEffectDescription()


@app.route('/effect/<full_class_name>/args', methods=['GET'])
def effect_effectname_args_get(full_class_name):
    """Return the constructor arguments of an effect class as a JSON object.

    Args with defaults map to their default value, args without to None,
    then project-wide `default_values` overrides are applied.
    """
    module_name, class_name = None, None
    try:
        module_name, class_name = getModuleAndClassName(full_class_name)
    except RuntimeError:
        abort(403)
    class_ = getattr(importlib.import_module(module_name), class_name)
    argspec = inspect.getfullargspec(class_.__init__)
    if argspec.defaults is not None:
        # defaults align with the *last* len(defaults) positional args
        argsWithDefaults = dict(
            zip(argspec.args[-len(argspec.defaults):], argspec.defaults))
    else:
        argsWithDefaults = dict()
    result = argsWithDefaults.copy()
    if argspec.defaults is not None:
        result.update({
            key: None
            for key in argspec.args[1:len(argspec.args) - len(argspec.defaults)]
        })  # 1 removes self
    result.update({
        key: default_values[key]
        for key in default_values if key in result
    })
    app.logger.debug(result)
    return jsonify(result)


@app.route('/effect/<full_class_name>/parameter', methods=['GET'])
def effect_effectname_parameters_get(full_class_name):
    """Return the parameter definition of a whitelisted effect class as JSON."""
    module_name, class_name = None, None
    try:
        module_name, class_name = getModuleAndClassName(full_class_name)
    except RuntimeError:
        abort(403)
    class_ = getattr(importlib.import_module(module_name), class_name)
    return json.dumps(class_.getParameterDefinition())


@app.route('/effect/<full_class_name>/parameterHelp', methods=['GET'])
def effect_effectname_parameterhelp_get(full_class_name):
    """Return the parameter help text of a whitelisted effect class as JSON."""
    module_name, class_name = None, None
    try:
        module_name, class_name = getModuleAndClassName(full_class_name)
    except RuntimeError:
        abort(403)
    class_ = getattr(importlib.import_module(module_name), class_name)
    return json.dumps(class_.getParameterHelp())


def getModuleAndClassName(full_class_name):
    """Split 'pkg.module.Class' and enforce the audioled module whitelist.

    Raises RuntimeError for any module outside the allowed audioled.* set,
    which callers translate into HTTP 403.
    """
    module_name, class_name = full_class_name.rsplit(".", 1)
    if (module_name != "audioled.audio" and module_name != "audioled.effects"
            and module_name != "audioled.devices" and module_name != "audioled.colors"
            and module_name != "audioled.audioreactive"
            and module_name != "audioled.generative"
            and module_name != "audioled.input"
            and module_name != "audioled.panelize"
            and module_name != "audioled.modulation"):
        raise RuntimeError("Not allowed")
    return module_name, class_name


def getFullClassName(o):
    """Return the fully qualified class name of an object (module.Class)."""
    module = o.__class__.__module__
    if module is None or module == str.__class__.__module__:
        # builtins (or unknown module): plain class name is unambiguous enough
        return o.__class__.__name__
    else:
        return module + '.' + o.__class__.__name__


def inheritors(klass):
    """Return the set of all (transitive) subclasses of klass."""
    subclasses = set()
    work = [klass]
    while work:
        parent = work.pop()
        for child in parent.__subclasses__():
            if child not in subclasses:
                subclasses.add(child)
                work.append(child)
    return subclasses


@app.route('/errors', methods=['GET'])
def errors_get():
    """Return last processing errors as {node_uid: message} JSON."""
    result = {}
    for error in errors:
        result[error.node.uid] = error.message
    return json.dumps(result)


@app.route('/project/activeScene', methods=['POST'])
def project_activeScene_post():
    """Activate the scene given in JSON key 'slot' (no-op if already active)."""
    global proj
    if not request.json:
        abort(400)
    value = request.json['slot']
    app.logger.info("Activating scene {}".format(value))
    if proj.activeSceneId != value:
        proj.activateScene(value)
        # proj.previewSlot(value)
    return "OK"


@app.route('/project/activeScene', methods=['GET'])
def project_activeSlot_get():
    """Return the currently previewed slot and active scene ids."""
    global proj
    app.logger.debug(proj.outputSlotMatrix)
    return jsonify({
        'activeSlot': proj.previewSlotId,  # TODO: Change in FE
        'activeScene': proj.activeSceneId,
    })


@app.route('/project/sceneMatrix', methods=['PUT'])
def project_sceneMatrix_put():
    """Replace the project's scene matrix with the JSON body."""
    global proj
    if not request.json:
        abort(400)
    value = request.json
    app.logger.debug(value)
    proj.setSceneMatrix(value)
    return "OK"


@app.route('/project/activateSlot', methods=['POST'])
# @lock_preview
def project_activateSlot_post():
    """Switch the preview to the slot given in JSON key 'slot'."""
    global proj
    if not request.json:
        abort(400)
    value = request.json['slot']
    app.logger.info("Activating slot {}".format(value))
    proj.previewSlot(value)
    return "OK"


@app.route('/project/sceneMatrix', methods=['GET'])
def project_sceneMatrix_get():
    """Return the project's scene matrix as JSON."""
    global proj
    return json.dumps(proj.getSceneMatrix())


@app.route('/project/assets/<path:path>', methods=['GET'])
def project_assets_get(path):
    """Serve a project asset; asset tuple is (file, download name, mimetype)."""
    global serverconfig
    global proj
    asset = serverconfig.getProjectAsset(proj.id, path)
    # NOTE(review): attachment_filename was renamed to download_name in
    # Flask 2.x — this code targets an older Flask; confirm before upgrading.
    return send_file(asset[0], attachment_filename=asset[1], mimetype=asset[2])


@app.route('/project/assets', methods=['POST'])
def project_assets_post():
    """Upload a project asset; only .gif files are accepted."""
    global serverconfig
    global proj
    if 'file' not in request.files:
        app.logger.warn("No file in request")
        abort(400)
    file = request.files['file']
    if file.filename == '':
        app.logger.warn("File has no filename")
        abort(400)
    if file and '.' in file.filename and file.filename.rsplit(
            '.', 1)[1].lower() in ['gif']:
        app.logger.info("Adding asset to proj {}".format(proj.id))
        filename = serverconfig.addProjectAsset(proj.id, file)
        return jsonify({'filename': filename})
    app.logger.error("Unknown content for asset: {}".format(file.filename))
    abort(400)


@app.route('/projects', methods=['GET'])
def projects_get():
    """Return metadata for all stored projects."""
    global serverconfig
    return jsonify(serverconfig.getProjectsMetadata())


@app.route('/projects', methods=['POST'])
def projects_post():
    """Create an empty project from JSON keys 'title' and 'description'."""
    global serverconfig
    if not request.json:
        abort(400)
    title = request.json.get('title', '')
    description = request.json.get('description', '')
    metadata = serverconfig.createEmptyProject(title, description)
    return jsonify(metadata)


@app.route('/projects/import', methods=['POST'])
def projects_import_post():
    """Import a project from the JSON body and return its metadata."""
    global serverconfig
    if not request.json:
        abort(400)
    metadata = serverconfig.importProject(request.json)
    return jsonify(metadata)


@app.route('/projects/<uid>/export', methods=['GET'])
def projects_project_export(uid):
    """Export a whole project jsonpickle-encoded; 404 if unknown uid."""
    global serverconfig
    # local `proj` shadows the global project here on purpose (export only)
    proj = serverconfig.getProject(uid)
    if proj is not None:
        app.logger.info("Exporting project {}".format(uid))
        return jsonpickle.encode(proj)
    abort(404)


@app.route('/projects/<uid>', methods=['DELETE'])
def projects_project_delete(uid):
    """Delete a project by uid."""
    global serverconfig
    serverconfig.deleteProject(uid)
    return "OK"


@app.route('/projects/activeProject', methods=['POST'])
def projects_activeProject_post():
    """Activate the project given in JSON key 'project'.

    On failure, falls back to initializing a default project when no project
    is active at all; either way the failure is reported as HTTP 500.
    """
    global serverconfig
    global proj
    if not request.json:
        abort(400)
    uid = request.json['project']
    app.logger.info("Activating project {}".format(uid))
    try:
        proj = serverconfig.activateProject(uid)
    except Exception as e:
        app.logger.error("Error opening project: {}".format(e))
        if serverconfig._activeProject is None:
            newProj = serverconfig.initDefaultProject()
            serverconfig.activateProject(newProj.id)
            abort(
                500,
                "Could not active project. No other project found. Initializing default."
            )
        else:
            abort(500, "Project could not be activated. Reason: {}".format(e))
    return "OK"


@app.route('/configuration', methods=['GET'])
def configuration_get():
    """Return the server configuration schema and current values."""
    global serverconfig
    return jsonify({
        'parameters': serverconfig.getConfigurationParameters(),
        'values': serverconfig.getFullConfiguration()
    })


@app.route('/configuration', methods=['PUT'])
def configuration_put():
    """Apply a configuration update; 400 with reason on invalid values."""
    global serverconfig
    if not request.json:
        abort(400)
    try:
        serverconfig.setConfiguration(request.json)
    except RuntimeError as e:
        app.logger.error("ERROR updating configuration: {}".format(e))
        abort(400, str(e))
    return jsonify(serverconfig.getFullConfiguration())


@app.route('/remote/brightness', methods=['POST'])
def remote_brightness_post():
    """Set active-scene brightness; query arg 'value' is a percentage 0-100."""
    global proj
    value = int(request.args.get('value'))
    floatVal = float(value / 100)
    app.logger.info("Setting brightness: {}".format(floatVal))
    proj.setBrightnessForActiveScene(floatVal)
    return "OK"


@app.route('/remote/favorites/<id>', methods=['POST'])
def remote_favorites_id_post(id):
    """Load a stored favorite filtergraph into the previewed slot."""
    # TODO: Switch to selecting scenes
    filename = "favorites/{}.json".format(id)
    global proj
    if os.path.isfile(filename):
        with open(filename, "r") as f:
            fg = jsonpickle.decode(f.read())
            proj.setFiltergraphForSlot(proj.previewSlotId, fg)
        return "OK"
    else:
        app.logger.info("Favorite not found: {}".format(filename))
        abort(404)


def processLED():
    """One LED processing tick; reschedules itself via threading.Timer.

    Runs proj.update/proj.process under dataLock, throttles repeated
    NodeException logging to every 100th occurrence, and schedules the next
    tick in `finally` unless stop_signal is set.
    """
    global proj
    global ledThread
    global stop_signal
    global event_loop
    global last_time
    global current_time
    global errors
    global count
    global record_timings
    dt = 0
    if stop_signal:
        return
    try:
        with dataLock:
            last_time = current_time
            current_time = timer()
            dt = current_time - last_time
            count = count + 1
            if event_loop is None:
                # lazily create an asyncio loop for this timer thread
                event_loop = asyncio.new_event_loop()
                asyncio.set_event_loop(event_loop)
            proj.update(dt, event_loop)
            proj.process()
            # clear errors (if any have occured in the current run, we wouldn't reach this)
            errors.clear()
    except filtergraph.NodeException as ne:
        if count == 100:
            app.logger.error("NodeError in {}: {}".format(ne.node.effect, ne))
            app.logger.info("Skipping next 100 errors...")
            count = 0
        errors.clear()
        errors.append(ne)
    except Exception as e:
        app.logger.error("Unknown error: {}".format(e))
        traceback.print_tb(e.__traceback__)
    finally:
        # Set the next thread to happen
        real_process_time = timer() - current_time
        timeToWait = max(POOL_TIME, 0.01 - real_process_time)
        if count == 100:
            if record_timings:
                # proj.previewSlot(proj.activeSlotId).printProcessTimings() # TODO:
                # proj.previewSlot(proj.activeSlotId).printUpdateTimings() # TODO:
                app.logger.info(
                    "Process time: {}".format(real_process_time))
                app.logger.info("Waiting {}".format(timeToWait))
            count = 0
        if not stop_signal:
            ledThread = threading.Timer(timeToWait, processLED, ())
            ledThread.start()


def startLEDThread():
    """Start the self-rescheduling LED processing timer thread."""
    # Do initialisation stuff here
    global ledThread
    global last_time
    global current_time
    # Create your thread
    current_time = timer()
    ledThread = threading.Timer(POOL_TIME, processLED, ())
    app.logger.info('starting LED thread')
    ledThread.start()


# Initiate — only in the actual worker process, not the reloader parent
if is_running_from_reloader() is False:
    startLEDThread()

# When you kill Flask (SIGTERM), clear the trigger for the next thread
# atexit.register(interrupt)
signal.signal(signal.SIGINT, sigStop)
signal.signal(signal.SIGUSR1, sigStop)
return app
}) @app.context_processor def members(): with dataLock: return dict(members=staticCache["members"]) @app.after_request def response_minify(response): if debug: return response """ minify html response to decrease site traffic """ if response.content_type == u'text/html; charset=utf-8': response.set_data( minify(response.get_data(as_text=True), remove_comments=True, remove_empty_space=True, remove_all_empty_space=True, reduce_empty_attributes=True, reduce_boolean_attributes=False, remove_optional_attribute_quotes=True, convert_charrefs=False) ) return response return response # start thread in the correct process # (when debugging we want to reload with the child) # https://stackoverflow.com/questions/25504149/why-does-running-the-flask-dev-server-run-itself-twice if is_running_from_reloader() == debug: threading.Thread(target=background_job, daemon=True).start() # Do not change debug here! if __name__ == '__main__': app.run(host='0.0.0.0', port=os.environ.get("PORT"), debug=debug, threaded=True)
start_app(False) sys.exit(0) parser = ArgumentParser(description=translate("ma_args_description", default_locale), add_help=False) parser.add_argument("-c", "--console", help=translate("ma_args_console", default_locale), action="store_true") parser.add_argument("-b", "--build", help=translate("ma_args_build", default_locale), action="store_true") parser.add_argument("-h", "--help", help=translate("ma_args_help", default_locale), action="help") args = parser.parse_args() if args.build: build_app(args.console) if not args.console and not frozen: logging.info(translate("launching_no_console", default_locale)) try: Popen(["pythonw", __file__, "start"]) except FileNotFoundError: Popen(["python", __file__, "start"]) else: if not serving.is_running_from_reloader(): logging.info(translate("launching_console", default_locale)) start_app(args.console)
def run_simple_wsgi(
    hostname: str,
    port: int,
    application: "WSGIApplication",
    use_reloader: bool = False,
    use_debugger: bool = False,
    use_evalex: bool = True,
    extra_files: t.Optional[t.Iterable[str]] = None,
    exclude_patterns: t.Optional[t.Iterable[str]] = None,
    reloader_interval: int = 1,
    reloader_type: str = "stat",
    threaded: bool = False,
    processes: int = 1,
    request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None,
    static_files: t.Optional[t.Dict[str, t.Union[str, t.Tuple[str, str]]]] = None,
    passthrough_errors: bool = False,
    ssl_context: t.Optional[_TSSLContextArg] = None,
) -> None:
    """Start a WSGI application. Optional features include a reloader,
    multithreading and fork support.

    :param hostname: The host to bind to, for example ``'localhost'``.
        If the value is a path that starts with ``unix://`` it will bind
        to a Unix socket instead of a TCP socket.
    :param port: The port for the server.  eg: ``8080``
    :param application: the WSGI application to execute
    :param use_reloader: should the server automatically restart the python
                         process if modules were changed?
    :param use_debugger: should the werkzeug debugging system be used?
    :param use_evalex: should the exception evaluation feature be enabled?
    :param extra_files: a list of files the reloader should watch
                        additionally to the modules.  For example configuration
                        files.
    :param exclude_patterns: List of :mod:`fnmatch` patterns to ignore
        when running the reloader. For example, ignore cache files that
        shouldn't reload when updated.
    :param reloader_interval: the interval for the reloader in seconds.
    :param reloader_type: the type of reloader to use. The default is
                          ``'stat'``. Valid values are ``'stat'`` and
                          ``'watchdog'``. See :ref:`reloader` for more
                          information.
    :param threaded: should the process handle each request in a separate
                     thread?
    :param processes: if greater than 1 then handle each request in a new process
                      up to this maximum number of concurrent processes.
    :param request_handler: optional parameter that can be used to replace
                            the default one.  You can use this to replace it
                            with a different
                            :class:`~BaseHTTPServer.BaseHTTPRequestHandler`
                            subclass.
    :param static_files: a list or dict of paths for static files.  This works
                         exactly like :class:`SharedDataMiddleware`, it's actually
                         just wrapping the application in that middleware before
                         serving.
    :param passthrough_errors: set this to `True` to disable the error catching.
                               This means that the server will die on errors but
                               it can be useful to hook debuggers in (pdb etc.)
    :param ssl_context: an SSL context for the connection. Either an
                        :class:`ssl.SSLContext`, a tuple in the form
                        ``(cert_file, pkey_file)``, the string ``'adhoc'`` if
                        the server should automatically create one, or ``None``
                        to disable SSL (which is the default).
    """
    if not isinstance(port, int):
        raise TypeError("port must be an integer")
    if use_debugger:
        # wrap the app in the interactive debugger middleware
        from werkzeug.debug import DebuggedApplication

        application = DebuggedApplication(application, use_evalex)
    if static_files:
        # serve static paths through SharedDataMiddleware before the app
        from werkzeug.middleware.shared_data import SharedDataMiddleware

        application = SharedDataMiddleware(application, static_files)

    def log_startup(sock: socket.socket) -> None:
        # Log where the server is listening, warning when bound to all
        # addresses; Console/_navycut_* helpers are project-level loggers.
        _navycut_base_logger()
        if sock.family == af_unix:
            Console.log.Info(f"Running on {hostname} (Press CTRL+C to quit)")
        else:
            if hostname == "0.0.0.0":
                _running_on_all_addr_logger()
                display_hostname = get_interface_ip(socket.AF_INET)
            elif hostname == "::":
                _running_on_all_addr_logger()
                display_hostname = get_interface_ip(socket.AF_INET6)
            else:
                display_hostname = hostname

            if ":" in display_hostname:
                display_hostname = f"[{display_hostname}]"

            _run_base_server_logger("http" if ssl_context is None else "https",
                                    display_hostname,
                                    sock.getsockname()[1]
                                    )

    def inner() -> None:
        # Reuse the socket fd handed over by the reloader parent, if any.
        try:
            fd: t.Optional[int] = int(os.environ["WERKZEUG_SERVER_FD"])
        except (LookupError, ValueError):
            fd = None
        srv = make_server(
            hostname,
            port,
            application,
            threaded,
            processes,
            request_handler,
            passthrough_errors,
            ssl_context,
            fd=fd,
        )
        if fd is None:
            log_startup(srv.socket)
        srv.serve_forever()

    if use_reloader:
        # If we're not running already in the subprocess that is the
        # reloader we want to open up a socket early to make sure the
        # port is actually available.
        if not is_running_from_reloader():
            if port == 0 and not can_open_by_fd:
                raise ValueError(
                    "Cannot bind to a random port with enabled "
                    "reloader if the Python interpreter does "
                    "not support socket opening by fd."
                )

            # Create and destroy a socket so that any exceptions are
            # raised before we spawn a separate Python interpreter and
            # lose this ability.
            address_family = select_address_family(hostname, port)
            server_address = get_sockaddr(hostname, port, address_family)
            s = socket.socket(address_family, socket.SOCK_STREAM)
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            s.bind(server_address)
            s.set_inheritable(True)

            # If we can open the socket by file descriptor, then we can just
            # reuse this one and our socket will survive the restarts.
            if can_open_by_fd:
                os.environ["WERKZEUG_SERVER_FD"] = str(s.fileno())
                s.listen(LISTEN_QUEUE)
                log_startup(s)
            else:
                s.close()
                if address_family == af_unix:
                    server_address = t.cast(str, server_address)
                    Console.log.Info(f"Unlinking {server_address}")
                    os.unlink(server_address)

        from ._reloader import run_with_reloader as _rwr

        _rwr(
            inner,
            extra_files=extra_files,
            exclude_patterns=exclude_patterns,
            interval=reloader_interval,
            reloader_type=reloader_type,
        )
    else:
        inner()
def run_simple(
    hostname: str,
    port: int,
    application,
    use_reloader: bool = False,
    use_debugger: bool = False,
    use_evalex: bool = True,
    extra_files=None,
    exclude_patterns=None,
    reloader_interval: int = 1,
    reloader_type: str = "auto",
    threaded: bool = False,
    processes: int = 1,
    request_handler=None,
    static_files=None,
    passthrough_errors: bool = False,
    ssl_context=None,
) -> None:
    """Start a WSGI server, mirroring ``werkzeug.serving.run_simple``.

    Unlike werkzeug's original, the created server object is stored in the
    module-level ``server`` global.

    NOTE(review): ``inner()`` never calls ``serve_forever()`` — presumably
    the caller drives the global ``server`` itself (e.g. to allow shutdown);
    confirm this is intentional, since otherwise the server never serves.
    The ``use_debugger``/``use_evalex``/``static_files`` parameters are
    accepted but not applied in the visible code.
    """
    if not isinstance(port, int):
        raise TypeError("port must be an integer")

    def log_startup(sock: serving.socket.socket) -> None:
        # Log the listening address, with a warning for 0.0.0.0/:: binds.
        all_addresses_message = (
            " * Running on all addresses.\n"
            " WARNING: This is a development server. Do not use it in"
            " a production deployment.")

        if sock.family == serving.af_unix:
            serving._log("info", " * Running on %s (Press CTRL+C to quit)",
                         hostname)
        else:
            if hostname == "0.0.0.0":
                serving._log("warning", all_addresses_message)
                display_hostname = serving.get_interface_ip(
                    serving.socket.AF_INET)
            elif hostname == "::":
                serving._log("warning", all_addresses_message)
                display_hostname = serving.get_interface_ip(
                    serving.socket.AF_INET6)
            else:
                display_hostname = hostname

            if ":" in display_hostname:
                display_hostname = f"[{display_hostname}]"

            serving._log(
                "info",
                " * Running on %s://%s:%d/ (Press CTRL+C to quit)",
                "http" if ssl_context is None else "https",
                display_hostname,
                sock.getsockname()[1],
            )

    def inner() -> None:
        # Reuse the fd handed over by the reloader parent, if present.
        try:
            fd: serving.t.Optional[int] = int(
                serving.os.environ["WERKZEUG_SERVER_FD"])
        except (LookupError, ValueError):
            fd = None
        global server
        server = serving.make_server(
            hostname,
            port,
            application,
            threaded,
            processes,
            request_handler,
            passthrough_errors,
            ssl_context,
            fd=fd,
        )
        if fd is None:
            log_startup(server.socket)

    if use_reloader:
        # If we're not running already in the subprocess that is the
        # reloader we want to open up a socket early to make sure the
        # port is actually available.
        if not serving.is_running_from_reloader():
            if port == 0 and not serving.can_open_by_fd:
                raise ValueError("Cannot bind to a random port with enabled "
                                 "reloader if the Python interpreter does "
                                 "not support socket opening by fd.")

            # Create and destroy a socket so that any exceptions are
            # raised before we spawn a separate Python interpreter and
            # lose this ability.
            address_family = serving.select_address_family(hostname, port)
            server_address = serving.get_sockaddr(hostname, port,
                                                  address_family)
            s = serving.socket.socket(address_family,
                                      serving.socket.SOCK_STREAM)
            s.setsockopt(serving.socket.SOL_SOCKET,
                         serving.socket.SO_REUSEADDR, 1)
            s.bind(server_address)
            s.set_inheritable(True)

            # If we can open the socket by file descriptor, then we can just
            # reuse this one and our socket will survive the restarts.
            if serving.can_open_by_fd:
                serving.os.environ["WERKZEUG_SERVER_FD"] = str(s.fileno())
                s.listen(serving.LISTEN_QUEUE)
                log_startup(s)
            else:
                s.close()
                if address_family == serving.af_unix:
                    server_address = serving.t.cast(str, server_address)
                    serving._log("info", "Unlinking %s", server_address)
                    serving.os.unlink(server_address)

        from werkzeug._reloader import run_with_reloader as _rwr

        _rwr(
            inner,
            extra_files=extra_files,
            exclude_patterns=exclude_patterns,
            interval=reloader_interval,
            reloader_type=reloader_type,
        )
    else:
        inner()
from OpenSSL import SSL from werkzeug.serving import run_simple from werkzeug.serving import make_ssl_devcert from werkzeug.serving import is_running_from_reloader #from myproject import make_app app = Flask(__name__) bestand = open("C:/Users/Daniel van Liempd/Desktop/DANIEL.txt", 'r') #print (bestand.read()) make_ssl_devcert( 'C:/Users/Daniel van Liempd/PycharmProjects/Practicum-cloudinfra/venv', host='Daniel-PC', cn=None) is_running_from_reloader() run_simple(hostname='Daniel-PC', port=8080, application=(app), use_reloader=False, use_debugger=False, use_evalex=True, extra_files=None, reloader_interval=1, reloader_type='auto', threaded=False, processes=1, request_handler=None, static_files=None, passthrough_errors=False,
cost=34, wagon_id=wagons[1].id, place=2, schedule_id=schedules[0].id, is_booked=True, user_id=users[0].id, book_end_date=datetime(2020, 9, 25, 0, 0)), Ticket(departure_stop_id=stops[0].id, arrival_stop_id=stops[3].id, cost=34, wagon_id=wagons[1].id, place=3, schedule_id=schedules[0].id, is_booked=True, user_id=users[0].id, book_end_date=datetime(2020, 9, 25, 0, 0)) ] session.add_all(tickets) session.commit() # if __name__ == '__main__': database_cleaning_thread = StoppableThread() database_cleaning_thread.setDaemon(True) if (is_running_from_reloader()): database_cleaning_thread.start() app.run(debug=True) if (is_running_from_reloader()): database_cleaning_thread.stop()