def schedule(self):
    """Register the recurring UI-refresh jobs and start the scheduler."""
    # Weather lookup is comparatively expensive; refresh every 15 minutes.
    self.scheduler.add_job(self.set_weather,
                           trigger=interval.IntervalTrigger(minutes=15))
    # Clock display must tick once per second.
    self.scheduler.add_job(self.set_time,
                           trigger=interval.IntervalTrigger(seconds=1))
    self.scheduler.start()
def add_update_job(self, ws, job_func0, job_func1):
    """Register the two recurring websocket update jobs on the shared scheduler.

    Parameters:
        ws: websocket handed through to both job callables.
        job_func0: fast status update, invoked every 3 seconds.
        job_func1: slower cmoncli update, invoked every 15 minutes.

    Fix: removed the dead trailing ``pass`` statement.
    """
    # `ws` is captured by value in each lambda, so the jobs always target
    # the websocket passed to this call.
    status_trigger = interval.IntervalTrigger(seconds=3)
    scheduler.add_job(lambda: job_func0(ws),
                      trigger=status_trigger,
                      id='status_update_job',
                      replace_existing=True)
    cmoncli_trigger = interval.IntervalTrigger(minutes=15)
    scheduler.add_job(lambda: job_func1(ws),
                      trigger=cmoncli_trigger,
                      id='cmoncli_update_job',
                      replace_existing=True)
def startJob(self, frame):
    """Lazily create the background scheduler and register the order-refresh job."""
    if not self.apscheduler:
        self.apscheduler = BackgroundScheduler()
        self.apscheduler.start()
    if not self.job:
        # NOTE(review): the job id says "every_5m" but the trigger fires every
        # 5 * 10 = 50 seconds — confirm which interval is actually intended.
        trigger = interval.IntervalTrigger(seconds=5 * 10)
        self.job = self.apscheduler.add_job(lambda: refresh_order(frame),
                                            trigger=trigger,
                                            id='task_sync_every_5m',
                                            replace_existing=True)
def scheduleTimingPlanUpdate(self, update_interval: int) -> None:
    """
    Schedule the recurring update of the active timing plan.

    Arguments:
    ----------
    (1) update_interval:
            Time interval (seconds) between successive function calls.

    Fix: the function was annotated ``-> int`` but contains no return
    statement and always returns ``None``; the annotation now reflects that.
    (The sibling overload that returns a command id is a separate method.)
    """
    trigger = interval.IntervalTrigger(seconds=update_interval)
    # max_instances=10 lets overlapping runs pile up rather than be skipped.
    self.backgroundScheduler.add_job(
        self.signalController.updateAndSendActiveTimingPlan,
        trigger=trigger,
        max_instances=10)
def schedule_daily_execution(self, func, startHour: int, startMinute: int):
    """
    Schedule ``func()`` as a daily recurring job starting at startHour:startMinute.
    If that time has already passed today, the first run happens tomorrow.

    NOTES:
    (1) startHour is in 24-hour format: 0 <= startHour < 24.
    (2) startMinute: 0 <= startMinute < 60.
    (3) Invalid startHour and/or startMinute defaults the execution time
        to midnight.

    Fixes: microseconds are now zeroed too (previously the start carried the
    current microsecond), and a single ``now()`` snapshot is used instead of
    three separate calls.
    """
    # Validate inputs; fall back to midnight on anything out of range.
    if not (0 <= startHour < 24) or not (0 <= startMinute < 60):
        startHour = 0
        startMinute = 0
    now = datetime.datetime.now()
    start = now.replace(hour=startHour, minute=startMinute,
                        second=0, microsecond=0)
    # If today's start time has already passed (or is right now), begin tomorrow.
    if start <= now:
        start += datetime.timedelta(days=1)
    trigger = interval.IntervalTrigger(days=1, start_date=start)
    self.backgroundScheduler.add_job(func, trigger=trigger, max_instances=3)
def scheduleTimingPlanUpdate(self, update_interval: int) -> int:
    """
    Schedule the recurring update of the active timing plan.

    Arguments:
    ----------
    (1) update_interval:
            Time interval (seconds) between successive function calls.

    Returns:
    --------
    The ID of the command (in case one needs to cancel this job in the
    scheduler later).
    """
    self.backgroundScheduler.add_job(
        self.signalController.updateAndSendActiveTimingPlan,
        trigger=interval.IntervalTrigger(seconds=update_interval),
        id=str(self.commandId),
        max_instances=10)
    return self.commandId
def schedulePhaseControlActivation(self, phases: list, control: int,
                                   startSecFromNow: float, endSecFromNow: float):
    """
    Activate the phase control at startSecFromNow and keep it active until
    endSecFromNow, re-asserting it just under the NTCIP backup interval so
    the controller never times out.

    Fix: both window boundaries are now computed from a single ``now()``
    snapshot; previously two separate ``now()`` calls introduced a small
    skew between start_date and end_date.
    """
    # Jobs that start at time NOW (0.0 sec from now) are incompatible with
    # BackgroundScheduler, so nudge a zero offset slightly into the future.
    if startSecFromNow == 0.0:
        startSecFromNow = 0.01
    now = datetime.datetime.now()
    # Fire one second inside the backup time so the control is refreshed
    # before the controller's NTCIP backup timer can expire.
    activationWindow = interval.IntervalTrigger(
        seconds=self.ntcipBackupTime_Sec - 1,
        start_date=now + datetime.timedelta(seconds=startSecFromNow),
        end_date=now + datetime.timedelta(seconds=endSecFromNow))
    self.backgroundScheduler.add_job(
        self.signalController.setPhaseControl,
        args=[control, True, phases, self.scheduleReceiptTime],
        trigger=activationWindow,
        max_instances=3)
def activateAndScheduleSpecialFunctionMaintenance(self, functionId,
                                                  startSecFromNow: float,
                                                  endSecFromNow: float):
    """
    Turn the special function on locally, keep re-asserting it on the
    controller for the [start, end] window, then flip the local status off
    just before the window closes and push that final (off) state.

    Fix: all four trigger timestamps are now derived from one ``now()``
    snapshot; previously each trigger called ``now()`` separately, skewing
    the end-of-window cleanup relative to the maintenance window. The two
    reused ``dateTrigger`` locals were also given distinct names.
    """
    if startSecFromNow == 0.0:
        # Jobs that start at time NOW (0.0 sec from now) are incompatible
        # with BackgroundScheduler.
        startSecFromNow = 0.01
    # Mark the function active locally before any controller traffic.
    self.signalController.updateSpecialFunctionLocalStatus(functionId, True)
    now = datetime.datetime.now()
    # Re-assert one second inside the NTCIP backup time so the controller
    # never reverts while the window is open.
    maintenanceTrigger = interval.IntervalTrigger(
        seconds=self.ntcipBackupTime_Sec - 1,
        start_date=now + datetime.timedelta(seconds=startSecFromNow),
        end_date=now + datetime.timedelta(seconds=endSecFromNow))
    self.backgroundScheduler.add_job(
        self.signalController.setSpecialFunctionControllerStatus,
        args=[functionId],
        trigger=maintenanceTrigger,
        max_instances=3)
    # 0.1 s before the window ends: flip the local status back off...
    deactivateTrigger = date.DateTrigger(
        run_date=now + datetime.timedelta(seconds=endSecFromNow - 0.1))
    self.backgroundScheduler.add_job(
        self.signalController.updateSpecialFunctionLocalStatus,
        args=[functionId, False],
        trigger=deactivateTrigger,
        max_instances=3)
    # ...and 0.01 s before the end: push the now-off status to the controller.
    finalPushTrigger = date.DateTrigger(
        run_date=now + datetime.timedelta(seconds=endSecFromNow - 0.01))
    self.backgroundScheduler.add_job(
        self.signalController.setSpecialFunctionControllerStatus,
        args=[functionId],
        trigger=finalPushTrigger,
        max_instances=3)
from datetime import datetime
import time
import os

import job2
from apscheduler.triggers import interval
from apscheduler.schedulers.background import BackgroundScheduler


def tick():
    """Run one iteration of the job2 task."""
    job2.job()


if __name__ == '__main__':
    scheduler = BackgroundScheduler()
    # Every 1800 s == 30 minutes.
    trigger = interval.IntervalTrigger(seconds=1800)
    epoch_now = int(time.time())
    local_now = time.localtime(epoch_now)
    started_at = time.strftime("%Y%m%d %H:%M:%S", local_now)
    scheduler.add_job(tick,
                      trigger=trigger,
                      id='status_update_job',
                      replace_existing=True)
    print(started_at + '创建任务半小时执行一次')
    scheduler.start()
    try:
        # Simulate application activity (keeps the main thread alive).
        while True:
            time.sleep(2)
    except (KeyboardInterrupt, SystemExit):
        # Not strictly necessary if daemonic mode is enabled, but shut down cleanly.
        scheduler.shutdown()
def create_app():
    """Build and return the Flask application.

    Sets up: a background job that periodically persists the configuration,
    a large set of REST routes for editing filtergraph slots / projects /
    configuration, the LED processing thread, and SIGINT/SIGUSR1 handlers
    that tear everything down.

    NOTE(review): relies on many module-level globals (serverconfig, proj,
    stop_signal, stop_lock, ledThread, midiThread, errors, count, dataLock,
    POOL_TIME, default_values, ...) that are defined elsewhere in this file.
    """
    logger.info("Creating app")
    app = Flask(__name__)
    logger.debug("App created")

    def store_configuration():
        # Persist the server configuration in a separate process so a slow
        # write cannot stall this scheduler thread indefinitely.
        if stop_signal:
            return
        try:
            global serverconfig
            p = multiprocessing.Process(target=multiprocessing_func,
                                        args=(serverconfig, ))
            p.start()
            p.join(30)  # bounded wait; the child may still be running after this
            if p.is_alive():
                app.logger.warning("Storing configuration took too long")
            # Update MD5 hashes from file, since data was written in separate process
            serverconfig.updateMd5HashFromFiles()
            serverconfig.postStore()
        except Exception as e:
            app.logger.error("ERROR on storing configuration: {}".format(e))

    # Persist configuration every 5 seconds (change detection is internal to
    # serverconfig via the MD5 hashes above).
    sched = BackgroundScheduler(daemon=True)
    trigger = interval.IntervalTrigger(seconds=5)
    sched.add_job(store_configuration,
                  trigger=trigger,
                  id='store_config_job',
                  replace_existing=True)
    # sched.add_job(check_midi, 'interval', seconds=1)
    sched.start()

    def interrupt():
        # Orderly shutdown of every subsystem; each step is wrapped in its
        # own try/except so one failure cannot prevent the rest.
        try:
            global stop_lock
            global stop_signal
            stop_signal = True
            app.logger.debug("Waiting for stop lock")
            app.logger.warning("Signal received. Stopping...")
            stop_lock.acquire()
            app.logger.debug("Interrupt")
            global ledThread
            global midiThread
            global proj
            global midiBluetooth
            global midiCtrlPortOut
            global server
            # Log child processes for shutdown diagnostics (best effort).
            try:
                parent = psutil.Process(os.getpid())
                children = parent.children(recursive=True)
                app.logger.warning("Handling signal in {}".format(parent))
                for child in children:
                    app.logger.warning(
                        "Child process active: {}".format(child))
            except Exception:
                pass
            try:
                if server is not None:
                    app.logger.warning("Shutting down GRPC server")
                    server.stop(2)
                    app.logger.warning("Shutdown GRPC server complete")
            except Exception as e:
                app.logger.error(
                    "Error shutting down GRPC server: {}".format(e))
            if midiCtrlPortOut is not None:
                app.logger.warning("Shutting down MIDI control ports")
                # CC 121 = "reset all controllers" on every MIDI channel.
                for channel in range(16):
                    midiCtrlPortOut.send(
                        mido.Message('control_change',
                                     channel=channel,
                                     control=121))
                midiCtrlPortOut.close()
                app.logger.warning("Shutdown MIDI control ports complete")
                midiCtrlPortOut = None
            try:
                if midiBluetooth is not None:
                    app.logger.warning("Shutting down MIDI bluetooth")
                    midiBluetooth.shutdown()
                    app.logger.warning("Shutdown MIDI bluetooth complete")
            except Exception as e:
                app.logger.error(
                    "Error shutting down MIDI bluetooth: {}".format(e))
            # stop_signal = True
            try:
                app.logger.warning("Shutting down MIDI thread")
                midiThread.join(2)
                if midiThread.is_alive():
                    logger.warning("Midi thread not joined. Terminating")
                    midiThread.terminate()
                app.logger.warning("Shutdown MIDI thread complete")
            except Exception as e:
                app.logger.error(
                    "Error shutting down MIDI thread: {}".format(e))
            try:
                app.logger.warning("Shutting down LED Thread")
                ledThread.join(2)
                if ledThread.is_alive():
                    logger.warning("LED thread not joined. Terminating")
                    ledThread.terminate()
                app.logger.warning("Shutdown LED Thread complete")
            except Exception as e:
                app.logger.error(
                    "Error shutting down LED thread: {}".format(e))
            try:
                app.logger.warning("Stopping processing of current project")
                proj.stopProcessing()
                app.logger.warning("Processing current project stopped")
            except Exception as e:
                app.logger.error(
                    "Error shutting down current project: {}".format(e))
            try:
                app.logger.warning("Shutting down Background Scheduler")
                # TODO: This contains a thread join and blocks
                sched._thread.join(2)
                sched.shutdown(wait=False)
                app.logger.warning('Shutdown Background scheduler complete')
            except Exception as e:
                app.logger.error(
                    "Error shutting down Background scheduler: {}".format(e))
            # Final diagnostic pass: anything still alive at this point leaks.
            parent = psutil.Process(os.getpid())
            children = parent.children(recursive=True)
            for child in children:
                app.logger.warning(
                    "Child process still active: {}".format(child))
        except Exception as e:
            app.logger.error("Unhandled exception in signal: {}".format(e))
        finally:
            stop_lock.release()
            app.logger.warning("End of interrupt")

    def sigStop(sig, frame):
        # Signal handler: run the orderly shutdown, then force exit.
        interrupt()
        os.kill(os.getpid(), signal.SIGTERM)
        sys.exit(1)

    @app.after_request
    def add_header(response):
        # Disable client-side caching for every response.
        response.cache_control.max_age = 0
        return response

    @app.route('/')
    def home():
        return redirect("./index.html", code=302)

    @app.route('/<path:path>')
    def send_js(path):
        # Serve static frontend assets.
        return send_from_directory('resources', path)

    # ------------------------------------------------------------------
    # Slot / node routes: inspect and edit the filtergraph of one slot.
    # ------------------------------------------------------------------

    @app.route('/slot/<int:slotId>/nodes', methods=['GET'])
    # @lock_preview
    def slot_slotId_nodes_get(slotId):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        nodes = [node for node in fg.getNodes()]
        return jsonpickle.encode(nodes)

    @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['GET'])
    # @lock_preview
    def slot_slotId_node_uid_get(slotId, nodeUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            node = next(node for node in fg.getNodes() if node.uid == nodeUid)
            return jsonpickle.encode(node)
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['DELETE'])
    @lock_preview
    def slot_slotId_node_uid_delete(slotId, nodeUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            node = next(node for node in fg.getNodes() if node.uid == nodeUid)
            fg.removeEffectNode(node.uid)
            return "OK"
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>', methods=['PUT'])
    @lock_preview
    def slot_slotId_node_uid_update(slotId, nodeUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        if not request.json:
            abort(400)
        try:
            app.logger.debug(request.json)
            node = fg.updateNodeParameter(nodeUid, request.json)
            return jsonpickle.encode(node)
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>/parameterDefinition',
               methods=['GET'])
    # @lock_preview
    def slot_slotId_node_uid_parameter_get(slotId, nodeUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            node = next(node for node in fg.getNodes() if node.uid == nodeUid)
            return json.dumps(node.effect.getParameterDefinition())
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>/modulateableParameters',
               methods=['GET'])
    # @lock_preview
    def slot_slotId_node_uid_parameterModulations_get(slotId, nodeUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            node = next(node for node in fg.getNodes() if node.uid == nodeUid)
            return json.dumps(node.effect.getModulateableParameters())
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node/<nodeUid>/effect', methods=['GET'])
    # @lock_preview
    def node_uid_effectname_get(slotId, nodeUid):
        global proj
        print("Getting slot {}".format(slotId))
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            node = next(node for node in fg.getNodes() if node.uid == nodeUid)
            return json.dumps(getFullClassName(node.effect))
        except StopIteration:
            abort(404, "Node not found")

    @app.route('/slot/<int:slotId>/node', methods=['POST'])
    @lock_preview
    def slot_slotId_node_post(slotId):
        # Body: [full_class_name, constructor_kwargs]. The class is looked up
        # dynamically but restricted by getModuleAndClassName to audioled.*.
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        if not request.json:
            abort(400)
        full_class_name = request.json[0]
        parameters = request.json[1]
        app.logger.debug(parameters)
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        instance = class_(**parameters)
        node = None
        if module_name == 'audioled.modulation':
            app.logger.info("Adding modulation source")
            node = fg.addModulationSource(instance)
        else:
            app.logger.info("Adding effect node")
            node = fg.addEffectNode(instance)
        return jsonpickle.encode(node)

    @app.route('/slot/<int:slotId>/connections', methods=['GET'])
    # @lock_preview
    def slot_slotId_connections_get(slotId):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        connections = [con for con in fg.getConnections()]
        return jsonpickle.encode(connections)

    @app.route('/slot/<int:slotId>/connection', methods=['POST'])
    @lock_preview
    def slot_slotId_connection_post(slotId):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        if not request.json:
            abort(400)
        json = request.json  # NOTE: shadows the json module inside this view
        connection = fg.addNodeConnection(
            json['from_node_uid'],
            int(json['from_node_channel']),
            json['to_node_uid'],
            int(json['to_node_channel']),
        )
        return jsonpickle.encode(connection)

    @app.route('/slot/<int:slotId>/connection/<connectionUid>',
               methods=['DELETE'])
    @lock_preview
    def slot_slotId_connection_uid_delete(slotId, connectionUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            connection = next(connection for connection in fg.getConnections()
                              if connection.uid == connectionUid)
            fg.removeConnection(connection.uid)
            return "OK"
        except StopIteration:
            abort(404, "Node not found")

    # ------------------------------------------------------------------
    # Modulation source / modulation routes.
    # ------------------------------------------------------------------

    @app.route('/slot/<int:slotId>/modulationSources', methods=['GET'])
    # @lock_preview
    def slot_slotId_modulationSources_get(slotId):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        mods = [mod for mod in fg.getModulationSources()]
        return jsonpickle.encode(mods)

    @app.route('/slot/<int:slotId>/modulationSource/<modulationSourceUid>',
               methods=['DELETE'])
    @lock_preview
    def slot_slotId_modulationSourceUid_delete(slotId, modulationSourceUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            mod = next(mod for mod in fg.getModulationSources()
                       if mod.uid == modulationSourceUid)
            fg.removeModulationSource(mod.uid)
            return "OK"
        except StopIteration:
            abort(404, "Modulation Source not found")

    @app.route('/slot/<int:slotId>/modulationSource/<modulationUid>',
               methods=['PUT'])
    @lock_preview
    def slot_slotId_modulationSourceUid_update(slotId, modulationUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        if not request.json:
            abort(400)
        try:
            app.logger.debug(request.json)
            mod = fg.updateModulationSourceParameter(modulationUid,
                                                     request.json)
            return jsonpickle.encode(mod)
        except StopIteration:
            abort(404, "Modulation not found")

    @app.route('/slot/<int:slotId>/modulationSource/<modulationSourceUid>',
               methods=['GET'])
    # @lock_preview
    def slot_slotId_modulationSourceUid_get(slotId, modulationSourceUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            mod = next(mod for mod in fg.getModulationSources()
                       if mod.uid == modulationSourceUid)
            return jsonpickle.encode(mod)
        except StopIteration:
            abort(404, "Modulation Source not found")

    @app.route('/slot/<int:slotId>/modulations', methods=['GET'])
    # @lock_preview
    def slot_slotId_modulations_get(slotId):
        # Optional query params filter by modulation source and/or destination.
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        modSourceId = request.args.get('modulationSourceUid', None)
        modDestinationId = request.args.get('modulationDestinationUid', None)
        mods = [mod for mod in fg.getModulations()]
        if modSourceId is not None:
            # for specific modulation source
            mods = [
                mod for mod in mods
                if mod.modulationSource.uid == modSourceId
            ]
        if modDestinationId is not None:
            # for specific modulation destination
            mods = [
                mod for mod in mods
                if mod.targetNode.uid == modDestinationId
            ]
        encVal = jsonpickle.encode(mods)
        return encVal

    @app.route('/slot/<int:slotId>/modulation', methods=['POST'])
    @lock_preview
    def slot_slotId_modulation_post(slotId):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        if not request.json:
            abort(400)
        json = request.json  # NOTE: shadows the json module inside this view
        newMod = fg.addModulation(json['modulationsource_uid'],
                                  json['target_uid'])
        return jsonpickle.encode(newMod)

    @app.route('/slot/<int:slotId>/modulation/<modulationUid>',
               methods=['GET'])
    # @lock_preview
    def slot_slotId_modulationUid_get(slotId, modulationUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            mod = next(mod for mod in fg.getModulations()
                       if mod.uid == modulationUid)
            return jsonpickle.encode(mod)
        except StopIteration:
            abort(404, "Modulation not found")

    @app.route('/slot/<int:slotId>/modulation/<modulationUid>',
               methods=['PUT'])
    @lock_preview
    def slot_slotId_modulationUid_update(slotId, modulationUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        if not request.json:
            abort(400)
        try:
            app.logger.debug(request.json)
            mod = fg.updateModulationParameter(modulationUid, request.json)
            return jsonpickle.encode(mod)
        except StopIteration:
            abort(404, "Modulation not found")

    @app.route('/slot/<int:slotId>/modulation/<modulationUid>',
               methods=['DELETE'])
    @lock_preview
    def slot_slotId_modulationUid_delete(slotId, modulationUid):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        try:
            mod = next(mod for mod in fg.getModulations()
                       if mod.uid == modulationUid)
            if mod is not None:
                fg.removeModulation(modulationUid)
                return "OK"
            else:
                abort(404, "Modulation not found")
        except StopIteration:
            abort(404, "Modulation not found")

    # ------------------------------------------------------------------
    # Whole-slot configuration (jsonpickle round-trip of the filtergraph).
    # ------------------------------------------------------------------

    @app.route('/slot/<int:slotId>/configuration', methods=['GET'])
    # @lock_preview
    def slot_slotId_configuration_get(slotId):
        global proj
        fg = proj.previewSlot(slotId)  # type: filtergraph.FilterGraph
        config = jsonpickle.encode(fg)
        return config

    @app.route('/slot/<int:slotId>/configuration', methods=['POST'])
    def slot_slotId_configuration_post(slotId):
        # NOTE(review): jsonpickle.decode on a request body executes
        # arbitrary class construction — trusted clients only.
        global proj
        if not request.json:
            abort(400)
        newGraph = jsonpickle.decode(request.json)
        if not isinstance(newGraph, filtergraph.FilterGraph):
            raise RuntimeError("Not a FilterGraph")
        proj.setFiltergraphForSlot(slotId, newGraph)
        return "OK"

    # ------------------------------------------------------------------
    # Effect catalogue routes.
    # ------------------------------------------------------------------

    @app.route('/effects', methods=['GET'])
    def effects_get():
        """Returns all effects and modulators
        """
        childclasses = []
        childclasses.extend(inheritors(effects.Effect))
        childclasses.extend(inheritors(modulation.ModulationSource))
        return jsonpickle.encode([child for child in childclasses])

    @app.route('/effect/<full_class_name>/description', methods=['GET'])
    def effect_effectname_description_get(full_class_name):
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        return class_.getEffectDescription()

    @app.route('/effect/<full_class_name>/args', methods=['GET'])
    def effect_effectname_args_get(full_class_name):
        # Introspect the effect constructor: args with defaults keep their
        # default, the rest map to None, then project-wide default_values
        # override where applicable.
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        argspec = inspect.getfullargspec(class_.__init__)
        if argspec.defaults is not None:
            argsWithDefaults = dict(
                zip(argspec.args[-len(argspec.defaults):], argspec.defaults))
        else:
            argsWithDefaults = dict()
        result = argsWithDefaults.copy()
        if argspec.defaults is not None:
            result.update({
                key: None
                for key in argspec.args[1:len(argspec.args) -
                                        len(argspec.defaults)]
            })  # 1 removes self
        result.update({
            key: default_values[key]
            for key in default_values if key in result
        })
        app.logger.debug(result)
        return jsonify(result)

    @app.route('/effect/<full_class_name>/parameter', methods=['GET'])
    def effect_effectname_parameters_get(full_class_name):
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        return json.dumps(class_.getParameterDefinition())

    @app.route('/effect/<full_class_name>/parameterHelp', methods=['GET'])
    def effect_effectname_parameterhelp_get(full_class_name):
        module_name, class_name = None, None
        try:
            module_name, class_name = getModuleAndClassName(full_class_name)
        except RuntimeError:
            abort(403)
        class_ = getattr(importlib.import_module(module_name), class_name)
        return json.dumps(class_.getParameterHelp())

    def getModuleAndClassName(full_class_name):
        # Whitelist check: only classes from the audioled.* modules below may
        # be instantiated via the dynamic-import routes.
        module_name, class_name = full_class_name.rsplit(".", 1)
        if (module_name != "audioled.audio"
                and module_name != "audioled.effects"
                and module_name != "audioled.devices"
                and module_name != "audioled.colors"
                and module_name != "audioled.audioreactive"
                and module_name != "audioled.generative"
                and module_name != "audioled.input"
                and module_name != "audioled.panelize"
                and module_name != "audioled.modulation"):
            raise RuntimeError("Not allowed")
        return module_name, class_name

    def getFullClassName(o):
        # "module.Class" for project classes, bare name for builtins.
        module = o.__class__.__module__
        if module is None or module == str.__class__.__module__:
            return o.__class__.__name__
        else:
            return module + '.' + o.__class__.__name__

    def inheritors(klass):
        # Breadth-first walk of __subclasses__() to collect every descendant.
        subclasses = set()
        work = [klass]
        while work:
            parent = work.pop()
            for child in parent.__subclasses__():
                if child not in subclasses:
                    subclasses.add(child)
                    work.append(child)
        return subclasses

    @app.route('/errors', methods=['GET'])
    def errors_get():
        # Map node uid -> last error message recorded by processLED.
        result = {}
        for error in errors:
            result[error.node.uid] = error.message
        return json.dumps(result)

    # ------------------------------------------------------------------
    # Project-level routes.
    # ------------------------------------------------------------------

    @app.route('/project/activeScene', methods=['POST'])
    def project_activeScene_post():
        global proj
        if not request.json:
            abort(400)
        value = request.json['slot']
        app.logger.info("Activating scene {}".format(value))
        if proj.activeSceneId != value:
            proj.activateScene(value)
            # proj.previewSlot(value)
        return "OK"

    @app.route('/project/activeScene', methods=['GET'])
    def project_activeSlot_get():
        global proj
        app.logger.debug(proj.outputSlotMatrix)
        return jsonify({
            'activeSlot': proj.previewSlotId,  # TODO: Change in FE
            'activeScene': proj.activeSceneId,
        })

    @app.route('/project/sceneMatrix', methods=['PUT'])
    def project_sceneMatrix_put():
        global proj
        if not request.json:
            abort(400)
        value = request.json
        app.logger.debug(value)
        proj.setSceneMatrix(value)
        return "OK"

    @app.route('/project/activateSlot', methods=['POST'])
    # @lock_preview
    def project_activateSlot_post():
        global proj
        if not request.json:
            abort(400)
        value = request.json['slot']
        app.logger.info("Activating slot {}".format(value))
        proj.previewSlot(value)
        return "OK"

    @app.route('/project/sceneMatrix', methods=['GET'])
    def project_sceneMatrix_get():
        global proj
        return json.dumps(proj.getSceneMatrix())

    @app.route('/project/assets/<path:path>', methods=['GET'])
    def project_assets_get(path):
        # asset tuple: (stream/path, filename, mimetype) — see serverconfig.
        global serverconfig
        global proj
        asset = serverconfig.getProjectAsset(proj.id, path)
        return send_file(asset[0],
                         attachment_filename=asset[1],
                         mimetype=asset[2])

    @app.route('/project/assets', methods=['POST'])
    def project_assets_post():
        # Upload endpoint; only .gif assets are currently accepted.
        global serverconfig
        global proj
        if 'file' not in request.files:
            app.logger.warn("No file in request")
            abort(400)
        file = request.files['file']
        if file.filename == '':
            app.logger.warn("File has no filename")
            abort(400)
        if file and '.' in file.filename and file.filename.rsplit(
                '.', 1)[1].lower() in ['gif']:
            app.logger.info("Adding asset to proj {}".format(proj.id))
            filename = serverconfig.addProjectAsset(proj.id, file)
            return jsonify({'filename': filename})
        app.logger.error("Unknown content for asset: {}".format(file.filename))
        abort(400)

    @app.route('/projects', methods=['GET'])
    def projects_get():
        global serverconfig
        return jsonify(serverconfig.getProjectsMetadata())

    @app.route('/projects', methods=['POST'])
    def projects_post():
        global serverconfig
        if not request.json:
            abort(400)
        title = request.json.get('title', '')
        description = request.json.get('description', '')
        metadata = serverconfig.createEmptyProject(title, description)
        return jsonify(metadata)

    @app.route('/projects/import', methods=['POST'])
    def projects_import_post():
        global serverconfig
        if not request.json:
            abort(400)
        metadata = serverconfig.importProject(request.json)
        return jsonify(metadata)

    @app.route('/projects/<uid>/export', methods=['GET'])
    def projects_project_export(uid):
        global serverconfig
        proj = serverconfig.getProject(uid)  # NOTE: local, shadows global proj
        if proj is not None:
            app.logger.info("Exporting project {}".format(uid))
            return jsonpickle.encode(proj)
        abort(404)

    @app.route('/projects/<uid>', methods=['DELETE'])
    def projects_project_delete(uid):
        global serverconfig
        serverconfig.deleteProject(uid)
        return "OK"

    @app.route('/projects/activeProject', methods=['POST'])
    def projects_activeProject_post():
        # Activate a project; on failure fall back to a default project if
        # no project is currently active.
        global serverconfig
        global proj
        if not request.json:
            abort(400)
        uid = request.json['project']
        app.logger.info("Activating project {}".format(uid))
        try:
            proj = serverconfig.activateProject(uid)
        except Exception as e:
            app.logger.error("Error opening project: {}".format(e))
            if serverconfig._activeProject is None:
                newProj = serverconfig.initDefaultProject()
                serverconfig.activateProject(newProj.id)
                abort(
                    500,
                    "Could not active project. No other project found. Initializing default."
                )
            else:
                abort(500,
                      "Project could not be activated. Reason: {}".format(e))
        return "OK"

    @app.route('/configuration', methods=['GET'])
    def configuration_get():
        global serverconfig
        return jsonify({
            'parameters': serverconfig.getConfigurationParameters(),
            'values': serverconfig.getFullConfiguration()
        })

    @app.route('/configuration', methods=['PUT'])
    def configuration_put():
        global serverconfig
        if not request.json:
            abort(400)
        try:
            serverconfig.setConfiguration(request.json)
        except RuntimeError as e:
            app.logger.error("ERROR updating configuration: {}".format(e))
            abort(400, str(e))
        return jsonify(serverconfig.getFullConfiguration())

    # ------------------------------------------------------------------
    # Remote-control routes.
    # ------------------------------------------------------------------

    @app.route('/remote/brightness', methods=['POST'])
    def remote_brightness_post():
        # Query param `value` is a percentage (0-100).
        global proj
        value = int(request.args.get('value'))
        floatVal = float(value / 100)
        app.logger.info("Setting brightness: {}".format(floatVal))
        proj.setBrightnessForActiveScene(floatVal)
        return "OK"

    @app.route('/remote/favorites/<id>', methods=['POST'])
    def remote_favorites_id_post(id):
        # TODO: Switch to selecting scenes
        filename = "favorites/{}.json".format(id)
        global proj
        if os.path.isfile(filename):
            with open(filename, "r") as f:
                fg = jsonpickle.decode(f.read())
                proj.setFiltergraphForSlot(proj.previewSlotId, fg)
            return "OK"
        else:
            app.logger.info("Favorite not found: {}".format(filename))
            abort(404)

    # ------------------------------------------------------------------
    # LED processing thread.
    # ------------------------------------------------------------------

    def processLED():
        # One frame of LED processing; reschedules itself via threading.Timer
        # until stop_signal is set. Shared state is guarded by dataLock.
        global proj
        global ledThread
        global stop_signal
        global event_loop
        global last_time
        global current_time
        global errors
        global count
        global record_timings
        dt = 0
        if stop_signal:
            return
        try:
            with dataLock:
                last_time = current_time
                current_time = timer()
                dt = current_time - last_time
                count = count + 1
                if event_loop is None:
                    # Lazily create the asyncio loop on this worker thread.
                    event_loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(event_loop)
                proj.update(dt, event_loop)
                proj.process()
                # clear errors (if any have occured in the current run, we wouldn't reach this)
                errors.clear()
        except filtergraph.NodeException as ne:
            # Log node errors only every 100 frames to avoid log flooding.
            if count == 100:
                app.logger.error("NodeError in {}: {}".format(
                    ne.node.effect, ne))
                app.logger.info("Skipping next 100 errors...")
                count = 0
            errors.clear()
            errors.append(ne)
        except Exception as e:
            app.logger.error("Unknown error: {}".format(e))
            traceback.print_tb(e.__traceback__)
        finally:
            # Set the next thread to happen
            real_process_time = timer() - current_time
            timeToWait = max(POOL_TIME, 0.01 - real_process_time)
            if count == 100:
                if record_timings:
                    # proj.previewSlot(proj.activeSlotId).printProcessTimings()  # TODO:
                    # proj.previewSlot(proj.activeSlotId).printUpdateTimings()  # TODO:
                    app.logger.info(
                        "Process time: {}".format(real_process_time))
                    app.logger.info("Waiting {}".format(timeToWait))
                count = 0
            if not stop_signal:
                ledThread = threading.Timer(timeToWait, processLED, ())
                ledThread.start()

    def startLEDThread():
        # Do initialisation stuff here
        global ledThread
        global last_time
        global current_time
        # Create your thread
        current_time = timer()
        ledThread = threading.Timer(POOL_TIME, processLED, ())
        app.logger.info('starting LED thread')
        ledThread.start()

    # Initiate
    if is_running_from_reloader() is False:
        startLEDThread()
    # When you kill Flask (SIGTERM), clear the trigger for the next thread
    # atexit.register(interrupt)
    signal.signal(signal.SIGINT, sigStop)
    signal.signal(signal.SIGUSR1, sigStop)
    return app
def add_db_cleanup_task(self):
    """Schedule the hourly database-cleanup check.

    Bug fix: the original passed ``lambda: self.cleanup_db_check``, which
    merely evaluated the bound method every hour without ever calling it,
    so the cleanup never ran. The bound method itself is now passed so the
    scheduler actually invokes it.
    """
    trigger = interval.IntervalTrigger(hours=1)
    scheduler.add_job(self.cleanup_db_check,
                      trigger=trigger,
                      id='cmoncli_db_cleanup_job',
                      replace_existing=True)
def _timer(fn):
    """Register `fn` on the shared scheduler, firing every `interval_seconds` seconds."""
    scheduler.add_job(
        fn, trigger=interval.IntervalTrigger(seconds=interval_seconds))