def start_timescale_thread(app):
    """Create and start the background timescale-capture thread.

    Runs inside the Flask app context so the AppContextThread inherits it;
    the thread entry point is do_timescale_thread, which re-schedules
    itself on each tick.

    :param app: the Flask application instance
    """
    global timescale_updater_thread
    with app.app_context():
        timescale_updater_thread = AppContextThread(
            target=do_timescale_thread
        )
        timescale_updater_thread.start()
def do_connection_thread():
    """One tick of the OpenTTD polling loop.

    Sleeps a second, pulls fresh data from the game connection, triggers
    vehicle refreshes on month rollover and (partially commented-out)
    refreshes on year rollover, then — unless the app is shutting down —
    schedules the next tick by starting a new AppContextThread running
    this same function.
    """
    sleep(1)
    global connection_thread
    global ottd_connection
    global current_date
    global month_last_update
    global year_last_update
    global shutting_down

    ottd_connection.req_data()
    # BUG FIX: was `sync_data()()` — the extra call pair invoked the
    # returned value, which raises TypeError for a plain data object and
    # would kill the polling loop on its first tick.  The `.month`/`.year`
    # accesses below expect a date-like object, not a callable.
    current_date = ottd_connection.sync_data()

    if month_last_update != current_date.month:
        # Trigger Vehicle Sync
        info(f" [ New Month {current_date.year}-{current_date.month} ]")
        info(f" [ Requesting Vehicle Updates ] ")
        ottd_connection.refresh_db_vehicles()
        month_last_update = current_date.month

    if year_last_update != current_date.year:
        info(f" [ New Year {current_date.year} ] ")
        info(f" [ Requesting City & Vehicle Updates ] ")
        year_last_update = current_date.year
        # ottd_connection.refresh_db_vehicles()
        # ottd_connection.refresh_db_towns()

    # Set the next thread to happen
    if not shutting_down:
        connection_thread = AppContextThread(target=do_connection_thread)
        connection_thread.start()
def video_feed(): ob = SkeletonsGenerator() while(True): t = AppContextThread(target = get_time,args=[ob]) return Response(gen(ob),mimetype='multipart/x-mixed-replace;boundary=frame') , t.start() '''def exit_cv2_window():
def payload():
    """Webhook endpoint: trigger a git pull when the payload flags a push.

    Expects a JSON body whose "push" field is the string "True"; when set,
    a background AppContextThread runs get_pull().  Responds with a JSON
    success message.
    """
    body = json.loads(request.data)
    if body["push"] == "True":
        print("hey Lets PULL")
        puller = AppContextThread(target=get_pull)
        puller.start()
    return json.dumps({"success": True})
def start_connection_thread(app):
    """Open the OpenTTD connection and start the first polling tick.

    Both the AppContextThread and the connection object are created inside
    the Flask app context so the thread inherits it.

    :param app: the Flask application instance
    """
    # Do initialisation stuff here
    global connection_thread
    global ottd_connection
    # Create your thread
    with app.app_context():
        connection_thread = AppContextThread(target=do_connection_thread)
        ottd_connection = OpenTTDConnection()
        # ottd_connection.scan_vehicles(20)
        info(f" [ Start Town Scan ]")
        # ottd_connection.schedule_next_town_scan_batch(947, 10)
        connection_thread.start()
def index():
    """Render the index page; on POST, reset the log and start a product scan.

    GET  – render index.html with the current log contents.
    POST – delete logs/last.log (if present), inspect the 'category'
           checkboxes, and launch products() on a background
           AppContextThread with a flag saying whether any category was
           selected.
    """
    if request.method == 'GET':
        return render_template('index.html', posts=Logs.get_log())
    if request.method == 'POST':
        try:
            os.remove('logs/last.log')
        except OSError:
            # The log file may not exist yet; ignore.  (Was a bare
            # `except: pass`, which also swallowed KeyboardInterrupt.)
            pass
        selected = bool(request.form.getlist('category'))
        t = AppContextThread(target=products, args=(selected, ))
        t.start()
        return render_template('index.html')
def do_timescale_thread():
    """One tick of the timescale-capture loop.

    Waits a second, snapshots company/vehicle/town metrics for the current
    in-game date, then — unless the app is shutting down — re-schedules
    itself on a fresh AppContextThread.
    """
    sleep(1)
    global timescale_updater_thread
    global current_date
    global shutting_down
    CompanyTimescaleController.capture_data(current_date)
    VehicleTimescaleController.capture_data(current_date)
    TownTimescaleController.capture_data(current_date)
    if not shutting_down:
        timescale_updater_thread = AppContextThread(
            target=do_timescale_thread
        )
        timescale_updater_thread.start()
def background():
    """Handle an AJAX news query and return article summaries as JSON.

    The JSON request carries "type" ("current" = crawl fresh articles from
    the given domain, anything else = filter the database), "domain", and
    "n" (how many articles to return).  Each article's text is truncated
    to 197 characters as a summary.
    """
    # receive query
    req = request.get_json()
    print(req)
    res = []
    if req["type"] == "current":
        # if user selected to get current news start crawling news from
        # specified domain
        article_objs, flag = start_crawl(req["domain"], int(req["n"]))
        if flag:
            # New articles arrived, so the TF-IDF index needs updating.
            # (Was `thread = AppContextThread(...).start()`, which bound
            # the None returned by start() — the assignment was useless.)
            AppContextThread(target=update).start()
    else:
        # query the database option chosen, so apply filters
        article_objs = apply_filters(req)
    # return only the specified number of articles
    for i in article_objs[:int(req['n'])]:
        doc = i.as_dict()
        # only take first few characters as a summary
        doc['text'] = doc['text'][:197]
        res.append(doc)
    # return response to js front end
    # NOTE(review): 200 is passed *into* jsonify (it becomes part of the
    # JSON body, not an HTTP status code) — confirm this is intended.
    return make_response(jsonify(res, 200))
def run_valves(socket_io, current_set, prev_set):
    """Move the current set's valves to 'run' position, then close the previous set.

    Waits for the slowest sensor delay in the previous set, runs
    open_to_run() on an AppContextThread, and reports any validation
    errors back through a queue.

    :param socket_io: socket communicator used to push valve commands
    :param current_set: set whose valves are opened to their active 'run' config
    :param prev_set: set whose valves are all closed (position 0)
    :return: a validation error list when the move failed, otherwise None
    """
    # NOTE(review): iterates `prev_set` directly here but `prev_set.lands`
    # below — confirm prev_set is itself iterable over lands.
    max_time = max([get_max_sensors_delay(land.sensors) for land in prev_set])
    time.sleep(max_time)

    def open_to_run():
        # open all valves to run in the current set
        # close all valves in the prev set
        for land in current_set.lands:
            for valve in land.valves:
                valve_config = next(
                    filter(lambda x: x.config.active is True,
                           valve.valve_configs), None)
                send_valve_position(valve, valve_config.run, socket_io)
        error_list = validate_move_valves(current_set.lands, 'run')
        if error_list:
            return error_list
        for land in prev_set.lands:
            for valve in land.valves:
                send_valve_position(valve, 0, socket_io)

    error_que = Queue()
    # BUG FIX: the worker lambda previously ignored its `q` parameter and
    # captured error_que from the closure; use the argument it is given.
    t = AppContextThread(target=lambda q: q.put(open_to_run()),
                         args=(error_que, ))
    t.start()
    t.join()
    if not error_que.empty():
        return error_que.get()
def test_running_without_flask_context():
    """Test running AppContextThread outside of flask app raises an error.

    With no Flask app context active, constructing/starting the thread is
    expected to raise RuntimeError (presumably from resolving current_app
    — confirm in AppContextThread) before the target ever runs, so the
    mock must remain uncalled.
    """
    mock_action = Mock()
    with pytest.raises(RuntimeError):
        thread = AppContextThread(target=lambda: mock_action.action())
        thread.start()
        thread.join()
    mock_action.action.assert_not_called()
def syllabus():
    """POST endpoint: store an uploaded syllabus in the DB and sync to Firebase.

    Saves the uploaded file to disk, persists a `subject` row holding the
    file bytes, stashes the form values on flask.g for the background
    updater, and starts updateSyllabusToFirebase on an AppContextThread.
    """
    if request.method == 'POST':
        subject__ = request.form.get('subject')
        year = request.form.get('year')
        branch = request.form.get('branch')
        file = request.files['syllabus']
        file.save(secure_filename(file.filename))
        # BUG FIX: FileStorage.save() consumes the underlying stream, so a
        # following read() returned b'' and an empty syllabus was stored in
        # the database.  Rewind before reading.
        file.seek(0)
        item = subject(subject_=subject__,
                       year_=year,
                       branch_=branch,
                       syllabus_=file.read())
        db.session.add(item)
        db.session.commit()
        g.file = file.filename
        g.year_ = year
        g.subject_ = subject__
        g.branch_ = branch
        theard_ = AppContextThread(target=updateSyllabusToFirebase)
        theard_.start()
        return make_response("Syllabus Added Successfully")
def uploadFiles():
    """Save uploaded files with allowed extensions and index them in the DB.

    Files that fail the secure-filename or extension checks are silently
    skipped.  The DB update runs on an AppContextThread that is joined
    before redirecting, so the request is effectively synchronous.
    """
    hostList = []
    uploadedFiles = request.files.getlist("file")
    for uploadedFile in uploadedFiles:
        fileName = secure_filename(uploadedFile.filename)
        if fileName != '':
            fileExt = os.path.splitext(fileName)[1]
            if fileExt in app.config['UPLOAD_EXTENSIONS']:
                uploadedFile.save(
                    os.path.join(app.config['UPLOAD_FOLDER'], fileName))
                hostList.append(fileName)
    # BUG FIX: `target=updateDb(hostList, ...)` CALLED updateDb on the
    # request thread and handed its return value to the Thread as the
    # target.  Pass the callable and its arguments separately.
    updateThread = AppContextThread(
        target=updateDb, args=(hostList, app.config['UPLOAD_FOLDER']))
    updateThread.start()
    updateThread.join()
    return redirect(url_for('index'))
def create_table(): """ Creates the database. Inserts two users into the users table. Creates and starts the producer and consumer thread to send weekly email updates to the subscribers :return: None """ # db creation db.create_all() try: user = UserModel(email='*****@*****.**', username='******', subscription=True) user.set_password('password') db.session.add(user) user1 = UserModel(email='*****@*****.**', username='******', subscription=True) user1.set_password('password') db.session.add(user1) db.session.commit() except Exception as e: return # Producer thread - to send message to SQS # Consumer thread - to receive message from SQS t1 = AppContextThread(target=producer_thread, daemon=True, args=(db_acc, )) t2 = AppContextThread(target=consumer_thread, daemon=True, args=( db_acc, app, mail, EMAIL_ADDRESS, )) t1.start() t2.start()
def create_app():
    """Application factory: build the Flask app, create tables, register
    blueprints, and start the background timer-jobs thread.

    NOTE(review): secret_key and SQLALCHEMY_DATABASE_URI are empty strings
    here — they must be configured before production use.
    """
    appl = Flask(__name__, static_folder=os.path.abspath(''))
    appl.debug = True
    appl.secret_key = ""
    appl.config['SQLALCHEMY_DATABASE_URI'] = ''
    appl.config['UPLOAD_FOLDER'] = "Parts/static/"
    db.init_app(appl)
    login_manager.init_app(appl)
    with appl.app_context():
        db.create_all()
        Base.metadata.create_all(db.engine)
        global session
        from Error import error_routes
        from Admin import admin
        from Dashboard import dash
        from Equipment import equipment
        from Parts import parts
        from Locations import locations
        from Supplier import supplier
        appl.register_blueprint(routes.Index_bp)
        appl.register_blueprint(error_routes.Error_bp)
        appl.register_blueprint(dash.Dash_bp)
        appl.register_blueprint(admin.Admin_bp)
        appl.register_blueprint(equipment.Equip_bp)
        appl.register_blueprint(parts.Parts_bp)
        appl.register_blueprint(locations.Locat_bp)
        appl.register_blueprint(supplier.Sup_bp)
        # Started inside the app context so the AppContextThread can
        # capture it for the periodic timer jobs.
        AppContextThread(target=timer_jobs).start()
    return appl
def create_load_test():
    """Create a load-test record, run Locust, and store the result.

    Clears old predictions, snapshots the current deployment list, inserts
    a "Running" loadtest document, runs run_locust on an AppContextThread
    (joined, so this request blocks until the test finishes), then marks
    the document "Done" with the result read from flask.g.

    :return: JSON with the loadtest document id
    """
    loadtest = mongo.db.loadtest
    deployments = mongo.db.deployments
    predictions = mongo.db.predictions
    # Remove any existing predictions
    predictions.delete_many({})
    equation_str = str(request.json['equation'])
    timestamp = str(datetime.now())
    list_of_deployments = []
    # Keeps only the last document's 'list' — each iteration overwrites.
    for s in deployments.find():
        list_of_deployments = s['list']
    loadtest_insert_obj = loadtest.insert_one({
        'status': "Running",
        'equation': equation_str,
        "timestamp": timestamp,
        "deployments": list_of_deployments,
        "data": "none"
    })
    loadtest_id = loadtest_insert_obj.inserted_id
    # Presumably run_locust consumes g.list_of_deployments and writes
    # g.locust_result (read below) — verify against run_locust.
    g.list_of_deployments = list_of_deployments
    t = AppContextThread(target=run_locust)
    t.start()
    t.join()
    loadtest.update_one({"_id": loadtest_id},
                        {"$set": {
                            'status': "Done",
                            "data": g.locust_result
                        }},
                        upsert=True)
    return jsonify({'result': str(loadtest_id)})
def preflow_valves(socket_io, current_set, sensors, prev_land):
    """Open the current set's valves to 'preflow', then close the previous land.

    Waits for the slowest sensor delay, runs open_to_preflow() on an
    AppContextThread, and reports any validation errors back through a
    queue.

    :param sensors: sensors from current lands
    :param current_set: current set that needs to open all valves to preflow
    :param socket_io: socket communicator
    :param prev_land: land from prev set that needs to close the valves
    :return: a validation error list when the move failed, otherwise None
    """
    max_time = get_max_sensors_delay(sensors)
    time.sleep(max_time)

    def open_to_preflow():
        # open all valves to preflow in the current set
        # then close all valves in the current land (that belongs to the prev_set)
        for land in current_set.lands:
            for valve in land.valves:
                valve_config = next(
                    filter(lambda x: x.config.active is True,
                           valve.valve_configs), None)
                send_valve_position(valve, valve_config.preflow, socket_io)
        error_list = validate_move_valves(current_set.lands, 'preflow')
        if error_list:
            return error_list
        # NOTE(review): `prev_land.lands` looks inconsistent with the
        # docstring ("land from prev set") and with run_valves, which
        # iterates a land's valves — confirm this shouldn't be
        # prev_land.valves.
        for valve in prev_land.lands:
            send_valve_position(valve, 0, socket_io)

    error_que = Queue()
    # BUG FIX: the worker lambda previously ignored its `q` parameter and
    # captured error_que from the closure; use the argument it is given.
    t = AppContextThread(target=lambda q: q.put(open_to_preflow()),
                         args=(error_que, ))
    t.start()
    t.join()
    if not error_que.empty():
        return error_que.get()
def create_app(test_config=None):
    """Create and configure an instance of the Flask application.

    Besides the standard flaskr setup (config, instance folder, db,
    blog blueprint), this starts an AppContextThread running a TLS server
    that receives evidence lines (auditd/inotify events) from a
    certificate-verified client and inserts them into the `evidence`
    table, deduplicating auditd rules already stored.
    """
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_mapping(
        # a default secret that should be overridden by instance config
        SECRET_KEY="dev",
        # store the database in the instance folder
        DATABASE=os.path.join(app.instance_path, "flaskr.sqlite"),
    )
    if test_config is None:
        # load the instance config, if it exists, when not testing
        app.config.from_pyfile("config.py", silent=True)
    else:
        # load the test config if passed in
        app.config.update(test_config)
    # ensure the instance folder exists
    try:
        os.makedirs(app.instance_path)
    except OSError:
        pass
    # register the database commands
    from flaskr import db
    db.init_app(app)
    # apply the blueprints to the app
    from flaskr import blog
    app.register_blueprint(blog.bp)
    # make url_for('index') == url_for('blog.index')
    # in another app, you might define a separate main index here with
    # app.route, while giving the blog blueprint a url_prefix, but for
    # the tutorial the blog will be the main index
    app.add_url_rule("/", endpoint="index")

    # socket-reading thread
    def thread_job():
        import OpenSSL
        from OpenSSL.SSL import TLSv1_2_METHOD, FILETYPE_PEM, VERIFY_FAIL_IF_NO_PEER_CERT
        import socket
        from cryptography import x509
        from cryptography.hazmat.backends import default_backend
        from cryptography.hazmat.primitives import serialization

        def hola():
            # Verify callback.  NOTE(review): pyOpenSSL verify callbacks
            # must return a truthy value to accept the peer certificate;
            # this one returns None — confirm intended.
            pass

        # Create context for the TLS session
        context = OpenSSL.SSL.Context(TLSv1_2_METHOD)
        # Load server private key and cert
        context.use_privatekey_file(
            os.path.join(app.instance_path, "server_key.pem"))
        context.use_certificate_file(
            os.path.join(app.instance_path, "server_cert.pem"))
        # Add verify mode
        context.set_verify(VERIFY_FAIL_IF_NO_PEER_CERT, hola)
        # Load root certificate
        context.load_verify_locations(
            cafile=os.path.join(app.instance_path, "certificate.pem"))
        # Create the initial connection with the above context and a socket
        soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        soc.setblocking(1)
        soc.bind((HOST, PORT))
        soc.listen(1)
        conn_ini = OpenSSL.SSL.Connection(context, soc)
        # Accept client connection
        while 1:
            conn, addr = conn_ini.accept()
            conn.set_accept_state()
            print("Connected by " + str(addr))
            while 1:
                try:
                    data = conn.read(1024)
                    # Connect to the flask database
                    conn_db = connect(
                        os.path.join(app.instance_path, "flaskr.sqlite"))
                    curs = conn_db.cursor()
                    evidencias = data.decode().split('\n')
                    for e in evidencias:
                        if e != '':
                            if "_rule" in e:
                                e = e.split('. ')[1]
                                e = "AUDITD: " + e
                                # SECURITY FIX: this query was built with
                                # str.format on network-supplied text,
                                # allowing SQL injection; bind the value
                                # as a parameter instead (matching the
                                # INSERT below).
                                curs.execute(
                                    'SELECT * FROM evidence WHERE body=?;',
                                    [e],
                                )
                                rows = curs.fetchall()
                                res = len(rows)
                                if res == 1:
                                    # already recorded: skip duplicate
                                    continue
                            else:
                                e = "INOTIFY: " + e
                            curs.execute(
                                "INSERT INTO evidence (body) VALUES (?);",
                                [e],
                            )
                    conn_db.commit()
                    conn_db.close()
                    #print(data.decode())
                except OpenSSL.SSL.SysCallError as e:
                    #if e[0] == -1 or e[1] == 'Unexpected EOF':
                    conn.shutdown()
                    break

    with app.app_context():
        t = AppContextThread(target=thread_job)
        t.start()
        #t.join()
    return app
def listen_sensors_thread(socket_io):
    """Start the five background worker threads for the sensor system.

    Each worker is wrapped with thread_wrap (presumably a crash-guard /
    restart wrapper — confirm in thread_wrap) and runs on an
    AppContextThread: sensor data intake, sensor/valve online-status
    checks, battery & temperature updates, an outside-connectivity ping,
    and GPS polling.  Also registers the socket.io connect handler, which
    rejects unauthenticated clients.
    """
    t1 = AppContextThread(target=thread_wrap(receive_sensor_data),
                          args=(socket_io, ))
    print("***Listen sensors thread before running***")
    t1.start()
    t2 = AppContextThread(target=thread_wrap(check_online_status),
                          args=(socket_io, ))
    print("***Check Status Sensor-Valve-Check thread before running***")
    t2.start()
    t3 = AppContextThread(target=thread_wrap(update_battery_temp),
                          args=(socket_io, ))
    print("***Battery-Temperature thread before running***")
    t3.start()
    t4 = AppContextThread(target=thread_wrap(ping_outside))
    print("***PING thread before running***")
    t4.start()
    t5 = AppContextThread(target=thread_wrap(get_gps_data))
    print("***GPS thread before running***")
    t5.start()

    @socket_io.on('connect', namespace='/notification')
    def test_connect():
        # Returning False tells Flask-SocketIO to refuse the connection.
        if not current_user.is_authenticated:
            print("NOT AUTHENTICATED!")
            return False
        print("CONNECTED")
'name': s.name, 'colour': s.colour, 'score': s.score } for s in sorted(room.snakes, reverse=True)] # Don't need to send an update if snakes hasn't changed if not old_snakes == snakes: socketio.emit('scores_changed', {'snakes': snakes}, room=room_id) old_snakes = snakes.copy() # Deep copies the list if thread_lock == None: thread_lock = True socketio.start_background_task(target=send_room_state) socketio.start_background_task(target=send_scores) @socketio.on('change_direction') def move_snake(data): global user_snakes user_snakes[request.sid].setDirection(data['direction']) with app.test_request_context(): for room in game_rooms.values(): game_loop = AppContextThread(target=room.loop) game_loop.start() room.reset(num_ais=random.randint(3, 5)) if __name__ == '__main__': socketio.run(app, debug=False, host='0.0.0.0')
def pre_process():
    """Kick off CSV pre-processing in the background and respond immediately."""
    worker = AppContextThread(target=data_processor.process_files)
    worker.start()
    return "Pre-processing csv files..."
def create_app():
    """Application factory: configure Flask, JWT auth, mail, CORS, logging
    and the DB, start the background worker thread, and register all
    blueprints.

    NOTE(review): SECRET_KEY / JWT_SECRET_KEY are hard-coded placeholder
    strings — set them from the environment before production use.
    """
    # Todo: Make this handle environment configs better
    app = Flask(__name__)
    app.config['FLASK_ENV'] = environ.get('FLASK_ENV')
    app.config["SECRET_KEY"] = "thisshouldbesetforproduction"
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///db.sqlite"  # url
    app.config["JWT_AUTH_URL_RULE"] = "/api/auth/login"
    app.config["JWT_SECRET_KEY"] = "thisshouldbesetforproduction"
    app.config["JWT_EXPIRATION_DELTA"] = timedelta(days=1)
    # app.config['SQLALCHEMY_ECHO'] = True
    # Email configuration
    app.config.update(
        dict(
            DEBUG=True,
            MAIL_SERVER='smtp.sendgrid.net',
            MAIL_PORT=465,
            MAIL_USE_TLS=False,
            MAIL_USE_SSL=True,
            MAIL_USERNAME='******',
            MAIL_PASSWORD=environ.get("SENDGRID_API_KEY"),
            MAIL_DEBUG=False,
        ))
    cors = CORS(app, resources={r"/api/*": {"origins": "*"}})
    from .models import User
    from .models.track_models import TrackOut, Track, Equalizer, Compressor, Deesser, Reverb
    from .routes.auth import authentication_handler, identity_handler
    JWT(app, authentication_handler, identity_handler)
    # Root logger: INFO level, formatted stream handler on stdout.
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    root.addHandler(handler)
    '''
    # db.drop_all()
    # db.create_all() only creates models within scope
    '''
    with app.app_context():
        mail.init_app(app)
        db.init_app(app)
        # db.drop_all()
        db.create_all()
        db.session.commit()
        # Background job worker; daemon thread so it won't block exit.
        # Started inside the app context so AppContextThread can capture it.
        AppContextThread(target=worker, daemon=True).start()
    '''
    WebServer Rendering Routes
    '''
    from .routes.main import main_bp
    app.register_blueprint(main_bp)
    '''
    Database Interactive Routes
    '''
    from .routes.auth import auth_bp
    app.register_blueprint(auth_bp)
    from .routes.users import users_bp
    app.register_blueprint(users_bp)
    from .routes.tracks import tracks_bp
    app.register_blueprint(tracks_bp)
    from .routes.trackOuts import trackouts_bp
    app.register_blueprint(trackouts_bp)
    from .routes.errors import errors_bp
    app.register_blueprint(errors_bp)
    return app
def test_handler():
    """Test route: stash TEST_G on flask.g, run the mock action on an
    AppContextThread (exercising that g survives into the thread), and
    return TEST_RESULT as JSON."""
    g.test = TEST_G
    worker = AppContextThread(target=lambda: mock_action.action(g.test))
    worker.start()
    worker.join()
    return jsonify(TEST_RESULT)