def create_app():
    """Build and configure the Flask application.

    Loads optional settings from the ``AXE_CONFIG`` environment variable,
    registers the recurring ``count`` job with APScheduler, and mounts the
    Prometheus metrics WSGI app under ``/metrics``.
    """
    flask_app = Flask(__name__)

    if 'AXE_CONFIG' in os.environ:
        flask_app.config.from_envvar('AXE_CONFIG')
    else:
        logger.warning('Missing config: AXE_CONFIG.')

    # Scheduler settings: expose the REST API and run `count` every 3 seconds.
    job_settings = {
        'SCHEDULER_API_ENABLED': True,
        'JOBS': [
            {
                'func': count,
                'trigger': 'interval',
                'seconds': 3,
                'id': 'count',
            }
        ],
    }
    flask_app.config.update(job_settings)

    job_scheduler = APScheduler()
    job_scheduler.init_app(flask_app)

    # Mount the Prometheus exporter alongside the main app.
    flask_app.wsgi_app = DispatcherMiddleware(
        flask_app.wsgi_app, {'/metrics': make_wsgi_app()}
    )
    return flask_app
def setUp(self):
    """Create a Flask app wired to a freshly started scheduler with the REST API on."""
    scheduler = APScheduler()
    scheduler.api_enabled = True
    application = Flask(__name__)
    scheduler.init_app(application)
    scheduler.start()
    self.app = application
    self.scheduler = scheduler
    self.client = application.test_client()
class TestAPIPrefix(TestCase):
    """Checks that the scheduler REST API honours a custom URL prefix."""

    def setUp(self):
        scheduler = APScheduler()
        scheduler.api_enabled = True
        scheduler.api_prefix = '/api'
        self.app = Flask(__name__)
        scheduler.init_app(self.app)
        scheduler.start()
        self.scheduler = scheduler
        self.client = self.app.test_client()

    def test_api_prefix(self):
        # The jobs endpoint must be reachable under the configured prefix.
        jobs_url = self.scheduler.api_prefix + '/jobs'
        self.assertEqual(self.client.get(jobs_url).status_code, 200)

    def test_invalid_api_prefix(self):
        # Any other prefix must not be routed.
        self.assertEqual(self.client.get('/invalidapi/jobs').status_code, 404)
def stacosys_server(config_pathname):
    """Bootstrap and run the Stacosys Flask application.

    Loads configuration, configures logging, sets up the database, starts
    the APScheduler mail/comment polling jobs, regenerates all RSS feeds,
    and finally blocks in ``app.run``.

    :param config_pathname: path to the configuration file handed to
        ``config.initialize``.
    """
    app = Flask(__name__)
    config.initialize(config_pathname, app)

    # configure logging
    logger = logging.getLogger(__name__)
    configure_logging(logging.INFO)
    # Use setLevel() (the public API) instead of assigning .level directly,
    # so the logging module's internal caches stay consistent.
    logging.getLogger("werkzeug").setLevel(logging.WARNING)
    logging.getLogger("apscheduler.executors").setLevel(logging.WARNING)

    # initialize database
    from core import database

    database.setup()

    # cron email fetcher: polling intervals come from the app config
    app.config.from_object(
        JobConfig(
            config.getInt(config.MAIL_POLLING), config.getInt(config.COMMENT_POLLING)
        )
    )
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()

    logger.info("Start Stacosys application")

    # generate RSS for all sites
    from core import rss

    rss.generate_all()

    # start Flask — imports register the routes as a side effect
    from interface import api
    from interface import form

    # Lazy %-style args: the message is only formatted when DEBUG is enabled.
    logger.debug("Load interface %s", api)
    logger.debug("Load interface %s", form)

    app.run(
        host=config.get(config.HTTP_HOST),
        port=config.get(config.HTTP_PORT),
        debug=False,
        use_reloader=False,
    )
def create_app():
    """Application factory: settings, filesystem sessions, Jinja, scheduled jobs."""
    app = Flask(__name__)

    # Prefer an explicit settings file from the environment; otherwise fall
    # back to the first settings.cfg found above or beside this package.
    if os.environ.get('SITTER_SETTINGS'):
        app.config.from_envvar('SITTER_SETTINGS')
    else:
        package_dir = os.path.dirname(__file__)
        for candidate_dir in (os.path.dirname(package_dir), package_dir):
            candidate = os.path.join(candidate_dir, 'settings.cfg')
            if os.path.isfile(candidate):
                app.config.from_pyfile(candidate)
                break

    session_dir = app.config['SESSION_STORAGE_DIR']
    if not os.path.exists(session_dir):
        os.makedirs(session_dir)
    kv = KVSessionExtension(FilesystemStore(session_dir), app)

    # Remove extra white space.
    app.jinja_env.trim_blocks = True
    app.jinja_env.lstrip_blocks = True

    def cleanup_sessions():
        kv.cleanup_sessions(app)

    # Add some scheduled jobs.
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.add_job(id="kv-session-cleanup", func=cleanup_sessions,
                      seconds=10, trigger='interval')
    scheduler.start()
    return app
def setUp(self):
    """Start a scheduler protected by HTTP basic auth and keep a test client."""
    self.app = Flask(__name__)
    auth_scheduler = APScheduler()
    auth_scheduler.auth = HTTPBasicAuth()
    auth_scheduler.api_enabled = True
    auth_scheduler.init_app(self.app)
    auth_scheduler.start()
    auth_scheduler.authenticate(self._authenticate)
    self.scheduler = auth_scheduler
    self.client = self.app.test_client()
class TestHTTPBasicAuth(TestCase): def setUp(self): self.app = Flask(__name__) self.scheduler = APScheduler() self.scheduler.auth = HTTPBasicAuth() self.scheduler.api_enabled = True self.scheduler.init_app(self.app) self.scheduler.start() self.scheduler.authenticate(self._authenticate) self.client = self.app.test_client() def _authenticate(self, auth): return auth['username'] == 'test' and auth['password'] == 'test' def test_valid_credentials(self): headers = {'Authorization': 'Basic ' + base64.b64encode(b'test:test').decode('ascii')} response = self.client.get(self.scheduler.api_prefix + '', headers=headers) self.assertEqual(response.status_code, 200) def test_invalid_credentials(self): headers = {'Authorization': 'Basic ' + base64.b64encode(b'guest:guest').decode('ascii')} response = self.client.get(self.scheduler.api_prefix + '', headers=headers) self.assertEqual(response.status_code, 401) self.assertEqual(response.headers['WWW-Authenticate'], 'Basic realm="Authentication Required"') def test_invalid_header_format(self): headers = {'Authorization': 'Basic 1231234'} response = self.client.get(self.scheduler.api_prefix + '', headers=headers) self.assertEqual(response.status_code, 401) self.assertEqual(response.headers['WWW-Authenticate'], 'Basic realm="Authentication Required"') def test_missing_credentials(self): response = self.client.get(self.scheduler.api_prefix + '') self.assertEqual(response.status_code, 401) self.assertEqual(response.headers['WWW-Authenticate'], 'Basic realm="Authentication Required"')
class TestEndpointPrefix(TestCase):
    """Verifies that view endpoints are registered under a custom endpoint prefix."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.endpoint_prefix = 'api.'
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_endpoint_prefix(self):
        # The prefixed endpoint name must resolve to a URL.
        with self.scheduler.app.test_request_context():
            # url_for raises BuildError for unknown endpoints, so a truthy
            # return value proves the endpoint exists (bool() replaces the
            # redundant `True if ... else False`).
            valid_url = bool(url_for(self.scheduler.endpoint_prefix + 'get_scheduler_info'))
            self.assertTrue(valid_url)

    def test_invalid_endpoint_prefix(self):
        # The unprefixed endpoint name must no longer be registered.
        with self.scheduler.app.test_request_context():
            try:
                valid_url = url_for('get_scheduler_info')
            except BuildError:
                # No need to bind the exception; only the failure matters.
                valid_url = False
            self.assertFalse(valid_url)
def srmail_server(config_pathname):
    """Bootstrap and run the SRMAIL Flask application.

    Loads configuration, configures logging, sets up the database, starts
    the APScheduler IMAP polling job, and blocks in ``app.run`` until the
    server stops.

    :param config_pathname: path to the configuration file handed to
        ``config.initialize``.
    """
    app = Flask(__name__)
    config.initialize(config_pathname, app)

    # configure logging
    logger = logging.getLogger(__name__)
    configure_logging(logging.INFO)

    # initialize database
    from core import database

    database.setup()

    # cron email fetcher: polling interval comes from the app config
    app.config.from_object(JobConfig(config.getInt(config.IMAP_POLLING)))
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()

    logger.info("Starting SRMAIL application")

    # start Flask — the import registers the API routes as a side effect
    from interface import api

    # Lazy %-style args: the message is only formatted when DEBUG is enabled.
    logger.debug("Load interface %s", api)
    app.run(
        host=config.get(config.HTTP_HOST),
        port=config.get(config.HTTP_PORT),
        debug=False,
        use_reloader=False,
    )

    # Exit application
    logger.info("Stopping SRMAIL application")
#!/usr/bin/env python3 import sqlite3 from pathlib import Path from flask import Flask, redirect, render_template, url_for from flask_apscheduler import APScheduler class Config(object): SCHEDULER_API_ENABLED = True DUMP_DIRECTORY_PATH = Path('/home/retr0/Desktop/dump') DB = "dump.db" scheduler = APScheduler() app = Flask(__name__, template_folder="") app.config.from_object(Config()) def dump_parse_save_unique(requested_dumps): # insert meterpreter dump here # after the dumps are done, they will be picked up for dump_type in requested_dumps: meta = {'DumpType': dump_type} # headers = [] dump = {} try: with open(app.config['DUMP_DIRECTORY_PATH'] / f"{dump_type}.txt", 'r', encoding='utf-8') as dump_f: idx = None
class Logic: bus_num = 1 db = Database() shared = Manager().dict() scheduler = None lasers = None _comQueue = Queue() _process = Lock() _solenoid = SOLENOID_STATE.UNLOCKED _ultrasonic = ULTRASONIC_STATE.ENABLED _laserState = LaserPattern.LASER_OFF _laserCounter = 0 _patternIndex = 0 _laserValue = 0x00 _command = None def __init__(self): self.laserPattern = LaserPattern.ONE_CYCLES self._timer = 0 self._patternIndex = 0 # Initialize ICc Devices self._bus = SMBus(self.bus_num) self.lasers = LaserControl(self._bus, I2C.LASERS) self.sevenseg = SevenSeg(self._bus, I2C.SEVENSEG).sevenseg self.arduino = ArduinoI2C(self._bus, I2C.ARDUINO) self.photo_resistors = ReceptorControl(self._bus, I2C.PHOTO_RESISTORS) self.lock = BoxLock(self._bus, I2C.SOLENOID) self.switches = SwitchesI2C(self._bus, I2C.SWITCHES) @property def patternIndex(self) -> int: return self._patternIndex @patternIndex.setter def patternIndex(self, value: int): self._patternIndex = value @property def laserCounter(self) -> int: return self._laserCounter @laserCounter.setter def laserCounter(self, value: int): self._laserCounter = value def laserCounterIncrement(self): self.laserCounter = self.laserCounter + 1 @property def laserState(self) -> LaserPattern: return LaserPattern(self.shared.get("laserpattern", LaserPattern.LASER_OFF.value)) @laserState.setter def laserState(self, value: LaserPattern): self.shared["laserpattern"] = value.value @property def laserValue(self) -> int: return self.shared.get("laservalue", 0x00) @laserValue.setter def laserValue(self, value: int): self.shared["laservalue"] = value @property def command(self): return self.shared.get("command", None) @command.setter def command(self, value: str): self.shared["command"] = value def timer_values(self) -> Tuple[int, int, int]: newDatetime = datetime.now() - self.start_time seconds = MAX_TIME - newDatetime.seconds minutes = seconds // 60 secondsToPrint = seconds - minutes * 60 return minutes, secondsToPrint, seconds @property def 
timer_text(self) -> str: minutes, secondsToPrint, seconds = self.timer_values() if self.state is STATE.WAIT: return "RESET" if self.state is STATE.RUNNING: if minutes < 0 or seconds < 0: self.shared[INTERRUPT.KILL_PLAYER] = True return "DEAD" elif self.state is STATE.EXPLODE: return "DEAD" elif self.state is STATE.WIN: return "SUCCESS!" return "{}:{:2}".format(minutes, str(secondsToPrint).zfill(2)) @property def start_time(self) -> datetime: return self.shared.get("start time", datetime.now()) @start_time.setter def start_time(self, value: datetime): self.shared["start time"] = value @property def ultrasonic(self) -> ULTRASONIC_STATE: return ULTRASONIC_STATE(self.shared.get("ultrasonic", ULTRASONIC_STATE.ENABLED.value)) @ultrasonic.setter def ultrasonic(self, value: ULTRASONIC_STATE): # TODO send the new ultrasonic logic over I2C log.debug("Ultrasonic logic changed from {} to {}".format(self.ultrasonic.value, value.value)) self.shared["ultrasonic"] = value.value @property def comQueue(self) -> Queue: return self._comQueue @comQueue.setter def comQueue(self, value): log.debug("Queue was created") self._comQueue = value @property def solenoid(self) -> SOLENOID_STATE: return SOLENOID_STATE(self.shared.get("solenoid", SOLENOID_STATE.UNLOCKED.value)) @solenoid.setter def solenoid(self, value: SOLENOID_STATE): # send the new solenoid logic over I2C if value is SOLENOID_STATE.UNLOCKED: self.lock.open() else: self.lock.close() log.debug("Solenoid logic changed from {} to {}".format(self.solenoid.value, value.value)) self.shared["solenoid"] = value.value @property def state(self): return STATE(self.shared.get("logic", STATE.WAIT.value)) @state.setter def state(self, value: STATE): log.debug("State changed from {} to {}".format(self.state.value, value.value)) self.shared["logic"] = value.value @property def entered_code(self) -> hex: return self.shared.get("enteredcode", 0xFFF) @entered_code.setter def entered_code(self, value: hex): assert 0x0 <= value <= 0xfff 
log.debug("Setting new entered code: 0x{}".format(value)) self.shared["enteredcode"] = value @property def keypad_code(self) -> hex: return self.shared.get("code", 0x000) @keypad_code.setter def keypad_code(self, value: hex): assert 0x0 <= value <= 0xfff log.debug("Setting new keypad code: 0x{}".format(value)) self.shared["code"] = value @property def team(self) -> str: return self.shared.get("team", self.db.last.name if self.db.get_rows() else "--") @team.setter def team(self, value: str): log.debug("Setting current team name to: {}".format(value)) self.shared["team"] = value if self.db.get_rows(): self.db.last = Row(name=value) @property def rgb_color(self) -> RGBColor: return RGBColor(self.shared.get("rgb", RGBColor.BLANK.value)) @rgb_color.setter def rgb_color(self, value: RGBColor): log.debug("Setting new rgb color: {}".format(value)) # send the command over i2c to change the rgb color color_map = {'green': 0x1c, 'red': 0xe0, 'blue': 0x03, 'black': 0x00} self.arduino.color = color_map[value.value] self.shared["rgb"] = value.value def run(self, queue: Queue): """ Start the game and make sure there is only a single instance of this process This is the setup function, when it is done, it will start the game loop """ with self._process: # Initialize I2C server self.state = STATE.WAIT # Change logic of game to WAIT self.solenoid = SOLENOID_STATE.LOCKED self.comQueue = queue self.ultrasonic = ULTRASONIC_STATE.ENABLED self.laserPattern = LaserPattern.ONE_CYCLES self.scheduler = APScheduler(scheduler=BackgroundScheduler(), app=current_app) self.scheduler.add_job("loop", self._loop, trigger='interval', seconds=1, max_instances=1, replace_existing=False) self.scheduler.start() # TODO start thread polling sensors try: while True: self.poll_sensors() except KeyboardInterrupt: return # Commented out because we are doing it in other functions def poll_sensors(self): """ Poll all of the sensors and raise a flag if one of them has tripped. 
If the right wire was clipped at the end of the puzzle, raise the win flag """ pass # self._bus.write_byte_data(I2C.LASERS.value, 0, 9) # for i2c in I2C: # log.debug("Reading from Project_Theseus_API.i2c on {}".format(i2c.name)) # foo = self._bus.read_word_data(i2c.value, 0) # self._send(I2C.SEVEN_SEG, "Hello!") def getNextLaserPatternList(self): if self.laserState is LaserPattern.ONE_CYCLES: return LaserPattern.TWO_CYCLES elif self.laserState is LaserPattern.TWO_CYCLES: return LaserPattern.UP_AND_DOWN elif self.laserState is LaserPattern.UP_AND_DOWN: return LaserPattern.INVERSION elif self.laserState is LaserPattern.INVERSION: return LaserPattern.LASER_OFF else: return self.laserState def getLaserPattern(self): if self.laserState is LaserPattern.ONE_CYCLES: pattern = LaserPatternValues.ONE_CYCLES.value elif self.laserState is LaserPattern.TWO_CYCLES: pattern = LaserPatternValues.TWO_CYCLES.value elif self.laserState is LaserPattern.UP_AND_DOWN: pattern = LaserPatternValues.UP_AND_DOWN.value elif self.laserState is LaserPattern.INVERSION: pattern = LaserPatternValues.INVERSION.value elif self.laserState is LaserPattern.LASER_OFF: pattern = LaserPatternValues.LASER_OFF.value elif self.laserState is LaserPattern.RANDOM: pattern = LaserPatternValues.RANDOM.value elif self.laserState is LaserPattern.STATIC: return self.laserValue else: pattern = None # Increment the patternIndex if pattern is not None: if self.patternIndex < len(pattern): retValue = pattern[self.patternIndex] self.patternIndex += 1 else: self.patternIndex = 0 retValue = pattern[self.patternIndex] if retValue == 0xFF: return random.randint(0, 0x3F) else: return retValue else: # All lasers turn return 0x3F def updateLaserPattern(self): if self.laserCounter < SECONDS_PER_PATTERN: self.laserCounterIncrement() else: self.laserState = self.getNextLaserPatternList() self.patternIndex = 0 # So that we start at the beginning self.laserCounter = 0 # Time per element of pattern self.laserValue = 
self.getLaserPattern() # Set laser pattern self.lasers.state = self.laserValue def checkCode(self): """ Need to verify that the code they entered was correct. Verifies against the current keycode """ # Get the last entered key key = self.arduino.keypad # The key should be a single hex character between 0 and 9 # The entered_code variable stores the 3 numbers they have entered # If they enter a fourth number they die # Start Game initializes it to 0xFFF so we check that it isn't F # to see if a number has been previously entered str_keypad_code = "{:2X}".format(self.keypad_code) str_entered_code = "{:2X}".format(self.entered_code) # Iterate through characters and verify for i in range(0, 3): # Add latest key to entered code if str_entered_code[i] == "F": if key != None: # We need to fill this space str_entered_code[i] = key # Save in the shared dict self.entered_code = int(str_entered_code) # Now we need to check if it matches if str_entered_code[i] != str_keypad_code[i]: return False else: # The key matched so far, continue pass else: # Key was none, don't update entered and don't check pass else: # entered code is not F, so we need to see if it matches # admitedly this shouldn't be called because of how we check if str_entered_code[i] != str_keypad_code[i]: return False # We are assuming it has matched so far, but if they have entered another key, # it no longer does, and they should fail if key != None: return False else: # They didn't type a key, so they lived and won # They didn't exceed the code size # TODO: Add additional code for more logic later, this will # make them win right now self.state = STATE.WIN self.end_game(success=True) # If you reached here, the codes matched so far, so return True return True def updateLed(self): """ There are 6 switches on the box. The four black correspond to a number in hex, and the 2 white help you know which number in the keypad_code you are looking at. 
The light will be RED if the white switches are 0 or the current code is wrong, and GREEN if the corresponding number from keypad_code is correct. """ # TODO: Verify that my understanding of how the switches are organized # as far as the number they return is correct # Determined by the white switches. 0 corresponds to none of the numbers # and 1, 2, and 3 respectively correspond to the 1, 2, and 3 numbers in # keypad_code keypad_code_index = self.switches.read_switches()[-2:] # What number does the user want to test? test_number = self.switches.read_switches()[:-2] # Get the keypad code as a string str_keypad_code = "{:2X}".format(self.keypad_code) # set the led based on if they are correct if keypad_code_index > 0: if str_keypad_code[keypad_code_index - 1] == str(test_number): # It matches, make the light green self.rgb_color = RGBColor.GREEN else: # It doesn't match, make the light red self.rgb_color = RGBColor.RED else: # They haven't selected one, make it blue self.rgb_color = RGBColor.BLUE def _loop(self): command_id = None if not self.comQueue.empty(): command = self.comQueue.get() command_id = command[0] print("\n\n\n{}\n\n\n".format(command_id)) # State Actions if self.state is STATE.WAIT: self.sevenseg(0x0) self.laserState = LaserPattern.LASER_OFF elif self.state is STATE.RUNNING: # Update the seven segment display to show the correct time minutes, seconds, total_seconds = self.timer_values(self.sevenseg(int("0x{:02}{:02}".format(minutes, seconds), 16))) # Update what the current laser pattern should be self.updateLaserPattern() # Update LED by checking switches self.updateLed() elif self.state is STATE.EXPLODE: self.sevenseg(0xdead) # TODO randomize laser pattern so that they flash elif self.state is STATE.WIN: self.sevenseg(0xbeef) else: log.error("Reached an unknown state: {}".format(self.state)) # State Transitions if self.state is STATE.WAIT: if self.command == "toggle-game": self.command = None # TODO? 
Verify that the box is reset before starting the game self.state = STATE.RUNNING self.start_game() elif self.state is STATE.RUNNING: minutes, seconds, total_seconds = self.timer_values() if self.command == "toggle-game" or self.command == "toggle-game": if self.command is not None: self.command = None self.state = STATE.WAIT # FIXME? Delete last row on reset self.end_game(success=False) elif self.shared[INTERRUPT.KILL_PLAYER]: self.state = STATE.EXPLODE self.end_game(success=False) elif self.shared[INTERRUPT.DEFUSED]: self.state = STATE.WIN self.end_game(success=True) # Kill the player if time has run out elif total_seconds <= 0: self.state = STATE.EXPLODE self.end_game(success=False) elif self.laserValue != self.photo_resistors.read_int(): self.state = STATE.EXPLODE self.end_game(success=False) elif not self.checkCode(): # If they reached here the code didn't match self.state = STATE.EXPLODE self.end_game(success=False) elif self.state is STATE.EXPLODE: if self.shared[INTERRUPT.RESET_GAME] or self.shared[INTERRUPT.TOGGLE_TIMER]: self.state = STATE.WAIT elif self.state is STATE.WIN: if self.shared[INTERRUPT.RESET_GAME] or self.shared[INTERRUPT.TOGGLE_TIMER]: self.state = STATE.WAIT def _send(self, device: I2C, message: str): """ Send a command to a device over I2c. 
Nothing external should call this, only "loop" :param device: :param message: :return: """ assert len(message) < 32 log.debug("Address: 0x{:02x} Message: '{}'".format(device.value, message)) try: self._bus.write_i2c_block_data(device.value, 0x00, [ord(c) for c in message]) except IOError: pass @staticmethod def random_laser_pattern() -> int: return random.randint(0, 0x40) def generateKeyCode() -> int: """ Generate a 3 number code where all numbers are less than 9 and >= 0 """ # We can't go over 9 because our keypad doesn't go above 9 return (random.randint(0, 0x9) << 8) | (random.randint(0, 0x9) << 4) | (random.randint(0, 0x9)) def start_game(self): """ Add a row to the database, generate random data for all the puzzles """ self.solenoid = SOLENOID_STATE.LOCKED self.start_time = datetime.now() self.keypad_code = generateKeyCode() self.entered_code = 0xFFF # Set as this because it is impossible to get on our keypad self.rgb_color = RGBColor.BLUE self.laserState = LaserPattern.ONE_CYCLES row = Row( name=self.team, lasers=self.laserValue, code=self.keypad_code, success=False, time=MAX_TIME, color=self.rgb_color.value, ) log.debug("Adding new row to the database:\n{}".format(row)) self.db.add_row(row) def end_game(self, success: bool = False): log.debug("Game Over") self.db.last = Row( name=self.team, code=self.keypad_code, lasers=self.laserValue, success=success, time=(datetime.now() - self.start_time).seconds )
from logging.config import dictConfig from apscheduler.schedulers import SchedulerAlreadyRunningError from flask import Flask, redirect, render_template, send_from_directory, url_for from flask_apscheduler import APScheduler from flask_cors import CORS from glow.colors import palette from glow.conf import settings from glow.effects import EffectFactory from glow.light import Light, LightManager from glow.strips import GlowStrip, StripFactory, StripManager # from glow.scheduler import init_scheduler light_manager = LightManager() strip_manager = StripManager() scheduler = APScheduler() # init_scheduler(scheduler) # Initialize logging dictConfig(settings.LOGGING) logger = logging.getLogger(__name__) def create_app(): app = Flask(__name__) CORS(app) logger.info("Glow initialized with: {}".format(settings.to_dict())) # ApsScheduler try:
from flask_apscheduler import APScheduler
from financial_data.blueprint.tasks.current_price import smoke_task, get_current_price_task

# Module-level scheduler shared by the blueprint; the REST API is exposed so
# jobs can be inspected/managed over HTTP.
scheduler = APScheduler()
scheduler.api_enabled = True
# NOTE(review): the commented line below would call get_current_price_task('IBOV')
# immediately instead of passing a callable — kept for reference only.
#scheduler.add_job(id='Current Price Task', func=get_current_price_task('IBOV'), trigger='interval', seconds=60)
scheduler.add_job(id='Current Price Task', func=smoke_task, trigger='interval', seconds=60)


def init_app(app):
    # Bind the scheduler to the Flask app and start the background jobs.
    scheduler.init_app(app)
    scheduler.start()
import logging from logging.handlers import TimedRotatingFileHandler from apscheduler.schedulers.gevent import GeventScheduler from flask import Flask from flask_apscheduler import APScheduler from flask_sqlalchemy import SQLAlchemy from app.CuBackgroundScheduler import CuBackgroundScheduler from app.CuGeventScheduler import CuGeventScheduler from config import config # scheduler=APScheduler() # scheduler = APScheduler(scheduler=CuGeventScheduler()) scheduler = APScheduler(scheduler=CuBackgroundScheduler()) db = SQLAlchemy() def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) logging.basicConfig(level=logging.ERROR) formatter = logging.Formatter( "[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s][%(thread)d] - %(message)s" ) info_handler = TimedRotatingFileHandler("%s/datas/logs/info.log" %
from flask_caching import Cache
from flask_jwt_oidc import JwtManager
from flask_sqlalchemy import SQLAlchemy
from flask_apscheduler import APScheduler
from elasticapm.contrib.flask import ElasticAPM

from .config import Config
from .helper import Api

# Shared extension singletons, created unbound; each is attached to the
# Flask app elsewhere via the init_app pattern.
apm = ElasticAPM()
db = SQLAlchemy()
jwt = JwtManager()
cache = Cache()
sched = APScheduler()
# REST API rooted at Config.BASE_PATH; Swagger docs served at "<BASE_PATH>/".
api = Api(prefix='{}'.format(Config.BASE_PATH),
          doc='{}/'.format(Config.BASE_PATH),
          default='mds',
          default_label='MDS related operations')
from flask_apscheduler import APScheduler
from flask_caching import Cache
from flask_restful import Api
from raven.contrib.flask import Sentry

# Extension singletons created unbound; the application factory attaches
# them to the Flask app via each extension's init_app().
rest_api = Api()
app_scheduler = APScheduler()
sentry = Sentry()
cache = Cache()
def setUp(self):
    """Build a Flask app with scheduler views enabled and a started scheduler."""
    application = Flask(__name__)
    application.config['SCHEDULER_VIEWS_ENABLED'] = True
    self.app = application
    # Passing the app to the constructor runs init_app immediately.
    self.scheduler = APScheduler(app=application)
    self.scheduler.start()
    self.client = application.test_client()
class TestAPI(TestCase):
    """End-to-end tests for the flask-apscheduler REST API endpoints."""

    def setUp(self):
        # Fresh app + started scheduler with the REST API enabled per test.
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_scheduler_info(self):
        # GET /scheduler returns host/permissions/running metadata.
        response = self.client.get('/scheduler')
        self.assertEqual(response.status_code, 200)
        info = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(info['current_host'])
        self.assertEqual(info['allowed_hosts'], ['*'])
        self.assertTrue(info['running'])

    def test_add_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }
        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)
        # The created job is echoed back with the same fields.
        job2 = json.loads(response.get_data(as_text=True))
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('run_date'), job2.get('run_date'))

    def test_add_conflicted_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }
        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)
        # Re-posting the same job id must yield 409 Conflict.
        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 409)

    def test_add_invalid_job(self):
        # A job without a valid id is rejected with a server error.
        job = {
            'id': None,
        }
        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 500)

    def test_delete_job(self):
        self.__add_job()
        response = self.client.delete('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 204)
        # The job must be gone afterwards.
        response = self.client.get('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_delete_job_not_found(self):
        response = self.client.delete('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_job(self):
        job = self.__add_job()
        response = self.client.get('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 200)
        job2 = json.loads(response.get_data(as_text=True))
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_get_job_not_found(self):
        response = self.client.get('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_all_jobs(self):
        job = self.__add_job()
        response = self.client.get('/scheduler/jobs')
        self.assertEqual(response.status_code, 200)
        jobs = json.loads(response.get_data(as_text=True))
        self.assertEqual(len(jobs), 1)
        job2 = jobs[0]
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_update_job(self):
        job = self.__add_job()
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }
        response = self.client.patch('/scheduler/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 200)
        job2 = json.loads(response.get_data(as_text=True))
        # id/func are untouched; trigger fields reflect the patch.
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(data_to_update.get('args'), job2.get('args'))
        self.assertEqual(data_to_update.get('trigger'), job2.get('trigger'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('start_date'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('next_run_time'))

    def test_update_job_not_found(self):
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }
        response = self.client.patch('/scheduler/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 404)

    def test_update_invalid_job(self):
        self.__add_job()
        # An unknown trigger type is rejected with a server error.
        data_to_update = {
            'trigger': 'invalid_trigger',
        }
        response = self.client.patch('/scheduler/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 500)

    def test_pause_and_resume_job(self):
        self.__add_job()
        # Pausing clears next_run_time; resuming restores it.
        response = self.client.post('/scheduler/jobs/job1/pause')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNone(job.get('next_run_time'))
        response = self.client.post('/scheduler/jobs/job1/resume')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_pause_and_resume_job_not_found(self):
        response = self.client.post('/scheduler/jobs/job1/pause')
        self.assertEqual(response.status_code, 404)
        response = self.client.post('/scheduler/jobs/job1/resume')
        self.assertEqual(response.status_code, 404)

    def test_run_job(self):
        self.__add_job()
        response = self.client.post('/scheduler/jobs/job1/run')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_run_job_not_found(self):
        response = self.client.post('/scheduler/jobs/job1/run')
        self.assertEqual(response.status_code, 404)

    def __add_job(self):
        # Helper: create a canonical interval job and return the API's echo.
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'minutes': 10,
        }
        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))
import fbrefDataGetter import heapq import json import os import pandas as pd import re application = Flask(__name__) SECRET_KEY = os.urandom(24) SESSION_TYPE = "filesystem" application.config.from_object(__name__) Session(application) scheduler = APScheduler() scheduler.init_app(application) scheduler.start() for season in SEASONS: application.apscheduler.add_job( func=fbrefDataGetter.main, args=[season], trigger=IntervalTrigger(hours=24, jitter=15 * 60), id=f"dataScraper_{season}", ) @application.route("/") def home(): dataFiles = os.listdir("./Data") seasons = map(lambda s: re.search("\d+-\d+", s).group(), dataFiles)
# mail setup mail = Mail(app) from views.api import api from views.main import main from views.tasks import tasks, send_notification_mails, process_projects, process_tasks # setup blueprints app.register_blueprint(api, url_prefix='/api') # app.register_blueprint(api) app.register_blueprint(main) # blueprint for background tasks app.register_blueprint(tasks) # scheduler setup scheduler = APScheduler() scheduler.init_app(app) scheduler.start() scheduler.add_job('send_notifications', send_notification_mails, **{ 'trigger': 'interval', 'minutes': 3 }) scheduler.add_job( 'process_tasks', process_tasks, **{ 'trigger': 'interval', 'days': 1, 'start_date': '2017-05-05 00:30:00' }) scheduler.add_job( 'process_projects', process_projects, **{
from flask import Flask, request, Response from sources.twitter_client import mine_twitter from common.storage_manager import save_data import time from flask_apscheduler import APScheduler import json app = Flask(__name__) scheduler = APScheduler() def mining_tasks(): """ Function for test purposes. """ mined_sentiments = mine_twitter() if (len(mined_sentiments) > 0): print("saving it next mined sentiments", mined_sentiments) #insert_sentiments(mined_sentiments) @app.route('/', methods=['GET']) def home(): return "<h1>Augur Api</h1><p>This site is a prototype API for augur a MVP investment tool. </p>" @app.route('/api/v1/recommendations', methods=['GET', 'POST']) def recommendations(): if request.method == "GET": # tweets = load_tweets() # length = len(tweets) # max = 50
print(ordered_columns) df = df[ordered_columns] for col in metric_cols: print(col) df[col] = pd.Series(["{0:.2f}%".format(val * 100) for val in df[col]], index=df.index) #df = df[['Symbol', 'Play', 'Signal', 'Prediction', 'Vol_Chg', 'Wk', 'Mth','Start_Price']] print(df) return render_template('index.html', model_name=model_name, todays_columns=ordered_columns, todays_alerts=df.values, today=datetime.now().strftime('%B %d, %Y')) if __name__ == '__main__': app.config.from_pyfile('gap_app.cfg') scheduler = APScheduler() scheduler.init_app(app) scheduler.start() if app.config['DEBUG'] is False: scheduler.add_job(id='get_finviz_alerts', func='gap_site:process_manager') app.run(host='0.0.0.0', port=8090) else: app.run()
def setUp(self):
    """Give each test a fresh Flask app and an unstarted scheduler."""
    self.scheduler = APScheduler()
    self.app = Flask(__name__)
# Login Manager login_manager = LoginManager() login_manager.init_app(app) login_manager.login_view = 'login' #Shared LDAP bind for searching # Permissions # perms = Permissions(app, db, current_user) # API api = Api(app) # Logging formatter = logging.Formatter( "[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s") handler = RotatingFileHandler(config.logfile, maxBytes=10000, backupCount=1) handler.setFormatter(formatter) app.logger.addHandler(handler) app.logger.setLevel(logging.INFO) #handler.setLevel(logging.INFO) # Scheduled Tasks scheduler = APScheduler() scheduler.init_app(app) scheduler.start() atexit.register(lambda: scheduler.shutdown()) from app import views, views_api, models
from flask import Flask

app = Flask(__name__)

# APScheduler
from flask_apscheduler import APScheduler
from myapp.jobs import Config

# Load job definitions / scheduler settings from the jobs config object.
app.config.from_object(Config())

scheduler = APScheduler()
scheduler.init_app(app)
# NOTE(review): scheduler.start() is never called in this file — confirm
# it is started elsewhere, otherwise the configured jobs never run.

from myapp import views
import myapp.services

# NOTE(review): `import myapp.services` binds the name `myapp`, not
# `services` — these bare `services.*` calls look like a NameError unless
# `services` is brought into scope by one of the imports above; verify.
services.request_heartbeat()
services.check_traffic_status()
services.check_weather_status()
'欢迎使用自动化测试平台\n', '欢迎使用自动化测试平台\n', '欢迎使用自动化测试平台\n', '欢迎使用自动化测试平台\n', '联系QQ253775405\n', '微信15155492421\n', 'github地址\n', 'https://github.com/yangleiqing0/MyFlask.git\n', '遇到任何bug请直接联系我\n', '接定制任务\n', '欢迎使用自动化测试平台\n', '欢迎使用自动化测试平台\n', ]) scheduler = APScheduler() scheduler.add_listener(my_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR) scheduler.start() @app.before_first_request def init_scheduler_job(): from modles.job import Job from views.job import scheduler_job jobs = Job.query.filter(Job.is_start == 1).all() for job in jobs: scheduler_job(job, scheduler) def return_app():
'func': 'app:job1', 'args': (1, 2), 'trigger': 'interval', 'seconds': 10 }] SCHEDULER_TIMEZONE = 'Asia/Shanghai' # 配置时区 SCHEDULER_API_ENABLED = True # 添加API # 配置job store # DATABASE_URL="mysql+pymysql://scott:[email protected]:3306/test?charset=utf8mb4" # SCHEDULER_JOBSTORES = { # 'default': SQLAlchemyJobStore(url=DATABASE_URL) # } # 初始化调度器 scheduler = APScheduler(BackgroundScheduler(timezone="Asia/Shanghai")) def job1(a, b): print(str(a) + ' ' + str(b)) # 使用装饰器调用 @scheduler.task('interval', id='job_2', seconds=30, misfire_grace_time=900) def job2(): print('Job 2 executed') if __name__ == '__main__': app = Flask(__name__) app.config.from_object(Config())
def do_start(app):
    """Load Config into *app*, then attach and start an APScheduler."""
    app.config.from_object(Config())
    job_scheduler = APScheduler()
    job_scheduler.init_app(app)
    job_scheduler.start()
'trigger': 'cron', 'hour': 17, 'minute': 8 }, { 'id': 'job2', 'func': '__main__:job_1', 'args': (3, 4), 'trigger': 'interval', 'seconds': 5 }] def job_1(a, b): # 一個函式,用來做定時任務的任務。 print(str(a) + ' ' + str(b)) app = Flask(__name__) # 例項化flask app.config.from_object(Config()) # 為例項化的flask引入配置 @app.route('/') # 首頁路由 def hello_world(): return 'hello' if __name__ == '__main__': scheduler = APScheduler() # 例項化APScheduler scheduler.init_app(app) # 把任務列表放進flask scheduler.start() # 啟動任務列表 app.run() # 啟動flask
def scheduler_setup(app):
    """Create an APScheduler bound to *app*, start it, and return it."""
    # Passing app= to the constructor performs init_app() for us.
    sched = APScheduler(app=app)
    sched.start()
    return sched
from flask import Flask, Response, abort from flask import request from flask_socketio import SocketIO from flask_apscheduler import APScheduler from flask_cors import CORS from utils import random6 app = Flask('simple API for the modbus light automaton') from automaton_client import AutomatonClient c = AutomatonClient() socketio = SocketIO(app) scheduler = APScheduler() scheduler.init_app(app) scheduler.start() cors = CORS(app) def modbus_decorator(func): @functools.wraps(func) def _wrap(*args, **kwargs): if not c.is_open(): if not c.open(): print("unable to connect to %s:%s" % (c.host(), c.port())) if c.is_open(): return func(*args, **kwargs) return abort(500)
from flask_apscheduler import APScheduler from flask import jsonify, make_response from ..utility.errors import set_error_from_code from ..db.models import db, NodesData, ExperimentsData from ..run import app from ..tasks import flash_node_task, launch_experiment_task, stop_experiment_task from datetime import datetime, timedelta import json scheduler = APScheduler() scheduler.init_app(app) scheduler.start() NUM_FAIL_NODE_SUSPECTED = 4 NUM_FAIL_NODE_DEAD = 10 def check_node_status(): nodes = NodesData.query.all() if nodes: hosts = sorted([node.network_address for node in nodes]) from netaddr import IPAddress, iter_iprange ip_list = iter_iprange(hosts[0], hosts[-1]) # Alive, Busy, Suspected, Dead import subprocess
class TestAPI(TestCase):
    """End-to-end tests for the scheduler's REST API (job CRUD + control)."""

    def setUp(self):
        # Fresh app with the HTTP API enabled and the scheduler running.
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_scheduler_info(self):
        # GET <prefix> returns host / whitelist / running metadata.
        response = self.client.get(self.scheduler.api_prefix)
        self.assertEqual(response.status_code, 200)
        info = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(info['current_host'])
        self.assertEqual(info['allowed_hosts'], ['*'])
        self.assertTrue(info['running'])

    def test_add_job(self):
        # POST /jobs echoes back the created job's fields.
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }
        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)
        job2 = json.loads(response.get_data(as_text=True))
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('run_date'), job2.get('run_date'))

    def test_add_conflicted_job(self):
        # Posting the same job id twice yields 409 Conflict.
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }
        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)
        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 409)

    def test_add_invalid_job(self):
        # A job without a usable id is rejected with a 500.
        job = {
            'id': None,
        }
        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 500)

    def test_delete_job(self):
        self.__add_job()
        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 204)
        # The job must be gone after deletion.
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_delete_job_not_found(self):
        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_job(self):
        job = self.__add_job()
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 200)
        job2 = json.loads(response.get_data(as_text=True))
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_get_job_not_found(self):
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_all_jobs(self):
        job = self.__add_job()
        response = self.client.get(self.scheduler.api_prefix + '/jobs')
        self.assertEqual(response.status_code, 200)
        jobs = json.loads(response.get_data(as_text=True))
        self.assertEqual(len(jobs), 1)
        job2 = jobs[0]
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_update_job(self):
        # PATCH can retarget an existing job onto a new trigger/args.
        job = self.__add_job()
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }
        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 200)
        job2 = json.loads(response.get_data(as_text=True))
        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(data_to_update.get('args'), job2.get('args'))
        self.assertEqual(data_to_update.get('trigger'), job2.get('trigger'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('start_date'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('next_run_time'))

    def test_update_job_not_found(self):
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }
        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 404)

    def test_update_invalid_job(self):
        # An unknown trigger name surfaces as a 500.
        self.__add_job()
        data_to_update = {
            'trigger': 'invalid_trigger',
        }
        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 500)

    def test_pause_and_resume_job(self):
        # Pausing clears next_run_time; resuming restores it.
        self.__add_job()
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/pause')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNone(job.get('next_run_time'))
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/resume')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_pause_and_resume_job_not_found(self):
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/pause')
        self.assertEqual(response.status_code, 404)
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/resume')
        self.assertEqual(response.status_code, 404)

    def test_run_job(self):
        self.__add_job()
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/run')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_run_job_not_found(self):
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/run')
        self.assertEqual(response.status_code, 404)

    def __add_job(self):
        # Helper: register a 10-minute interval job; return the API's echo.
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'minutes': 10,
        }
        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))
from app import app
from os import environ  # NOTE(review): unused in this file — confirm before removing
from flask_apscheduler import APScheduler

if __name__ == '__main__':
    scheduler = APScheduler()
    # NOTE(review): `gen_update` is never imported or defined here, so this
    # line raises NameError at startup — confirm where it should come from.
    # 604800 seconds == 7 days between runs.
    scheduler.add_job(func=gen_update, trigger='interval', id='job', seconds=604800)
    scheduler.start()
    app.run()
return {'code': 0, 'data': get_send_job(openid)} @app.route('/now/<string:openid>/<path:message>') def send_now(openid, message): """立即推送""" if not user_exist(openid): return {"code": 101, "msg": "openId 不存在!r"} if send_template(openid, message): return {"code": 0, "msg": "ok"} else: return {"code": 101, "msg": "send error"} if __name__ == '__main__': start = refresh_token() if not start: print("启动失败!") exit() print(f'Running on http://{ip}:{port}/') scheduler = APScheduler(BackgroundScheduler(timezone="Asia/Shanghai")) scheduler.init_app(app) scheduler.add_job(id='refresh_token', func=refresh_token, trigger='interval', seconds=7000, replace_existing=True) scheduler.start() http_server = WSGIServer((ip, port), app) http_server.serve_forever()
appt_noti(apptlink[i]) #test relationship # usertest = User.query.first() # eduscheduler = EduSchedu.query.first() # eduscheduler.students.append(usertest) # database.session.commit() @socketio.on('username') def senduser(): usernametosend = current_user.name instance = Instance.query.first() instance.page_views += 1 database.session.commit() print(usernametosend) socketio.emit('username', usernametosend) scheduler = APScheduler() scheduler.add_job(func=job_function, trigger='cron', hour='0-23', minute='0-59', id='1', day_of_week='0-6') scheduler.start() if __name__ == "__main__": socketio.run(app)
from flask import Flask
from flask_apscheduler import APScheduler

app = Flask(__name__)
scheduler = APScheduler()


@app.route("/")
def index():
    """Trivial landing page."""
    return "Hello World"


def schedulerTask():
    """Job body: runs every 2 seconds once the scheduler is started."""
    print("This sheduler work every 2 second")


if __name__ == '__main__':
    scheduler.add_job(id='Sheduler Task', func=schedulerTask, trigger='interval', seconds=2)
    # Bug fix: bind the scheduler to the Flask app so it picks up the app's
    # configuration before starting (the original never called init_app).
    scheduler.init_app(app)
    scheduler.start()
    # use_reloader=False: the debug reloader re-imports this module in a
    # child process, which would schedule/start the job twice.
    app.run(debug=True, use_reloader=False)
from flask import Flask
from flask_apscheduler import APScheduler


class Config(object):
    """Flask-APScheduler settings: one interval job plus the web views."""

    JOBS = [
        {
            'id': 'job1',
            'func': '__main__:job1',
            'args': (1, 2),
            'trigger': 'interval',
            'seconds': 10,
        },
    ]

    SCHEDULER_VIEWS_ENABLED = True


def job1(a, b):
    """Print both arguments separated by a single space."""
    print('{} {}'.format(a, b))


app = Flask(__name__)
app.config.from_object(Config())
app.debug = True

scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()

app.run()
class TestScheduler(TestCase):
    """Lifecycle tests for flask_apscheduler.APScheduler."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()

    def test_running(self):
        # The scheduler reports running only after an explicit start().
        self.assertFalse(self.scheduler.running)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)

    def test_start_with_allowed_hosts(self):
        # A host whitelist that does not match this server keeps the
        # scheduler stopped even after start() is called.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = ['any_server_name']
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_start_without_allowed_hosts(self):
        # An empty whitelist likewise prevents the scheduler from starting.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = []
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_shutdown(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_load_jobs_from_config(self):
        # Jobs listed under app.config['JOBS'] are registered by init_app().
        self.app.config['JOBS'] = [
            {
                'id': 'job1',
                'func': 'tests.test_views:job1',
                'trigger': 'interval',
                'seconds': 10
            }
        ]
        self.scheduler.init_app(self.app)
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)
#! usr/bin/env python """hacky project""" import json from flask import Flask, request from flask_apscheduler import APScheduler from reminder import add_reminder, display_time app = Flask(__name__) scheduler = APScheduler() scheduler.init_app(app) scheduler.start() scheduler.add_job(id='tick', func=display_time, trigger='interval', seconds=30) @app.route("/", methods=['POST']) def get_reminder(): print(request.args) reminder_text = request.args['text'] reminder_delay = int(request.args['delay']) add_reminder(scheduler, reminder_text, reminder_delay) return json.dumps({ 'status':'success', 'text': reminder_text, 'delay': reminder_delay }) if __name__ == "__main__":
def initSchedule():
    """Configure the module-level Flask app and launch its APScheduler."""
    app.config.from_object(Config())
    # Binding via the constructor is equivalent to calling init_app(app).
    sched = APScheduler(app=app)
    sched.start()
    print("Scheduler Initialized\n")
"storageBucket": "hospitalusers-d3cda.appspot.com", "messagingSenderId": "1074652022617", "appId": "1:1074652022617:web:20c1567f52ae88b8b8ae48" } firebase=pyrebase.initialize_app(config) storage=firebase.storage() pp = pprint.PrettyPrinter(indent=4) db = firebase.database() pickdb = pickledb.load('example1.db', False) pickdbLogin = pickledb.load('example.db', False) app = Flask(__name__) scheduler = APScheduler() cors = CORS(app) app.config['CORS_HEADERS'] = 'Content-Type' @app.route("/") @cross_origin() def index(): return "Welcome to the scheduler!" def myconverter(o): if isinstance(o, dt.datetime): return o.__str__()
class TestViews(TestCase): def setUp(self): self.app = Flask(__name__) self.app.config['SCHEDULER_VIEWS_ENABLED'] = True self.scheduler = APScheduler(app=self.app) self.scheduler.start() self.client = self.app.test_client() def test_add_job(self): job = { 'id': 'job1', 'func': 'test_views:job1', 'trigger': 'date', 'run_date': '2020-12-01T12:30:01+00:00', } response = self.client.post('/scheduler/jobs', data=json.dumps(job)) self.assertEqual(response.status_code, 200) job2 = json.loads(response.get_data(as_text=True)) self.assertEqual(job.get('id'), job2.get('id')) self.assertEqual(job.get('func'), job2.get('func')) self.assertEqual(job.get('trigger'), job2.get('trigger')) self.assertEqual(job.get('run_date'), job2.get('run_date')) def test_delete_job(self): self.__add_job() response = self.client.delete('/scheduler/jobs/job1') self.assertEqual(response.status_code, 204) response = self.client.get('/scheduler/jobs/job1') self.assertEqual(response.status_code, 404) def test_get_job(self): job = self.__add_job() response = self.client.get('/scheduler/jobs/job1') self.assertEqual(response.status_code, 200) job2 = json.loads(response.get_data(as_text=True)) self.assertEqual(job.get('id'), job2.get('id')) self.assertEqual(job.get('func'), job2.get('func')) self.assertEqual(job.get('trigger'), job2.get('trigger')) self.assertEqual(job.get('minutes'), job2.get('minutes')) def test_get_all_jobs(self): job = self.__add_job() response = self.client.get('/scheduler/jobs') self.assertEqual(response.status_code, 200) jobs = json.loads(response.get_data(as_text=True)) self.assertEqual(len(jobs), 1) job2 = jobs[0] self.assertEqual(job.get('id'), job2.get('id')) self.assertEqual(job.get('func'), job2.get('func')) self.assertEqual(job.get('trigger'), job2.get('trigger')) self.assertEqual(job.get('minutes'), job2.get('minutes')) def test_update_job(self): job = self.__add_job() data_to_update = { 'args': [1] } response = self.client.patch('/scheduler/jobs/job1', 
data=json.dumps(data_to_update)) self.assertEqual(response.status_code, 200) job2 = json.loads(response.get_data(as_text=True)) self.assertEqual(job.get('id'), job2.get('id')) self.assertEqual(job.get('func'), job2.get('func')) self.assertEqual(data_to_update.get('args'), job2.get('args')) self.assertEqual(job.get('trigger'), job2.get('trigger')) self.assertEqual(job.get('minutes'), job2.get('minutes')) def test_pause_and_resume_job(self): self.__add_job() response = self.client.post('/scheduler/jobs/job1/pause') self.assertEqual(response.status_code, 200) job = json.loads(response.get_data(as_text=True)) self.assertIsNone(job.get('next_run_time')) response = self.client.post('/scheduler/jobs/job1/resume') self.assertEqual(response.status_code, 200) job = json.loads(response.get_data(as_text=True)) self.assertIsNotNone(job.get('next_run_time')) def __add_job(self): job = { 'id': 'job1', 'func': 'test_views:job1', 'trigger': 'interval', 'minutes': 10, } response = self.client.post('/scheduler/jobs', data=json.dumps(job)) return json.loads(response.get_data(as_text=True))
class TestScheduler(TestCase):
    """Lifecycle tests for flask_apscheduler.APScheduler."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()

    def test_running(self):
        # The scheduler reports running only after an explicit start().
        self.assertFalse(self.scheduler.running)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)

    def test_start_with_allowed_hosts(self):
        # A host whitelist that does not match this server keeps the
        # scheduler stopped even after start() is called.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = ['any_server_name']
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_start_without_allowed_hosts(self):
        # An empty whitelist likewise prevents the scheduler from starting.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = []
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_shutdown(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_load_jobs_from_config(self):
        # Jobs listed under app.config['JOBS'] are registered by init_app().
        self.app.config['JOBS'] = [{
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'seconds': 10
        }]
        self.scheduler.init_app(self.app)
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)
# Four feed-build jobs: the first two rebuild the RSS/Atom feeds every
# hour; the last two carry no trigger, so they presumably fire once at
# scheduler start-up (default date trigger) — confirm against the
# flask_apscheduler version in use.
flask.config['JOBS'] = [
    {
        'id': 'buildNewlyAddedRSSFeed',
        'func': 'server.views:buildNewlyAddedRSSFeed',
        'trigger': 'interval',
        'seconds': (60*60)
    },
    {
        'id': 'buildNewlyAddedAtomFeed',
        'func': 'server.views:buildNewlyAddedAtomFeed',
        'trigger': 'interval',
        'seconds': (60*60)
    },
    {
        'id': 'buildNewlyAddedRSSFeedAtStartup',
        'func': 'server.views:buildNewlyAddedRSSFeed'
    },
    {
        'id': 'buildNewlyAddedAtomFeedAtStartup',
        'func': 'server.views:buildNewlyAddedAtomFeed'
    }
]

# Expose the scheduler's web views; run the app in debug mode.
flask.config['SCHEDULER_VIEWS_ENABLED'] = True
flask.debug = True

scheduler = APScheduler()
scheduler.init_app(flask)
scheduler.start()

if __name__ == '__main__':
    flask.run(host = '0.0.0.0', port = 8000)
# "projectId": "hospitalusers-d3cda", # "storageBucket": "hospitalusers-d3cda.appspot.com", # "messagingSenderId": "1074652022617", # "appId": "1:1074652022617:web:20c1567f52ae88b8b8ae48" # } firebase = pyrebase.initialize_app(config) storage = firebase.storage() pp = pprint.PrettyPrinter(indent=4) db = firebase.database() pickdbExpo = pickledb.load('expoFirst.db', False) pickdbDate = pickledb.load('date.db', False) pickdbIndExpo = pickledb.load('expoSecond.db', False) app = Flask(__name__) scheduler = APScheduler() cors = CORS(app) app.config['CORS_HEADERS'] = 'Content-Type' @app.route("/") @cross_origin() def index(): return "Welcome to the scheduler!" @app.route("/getSysInfo", methods=['GET']) @cross_origin() def index2(): f = dict(psutil.virtual_memory()._asdict()) d = {}
from medivisor import views


def scheduler_db_confirm():
    """Run db_confirm() inside the Flask application context."""
    with scheduler.app.app_context():
        db_confirm()


def scheduler_db_clear():
    """Run db_clear() inside the Flask application context."""
    with scheduler.app.app_context():
        db_clear()


if __name__ == "__main__":
    # Scheduler init
    scheduler = APScheduler()
    scheduler.init_app(app)
    # Scheduler add: confirm orders every minute, clear the DB hourly.
    scheduler.start()
    scheduler.add_job(id='confirm orders', func=scheduler_db_confirm, trigger='interval', seconds=60)
    scheduler.add_job(id='clear DB', func=scheduler_db_clear, trigger='interval', seconds=3600)
    # Start whole app
    app.run()
def task_init():
    """Start the background scheduler for `app`, then run an initial refresh()."""
    # Constructor binding is equivalent to init_app(app).
    sched = APScheduler(app=app)
    sched.start()
    refresh()
class GameState(Enum): WAITING = 'waiting' INITIAL = 'initial' WAITING_A = 'waiting_a' WAITING_B = 'waiting_b' START_TIMER = 'start_timer' BATTLE_TIMER = 'battle_timer' class Config: SCHEDULER_API_ENABLED = True # create app app = Flask(__name__) scheduler = APScheduler() scheduler.api_enabled = True scheduler.init_app(app) scheduler.start() app = Flask(__name__) socketio = SocketIO(app) @app.route('/') def teams(): return render_template('teams.html', state=read_state()) @app.route('/timer')